├── server ├── .gitignore ├── .dockerignore ├── tsconfig.jest.json ├── README.md ├── tsconfig.json ├── src │ ├── services │ │ ├── data_service │ │ │ ├── collection.ts │ │ │ ├── db_connector │ │ │ │ ├── index.ts │ │ │ │ ├── factory.ts │ │ │ │ ├── nedb_connector.ts │ │ │ │ └── mongodb_connector.ts │ │ │ ├── index.ts │ │ │ └── migrations.ts │ │ ├── data_layer │ │ │ ├── index.ts │ │ │ ├── basedb.ts │ │ │ ├── utils.ts │ │ │ ├── nedb.ts │ │ │ └── mongodb.ts │ │ ├── json_service.ts │ │ ├── process_service.ts │ │ ├── grafana_service.ts │ │ ├── notification_service.ts │ │ ├── data_puller.ts │ │ ├── analytics_service.ts │ │ └── alert_service.ts │ ├── utils │ │ ├── grafana.ts │ │ ├── uid.ts │ │ ├── reporter.ts │ │ ├── time.ts │ │ ├── url.ts │ │ └── segments.ts │ ├── models │ │ ├── grafana_panel_model.ts │ │ ├── analytics_message_model.ts │ │ ├── analytic_units │ │ │ ├── utils.ts │ │ │ ├── index.ts │ │ │ ├── types.ts │ │ │ ├── pattern_analytic_unit_model.ts │ │ │ ├── threshold_analytic_unit_model.ts │ │ │ ├── analytic_unit_model.ts │ │ │ ├── db.ts │ │ │ └── anomaly_analytic_unit_model.ts │ │ ├── analytics_task_model.ts │ │ ├── analytic_unit_cache_model.ts │ │ ├── detection_model.ts │ │ └── segment_model.ts │ ├── routes │ │ ├── panel_router.ts │ │ ├── detections_router.ts │ │ ├── segments_router.ts │ │ ├── data_router.ts │ │ └── analytic_units_router.ts │ ├── index.ts │ └── config.ts ├── build │ ├── underscore-lodash-wrapper.js │ ├── node-loader.js │ ├── dev-server.js │ ├── webpack.dev.conf.js │ ├── webpack.prod.conf.js │ └── webpack.base.conf.js ├── jest.config.js ├── spec │ ├── setup_tests.ts │ ├── utils_for_tests │ │ ├── segments.ts │ │ ├── detection_spans.ts │ │ └── analytic_units.ts │ ├── segments.jest.ts │ ├── utils │ │ ├── url.jest.ts │ │ └── segments.jest.ts │ ├── models │ │ ├── segment_model.jest.ts │ │ └── detection_model.jest.ts │ └── analytic_controller.jest.ts ├── Dockerfile ├── .vscode │ └── launch.json ├── .env.example └── package.json ├── .dockerignore ├── 
.github └── FUNDING.yml ├── images ├── hastic_logo.png ├── hastic_server.png └── hastic-server-pic.jpg ├── .gitmodules ├── tools └── prometheus-hastic-exporter │ ├── requirements.txt │ ├── prometheus-hastic-exporter.service │ ├── prometheus-hastic-exporter.py │ └── Hastic Exporter dashboard.json ├── .gitignore ├── mongo-init.js ├── config.example.json ├── Makefile ├── .travis.yml ├── docker-compose.yml ├── docker-compose-mongo.yml └── README.md /server/.gitignore: -------------------------------------------------------------------------------- 1 | dist/* 2 | -------------------------------------------------------------------------------- /server/.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | npm-debug 3 | .vscode 4 | -------------------------------------------------------------------------------- /server/tsconfig.jest.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig" 3 | } 4 | -------------------------------------------------------------------------------- /server/README.md: -------------------------------------------------------------------------------- 1 | # Hastic server 2 | 3 | It is a REST server based on KoaJS 4 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | * 2 | # TODO: we don't need all files from .git 3 | !.git 4 | !server 5 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | patreon: corpglory 4 | -------------------------------------------------------------------------------- /images/hastic_logo.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/hastic/hastic-server/HEAD/images/hastic_logo.png -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "analytics"] 2 | path = analytics 3 | url = https://github.com/hastic/analytics 4 | -------------------------------------------------------------------------------- /images/hastic_server.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hastic/hastic-server/HEAD/images/hastic_server.png -------------------------------------------------------------------------------- /images/hastic-server-pic.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hastic/hastic-server/HEAD/images/hastic-server-pic.jpg -------------------------------------------------------------------------------- /tools/prometheus-hastic-exporter/requirements.txt: -------------------------------------------------------------------------------- 1 | prometheus_client>=0.6.0 2 | requests>=2.7.0 3 | python-dateutil>=2.7.3 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | data 2 | dist/**/*.rpm 3 | config.json 4 | .env 5 | 6 | node_modules/ 7 | 8 | .vscode/ 9 | package-lock.json 10 | *.pyc 11 | *.txt 12 | *.log 13 | -------------------------------------------------------------------------------- /server/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "sourceMap": true, 4 | "module": "commonjs", 5 | "target": "es6", 6 | // "strict": true 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /server/src/services/data_service/collection.ts: 
-------------------------------------------------------------------------------- 1 | export enum Collection { 2 | ANALYTIC_UNITS, 3 | ANALYTIC_UNIT_CACHES, 4 | SEGMENTS, 5 | THRESHOLD, 6 | DETECTION_SPANS, 7 | DB_META 8 | }; 9 | -------------------------------------------------------------------------------- /mongo-init.js: -------------------------------------------------------------------------------- 1 | db.createUser( 2 | { 3 | user: "hastic", 4 | pwd: "password", 5 | roles: [ 6 | { 7 | role: "readWrite", 8 | db: "hastic" 9 | } 10 | ] 11 | } 12 | ); 13 | -------------------------------------------------------------------------------- /server/src/utils/grafana.ts: -------------------------------------------------------------------------------- 1 | import { GRAFANA_URL } from '../config'; 2 | 3 | export function getGrafanaUrl(browserGrafanaUrl: string): string { 4 | return (GRAFANA_URL !== null) ? GRAFANA_URL : browserGrafanaUrl; 5 | } 6 | -------------------------------------------------------------------------------- /server/build/underscore-lodash-wrapper.js: -------------------------------------------------------------------------------- 1 | // we need this module because we can't avoid underscore lib usage in nedb 2 | // missing `pluck` method added 3 | var _ = require('lodash'); 4 | _.pluck = _.map; 5 | module.exports = _; 6 | -------------------------------------------------------------------------------- /server/src/models/grafana_panel_model.ts: -------------------------------------------------------------------------------- 1 | export type GrafanaPanelTemplate = { 2 | // TODO: not any 3 | analyticUnitTemplates: any[] 4 | } 5 | 6 | export type GrafanaTemplateVariables = { 7 | grafanaUrl: string, 8 | panelId: string, 9 | datasourceUrl: string 10 | }; 11 | -------------------------------------------------------------------------------- /tools/prometheus-hastic-exporter/prometheus-hastic-exporter.service: 
-------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=hastic-exporter 3 | 4 | [Service] 5 | RestartSec=3 6 | Restart=always 7 | ExecStart=/usr/bin/hastic-exporter.py http://0.0.0.0:8000 5777 8 | 9 | [Install] 10 | WantedBy=multi-user.target 11 | -------------------------------------------------------------------------------- /server/src/services/data_service/db_connector/index.ts: -------------------------------------------------------------------------------- 1 | import { Collection } from '../collection'; 2 | import { dbCollection } from '../../data_layer'; 3 | 4 | export interface DbConnector { 5 | db: Map; 6 | init(): Promise; 7 | // TODO: static instance? 8 | } 9 | -------------------------------------------------------------------------------- /config.example.json: -------------------------------------------------------------------------------- 1 | { 2 | "HASTIC_PORT": 8000, 3 | "HASTIC_API_KEY": "eyJrIjoiVjZqMHY0dHk4UEE3eEN4MzgzRnd2aURlMWlIdXdHNW4iLCJuIjoiaGFzdGljIiwiaWQiOjF9", 4 | "HASTIC_WEBHOOK_URL": "http://localhost:8080", 5 | "GRAFANA_URL": "http://localhost:3000", 6 | "LEARNING_TIMEOUT": 120, 7 | "HS_AN_LOGGING_LEVEL": "DEBUG" 8 | } 9 | -------------------------------------------------------------------------------- /server/build/node-loader.js: -------------------------------------------------------------------------------- 1 | var path = require('path'); 2 | 3 | // based on: https://github.com/webpack-contrib/node-loader/blob/master/index.js 4 | module.exports = function nodeLoader(m, q) { 5 | return (` 6 | var modulePath = __dirname + '/${path.basename(this.resourcePath)}'; 7 | try { 8 | global.process.dlopen(module, modulePath); 9 | } catch(e) { 10 | throw new Error('dlopen: Cannot open ' + modulePath + ': ' + e); 11 | } 12 | `); 13 | } 14 | -------------------------------------------------------------------------------- /Makefile: 
-------------------------------------------------------------------------------- 1 | .PHONY: rpm deb 2 | 3 | all: rpm deb 4 | 5 | rpm: 6 | docker run --rm -it -v `pwd`/server:/root/rpmbuild/server \ 7 | -v `pwd`/analytics:/root/rpmbuild/analytics \ 8 | -v `pwd`/.git:/root/rpmbuild/.git \ 9 | -v `pwd`/build/rpmbuild:/root/rpmbuild/rpm \ 10 | -v `pwd`/dist/RPMS:/root/rpmbuild/RPMS \ 11 | -e "NODE_VERSION=6.14.0" \ 12 | -e "HASTIC_RELEASE_VERSION=`cat server/package.json| jq -r .version | sed 's/-/_/g'`" \ 13 | hastic/rpmbuilder rpmbuild -bb rpm/hastic-server.spec 14 | -------------------------------------------------------------------------------- /server/build/dev-server.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process'); 2 | const nodemon = require('nodemon'); 3 | 4 | const webpack = spawn('webpack', ['--config', 'build/webpack.dev.conf.js'], { 5 | stdio: 'inherit', 6 | shell: false 7 | }); 8 | 9 | var env = Object.create(process.env); 10 | env.LOG_LEVEL = 'debug'; 11 | 12 | nodemon({ env, script: 'dist/server-dev.js' }) 13 | .on('start', function () { 14 | console.log('nodemon started'); 15 | }).on('crash', function () { 16 | console.log('hastic-server crashed'); 17 | }); 18 | -------------------------------------------------------------------------------- /server/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "verbose": true, 3 | "globals": { 4 | "ts-jest": { 5 | "useBabelrc": true, 6 | "tsConfigFile": "tsconfig.jest.json" 7 | }, 8 | "GIT_VERSION": "version", 9 | "GIT_COMMITHASH": "commit_hash", 10 | "GIT_BRANCH": "branch" 11 | }, 12 | "transform": { 13 | "\\.ts": "ts-jest" 14 | }, 15 | "testRegex": "(\\.|/)([jt]est)\\.[jt]s$", 16 | "moduleFileExtensions": [ 17 | "ts", 18 | "js", 19 | "json" 20 | ], 21 | "setupFiles": [ 22 | "/spec/setup_tests.ts" 23 | ] 24 | }; 25 | 
-------------------------------------------------------------------------------- /server/spec/setup_tests.ts: -------------------------------------------------------------------------------- 1 | import { createTestDB } from './utils_for_tests/analytic_units'; 2 | import { clearSegmentsDB } from './utils_for_tests/segments'; 3 | 4 | console.log = jest.fn(); 5 | console.error = jest.fn(); 6 | 7 | jest.mock('../src/config.ts', () => ({ 8 | DATA_PATH: 'fake-data-path', 9 | HASTIC_API_KEY: 'fake-key', 10 | HASTIC_DB_CONNECTION_TYPE: 'nedb', 11 | HASTIC_IN_MEMORY_PERSISTANCE: true, 12 | HASTIC_ALERT_TYPE: 'webhook', 13 | AlertTypes: jest.requireActual('../src/config').AlertTypes, 14 | })); 15 | 16 | clearSegmentsDB(); 17 | createTestDB(); 18 | -------------------------------------------------------------------------------- /server/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:8-alpine AS build 2 | 3 | RUN apk add --no-cache git 4 | 5 | # Note: context starts in the directory above (see docker-compose file) 6 | COPY .git /var/www/.git 7 | COPY server /var/www/server 8 | 9 | WORKDIR /var/www/server 10 | 11 | RUN npm install 12 | RUN npm run build 13 | 14 | FROM node:8-alpine 15 | 16 | # Note: context starts in the directory above (see docker-compose file) 17 | COPY server/package.json /var/www/server/ 18 | 19 | WORKDIR /var/www/server 20 | 21 | COPY --from=build /var/www/server/dist /var/www/server/dist 22 | 23 | VOLUME ["/var/www/data"] 24 | 25 | ENV INSIDE_DOCKER true 26 | 27 | CMD ["npm", "start"] 28 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | services: 2 | - docker 3 | 4 | matrix: 5 | include: 6 | - language: node_js 7 | node_js: 8 | - "8" 9 | env: 10 | - CXX=g++-4.8 11 | before_script: 12 | - cd server 13 | - npm install 14 | script: 15 | - npm test 16 | - language: 
node_js 17 | node_js: 18 | - "6.14" 19 | env: 20 | - CXX=g++-4.8 21 | before_script: 22 | - cd server 23 | - npm install 24 | script: 25 | - npm run build 26 | - language: python 27 | python: 3.6.6 28 | before_script: 29 | - cd analytics 30 | - pip install -r requirements.txt 31 | script: 32 | - python -m unittest discover 33 | 34 | notifications: 35 | email: false 36 | -------------------------------------------------------------------------------- /server/build/webpack.dev.conf.js: -------------------------------------------------------------------------------- 1 | const base = require('./webpack.base.conf'); 2 | 3 | const webpack = require('webpack'); 4 | 5 | base.mode = 'development'; 6 | base.watch = true; 7 | base.output.filename = "server-dev.js"; 8 | 9 | // https://webpack.js.org/configuration/devtool/ 10 | base.devtool = 'inline-source-map'; 11 | 12 | base.externals = base.externals ? base.externals : []; 13 | base.externals.push( 14 | function(context, request, callback) { 15 | if(request[0] == '.') { 16 | callback(); 17 | } else { 18 | callback(null, "require('" + request + "')"); 19 | } 20 | } 21 | ); 22 | 23 | base.plugins = base.plugins ? 
base.plugins : []; 24 | base.plugins.push(new webpack.DefinePlugin({ 25 | 'process.env.NODE_ENV': JSON.stringify('development') 26 | })); 27 | 28 | module.exports = base; 29 | -------------------------------------------------------------------------------- /server/src/utils/uid.ts: -------------------------------------------------------------------------------- 1 | import { randomBytes } from 'crypto'; 2 | 3 | 4 | // Copied from https://github.com/louischatriot/nedb/blob/master/lib/customUtils.js 5 | 6 | /** 7 | * Return a random alphanumerical string of length len 8 | * There is a very small probability (less than 1/1,000,000) for the length to be less than len 9 | * (il the base64 conversion yields too many pluses and slashes) but 10 | * that's not an issue here 11 | * The probability of a collision is extremely small (need 3*10^12 documents to have one chance in a million of a collision) 12 | * See http://en.wikipedia.org/wiki/Birthday_problem 13 | */ 14 | export function uid(len: number): string { 15 | return randomBytes(Math.ceil(Math.max(8, len * 2))) 16 | .toString('base64') 17 | .replace(/[+\/]/g, '') 18 | .slice(0, len); 19 | } -------------------------------------------------------------------------------- /server/src/utils/reporter.ts: -------------------------------------------------------------------------------- 1 | export function availableReporter( 2 | positiveArgs: any|null, 3 | negativeArgs: any|null, 4 | positiveAction = console.log, 5 | negativeAction = console.error, 6 | ) { 7 | let reported = false; 8 | return available => { 9 | if(available && reported) { 10 | reported = false; 11 | if(positiveArgs) { 12 | if(!(positiveArgs instanceof Array)) { 13 | positiveArgs = [ positiveArgs ]; 14 | } 15 | positiveAction.apply(null, positiveArgs); 16 | } 17 | } 18 | 19 | if(!available && !reported) { 20 | reported = true; 21 | if(negativeArgs) { 22 | if(!(negativeArgs instanceof Array)) { 23 | negativeArgs = [ negativeArgs ]; 24 | } 25 | 
negativeAction.apply(null, negativeArgs); 26 | } 27 | } 28 | } 29 | }; 30 | -------------------------------------------------------------------------------- /server/src/services/data_layer/index.ts: -------------------------------------------------------------------------------- 1 | import { DbQueryWrapper, dbCollection } from './basedb'; 2 | import { NeDbQueryWrapper } from './nedb'; 3 | import { MongoDbQueryWrapper } from './mongodb'; 4 | 5 | import { HASTIC_DB_CONNECTION_TYPE } from '../../config'; 6 | 7 | export enum DBType { 8 | nedb = 'nedb', 9 | mongodb = 'mongodb' 10 | }; 11 | 12 | export { NeDbQueryWrapper, MongoDbQueryWrapper, DbQueryWrapper, dbCollection }; 13 | 14 | export function getDbQueryWrapper(): DbQueryWrapper { 15 | if(HASTIC_DB_CONNECTION_TYPE === DBType.nedb) { 16 | return new NeDbQueryWrapper(); 17 | } 18 | if(HASTIC_DB_CONNECTION_TYPE === DBType.mongodb) { 19 | return new MongoDbQueryWrapper(); 20 | } 21 | 22 | throw new Error( 23 | `"${HASTIC_DB_CONNECTION_TYPE}" HASTIC_DB_CONNECTION_TYPE is not supported. 
Possible values: "nedb", "mongodb"` 24 | ); 25 | } 26 | -------------------------------------------------------------------------------- /server/src/models/analytics_message_model.ts: -------------------------------------------------------------------------------- 1 | export enum AnalyticsMessageMethod { 2 | TASK = 'TASK', 3 | TASK_RESULT = 'TASK_RESULT', 4 | DETECT = 'DETECT', 5 | PUSH_DETECT = 'PUSH_DETECT', 6 | DATA = 'DATA' 7 | } 8 | 9 | export class AnalyticsMessage { 10 | public constructor( 11 | public method: AnalyticsMessageMethod, 12 | public payload?: any, 13 | public requestId?: number 14 | ) { 15 | 16 | } 17 | 18 | public toObject() { 19 | return { 20 | method: this.method, 21 | payload: this.payload, 22 | requestId: this.requestId 23 | }; 24 | } 25 | 26 | static fromObject(obj: any): AnalyticsMessage { 27 | if(obj.method === undefined) { 28 | throw new Error('No method in obj:' + obj); 29 | } 30 | return new AnalyticsMessage(obj.method, obj.payload, obj.requestId); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /server/spec/utils_for_tests/segments.ts: -------------------------------------------------------------------------------- 1 | import { TEST_ANALYTIC_UNIT_ID } from './analytic_units'; 2 | import * as Segment from '../../src/models/segment_model'; 3 | 4 | import * as _ from 'lodash'; 5 | 6 | export function buildSegments(times: number[][]): Segment.Segment[] { 7 | return times.map(t => { 8 | return new Segment.Segment(TEST_ANALYTIC_UNIT_ID, t[0], t[1], false, false, undefined); 9 | }); 10 | } 11 | 12 | export function convertSegmentsToTimeRanges(segments: Segment.Segment[]): number[][] { 13 | const ranges = segments.map(segment => [segment.from, segment.to]); 14 | return _.sortBy(ranges, range => range[0]); 15 | } 16 | 17 | export async function clearSegmentsDB(): Promise { 18 | const segments = await Segment.findMany(TEST_ANALYTIC_UNIT_ID, { labeled: false, deleted: false }); 19 | await 
Segment.removeSegments(_.compact(segments.map(s => s.id))); 20 | } 21 | -------------------------------------------------------------------------------- /server/src/services/data_layer/basedb.ts: -------------------------------------------------------------------------------- 1 | import * as nedb from 'nedb'; 2 | import * as mongodb from 'mongodb'; 3 | 4 | export type dbCollection = nedb | mongodb.Collection; 5 | 6 | export interface DbQueryWrapper { 7 | dbInsertOne(collection: dbCollection, doc: object): Promise; 8 | dbInsertMany(collection: dbCollection, docs: object[]): Promise; 9 | dbUpdateOne(collection: dbCollection, query: string | object, updateQuery: object): Promise; 10 | dbUpdateMany(collection: dbCollection, query: string[] | object, updateQuery: object): Promise; 11 | dbFindOne(collection: dbCollection, query: string | object): Promise; 12 | dbFindMany(collection: dbCollection, query: string[] | object, sortQuery: object): Promise; 13 | dbRemoveOne(collection: dbCollection, query: string | object): Promise; 14 | dbRemoveMany(collection: dbCollection, query: string[] | object): Promise; 15 | } 16 | 17 | export class QueryExecutionError extends Error { 18 | } 19 | -------------------------------------------------------------------------------- /server/src/models/analytic_units/utils.ts: -------------------------------------------------------------------------------- 1 | import { DetectorType } from './types'; 2 | import { AnalyticUnit } from './analytic_unit_model'; 3 | import { PatternAnalyticUnit } from './pattern_analytic_unit_model'; 4 | import { AnomalyAnalyticUnit } from './anomaly_analytic_unit_model'; 5 | import { ThresholdAnalyticUnit } from './threshold_analytic_unit_model'; 6 | 7 | import * as _ from 'lodash'; 8 | 9 | 10 | export function createAnalyticUnitFromObject(obj: any): AnalyticUnit { 11 | if (obj === undefined) { 12 | throw new Error('obj is undefined'); 13 | } 14 | 15 | const detectorType: DetectorType = obj.detectorType; 16 | switch 
(detectorType) { 17 | case DetectorType.PATTERN: 18 | return PatternAnalyticUnit.fromObject(obj); 19 | case DetectorType.ANOMALY: 20 | return AnomalyAnalyticUnit.fromObject(obj); 21 | case DetectorType.THRESHOLD: 22 | return ThresholdAnalyticUnit.fromObject(obj); 23 | 24 | default: 25 | throw new Error(`Can't create analytic unit with type "${detectorType}"`); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /server/src/models/analytic_units/index.ts: -------------------------------------------------------------------------------- 1 | import { createAnalyticUnitFromObject } from './utils'; 2 | import { 3 | AnalyticUnitId, AnalyticUnitStatus, DetectorType, ANALYTIC_UNIT_TYPES 4 | } from './types'; 5 | import { AnalyticUnit } from './analytic_unit_model'; 6 | import { PatternAnalyticUnit } from './pattern_analytic_unit_model'; 7 | import { ThresholdAnalyticUnit, Condition } from './threshold_analytic_unit_model'; 8 | import { AnomalyAnalyticUnit, Bound } from './anomaly_analytic_unit_model'; 9 | import { 10 | findById, 11 | findMany, 12 | create, 13 | remove, 14 | update, 15 | insertMany, 16 | setStatus, 17 | setDetectionTime, 18 | setAlert, 19 | setMetric 20 | } from './db'; 21 | 22 | 23 | export { 24 | AnalyticUnit, PatternAnalyticUnit, ThresholdAnalyticUnit, AnomalyAnalyticUnit, 25 | AnalyticUnitId, AnalyticUnitStatus, Bound, DetectorType, ANALYTIC_UNIT_TYPES, 26 | createAnalyticUnitFromObject, Condition, 27 | findById, findMany, 28 | create, remove, update, insertMany, 29 | setStatus, setDetectionTime, setAlert, setMetric 30 | }; 31 | -------------------------------------------------------------------------------- /server/src/utils/time.ts: -------------------------------------------------------------------------------- 1 | import { HASTIC_TIMEZONE_OFFSET } from '../config'; 2 | 3 | import * as _ from 'lodash'; 4 | import * as moment from 'moment'; 5 | 6 | const MINUTES_IN_HOUR = 60; 7 | const TIME_FORMAT = 'ddd 
MMM DD YYYY HH:mm:ss UTCZ'; 8 | 9 | export function parseTimeZone(timeZone: string): number { 10 | timeZone = timeZone.replace(/['|"]/g, ''); 11 | const re = /^-?\d{1,2}?:\d{2}$/; 12 | const correctFormat = re.test(timeZone); 13 | if(!correctFormat) { 14 | throw new Error(`Wrong timeZone format in config - "HASTIC_TIMEZONE_OFFSET": ${timeZone}`); 15 | } 16 | const time = _.split(timeZone, ':'); 17 | let minutesOffset = Math.abs(Number(time[0])) * MINUTES_IN_HOUR + Number(time[1]); 18 | if(timeZone.indexOf('-') !== -1) { 19 | minutesOffset = -1 * minutesOffset; 20 | } 21 | return minutesOffset; 22 | } 23 | 24 | export function toTimeZone(time: moment.MomentInput): string { 25 | const utcTime = moment(time).utc(); 26 | const timeWithOffset = utcTime.utcOffset(HASTIC_TIMEZONE_OFFSET); 27 | return timeWithOffset.format(TIME_FORMAT); 28 | } 29 | 30 | -------------------------------------------------------------------------------- /server/.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "launch", 10 | "name": "Launch Program", 11 | "program": "${workspaceFolder}/dist/server.js", 12 | "skipFiles": [ 13 | "/**" 14 | ] 15 | }, 16 | { 17 | "type": "node", 18 | "request": "attach", 19 | "name": "Attach to Remote", 20 | "address": "127.0.0.1", 21 | "sourceMaps": true, 22 | "outFiles": [ "${workspaceFolder}/dist/**/*.js" ], 23 | "sourceMapPathOverrides": { 24 | "webpack:///./~/*": "${workspaceRoot}/node_modules/*", 25 | "webpack:///./*": "${workspaceRoot}/*", 26 | "webpack:///*": "*" 27 | }, 28 | "port": 9229, 29 | "restart": true, 30 | "trace": true, 31 | "timeout": 100000 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /server/src/utils/url.ts: -------------------------------------------------------------------------------- 1 | import * as url from 'url'; 2 | 3 | export function normalizeUrl(grafanaUrl: string) { 4 | if(!grafanaUrl) { 5 | return grafanaUrl; 6 | } 7 | let urlObj = url.parse(grafanaUrl); 8 | if(urlObj.protocol !== 'http:' && urlObj.protocol !== 'https:') { 9 | grafanaUrl = `http://${grafanaUrl}`; 10 | urlObj = url.parse(grafanaUrl); 11 | console.log('No protocol provided in GRAFANA_URL -> inserting "http://"'); 12 | } 13 | if(urlObj.slashes === false) { 14 | urlObj = url.parse(`${urlObj.protocol}//${urlObj.pathname}`); 15 | console.log('No slashes were provided after the protocol -> inserting slashes'); 16 | } 17 | if(urlObj.pathname.slice(-1) === '/') { 18 | urlObj.pathname = urlObj.pathname.slice(0, -1); 19 | console.log('Removing the slash at the end of GRAFANA_URL'); 20 | } 21 | let finalUrl = `${urlObj.protocol}//${urlObj.hostname}`; 22 | if(urlObj.port !== null) { 23 | finalUrl = finalUrl + ':' + urlObj.port; 24 | } 25 | if(urlObj.pathname !== '') { 26 | finalUrl = finalUrl + urlObj.pathname; 27 | } 28 | 
return finalUrl; 29 | } 30 | -------------------------------------------------------------------------------- /server/spec/segments.jest.ts: -------------------------------------------------------------------------------- 1 | import { TEST_ANALYTIC_UNIT_ID, createTestDB, clearTestDB } from './utils_for_tests/analytic_units'; 2 | import { buildSegments, clearSegmentsDB } from './utils_for_tests/segments'; 3 | 4 | import * as Segment from '../src/models/segment_model'; 5 | 6 | import * as _ from 'lodash'; 7 | 8 | const INITIAL_SEGMENTS = buildSegments([[0, 1], [2, 3], [4, 5]]); 9 | 10 | beforeAll(async () => { 11 | await clearTestDB(); 12 | await createTestDB(); 13 | }); 14 | 15 | beforeEach(async () => { 16 | await Segment.mergeAndInsertSegments(INITIAL_SEGMENTS); 17 | }); 18 | 19 | afterEach(async () => { 20 | await clearSegmentsDB(); 21 | }); 22 | 23 | describe('mergeAndInsertSegments', function() { 24 | it('should be merged before insertion', async function() { 25 | const segmentsToInsert = buildSegments([[1, 2]]); 26 | await Segment.mergeAndInsertSegments(segmentsToInsert); 27 | 28 | let actualSegments = await Segment.findMany(TEST_ANALYTIC_UNIT_ID, {}); 29 | actualSegments.forEach(s => { s.id = undefined }); 30 | actualSegments = _.sortBy(actualSegments, s => s.from); 31 | expect(actualSegments).toEqual(buildSegments([[0, 3], [4, 5]])); 32 | }); 33 | }); 34 | 35 | -------------------------------------------------------------------------------- /server/build/webpack.prod.conf.js: -------------------------------------------------------------------------------- 1 | const semver = require('semver'); 2 | 3 | const webpack = require('webpack'); 4 | const path = require('path'); 5 | const fs = require('fs'); 6 | 7 | var base = require('./webpack.base.conf'); 8 | 9 | const TARGET_NODE_VERSION = process.versions.node; 10 | 11 | base.mode = 'production'; 12 | base.output.filename = "server.js"; 13 | base.optimization.minimize = true; 14 | 15 | const prodRules = [ 16 | { 
17 | test: /\.js$/, 18 | use: { 19 | loader: 'babel-loader', 20 | options: { 21 | plugins: ["transform-object-rest-spread"], // for transpiling "ws" lib 22 | // it's necessary only for node < 8.3.0, 23 | // so could be optimized 24 | presets: [ 25 | ["env", { "targets": { "node": TARGET_NODE_VERSION }}] 26 | ] 27 | } 28 | } 29 | }, 30 | { 31 | test: /\.node$/, 32 | use: [ 33 | { loader: './build/node-loader' }, 34 | { loader: 'file-loader', options: { name: '[name].[ext]' } } 35 | ] 36 | } 37 | ]; 38 | 39 | base.module.rules = [...base.module.rules, ...prodRules]; 40 | 41 | module.exports = base; 42 | -------------------------------------------------------------------------------- /server/src/services/data_service/db_connector/factory.ts: -------------------------------------------------------------------------------- 1 | import { DBType } from '../../data_layer'; 2 | import { DbConnector } from './index'; 3 | import { MongodbConnector } from './mongodb_connector'; 4 | import { NedbConnector } from './nedb_connector'; 5 | 6 | import * as config from '../../../config'; 7 | 8 | 9 | export class DbConnectorFactory { 10 | private static _connector: DbConnector; 11 | 12 | private constructor() { } 13 | 14 | public static async getDbConnector(): Promise { 15 | if(this._connector !== undefined) { 16 | return this._connector; 17 | } 18 | 19 | let connector: DbConnector; 20 | switch(config.HASTIC_DB_CONNECTION_TYPE) { 21 | case DBType.nedb: 22 | connector = NedbConnector.instance; 23 | break; 24 | 25 | case DBType.mongodb: 26 | connector = MongodbConnector.instance; 27 | break; 28 | 29 | default: 30 | throw new Error( 31 | `"${config.HASTIC_DB_CONNECTION_TYPE}" HASTIC_DB_CONNECTION_TYPE is not supported.
Possible values: "nedb", "mongodb"` 32 | ); 33 | } 34 | 35 | await connector.init(); 36 | this._connector = connector; 37 | return this._connector; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /server/spec/utils_for_tests/detection_spans.ts: -------------------------------------------------------------------------------- 1 | import { TEST_ANALYTIC_UNIT_ID } from './analytic_units'; 2 | 3 | import * as Detection from '../../src/models/detection_model'; 4 | 5 | import * as _ from 'lodash'; 6 | 7 | export type DetectionSpanOptions = { from: number, to: number, status: Detection.DetectionStatus }; 8 | 9 | export function buildSpans(options: DetectionSpanOptions[]): Detection.DetectionSpan[] { 10 | return options.map(option => { 11 | return new Detection.DetectionSpan(TEST_ANALYTIC_UNIT_ID, option.from, option.to, option.status); 12 | }); 13 | } 14 | 15 | export async function insertSpans(options: DetectionSpanOptions[]): Promise { 16 | const spansToInsert = buildSpans(options); 17 | const insertPromises = spansToInsert.map(async span => Detection.insertSpan(span)); 18 | await Promise.all(insertPromises); 19 | } 20 | 21 | export function convertSpansToOptions(spans: Detection.DetectionSpan[]): DetectionSpanOptions[] { 22 | const spansOptions = spans.map(span => ({ from: span.from, to: span.to, status: span.status })); 23 | return _.sortBy(spansOptions, spanOptions => spanOptions.from); 24 | } 25 | 26 | export async function clearSpansDB(): Promise { 27 | await Detection.clearSpans(TEST_ANALYTIC_UNIT_ID); 28 | } 29 | -------------------------------------------------------------------------------- /server/src/services/data_layer/utils.ts: -------------------------------------------------------------------------------- 1 | import { FilterQuery, ObjectID } from 'mongodb'; 2 | 3 | //TODO: move to DbQueryWrapper 4 | 5 | export function wrapIdToQuery(query: string | object): object { 6 | if(typeof query === 'string') { 7 | 
return { _id: query }; 8 | } 9 | return query; 10 | } 11 | 12 | export function wrapIdToMongoDbQuery(query: FilterQuery): object { 13 | if(typeof query === 'string') { 14 | return { _id: new ObjectID(query) }; 15 | } 16 | if(typeof query._id === 'string') { 17 | return { _id: new ObjectID(query._id) }; 18 | } 19 | return query; 20 | } 21 | 22 | export function wrapIdsToQuery(query: string[] | object): object { 23 | if(Array.isArray(query)) { 24 | return { _id: { $in: query } }; 25 | } 26 | return query; 27 | } 28 | 29 | // mongodb uses ObjectIds to store _id 30 | // we should wrap ids into ObjectID to generate correct query 31 | export function wrapIdsToMongoDbQuery(query: string[] | object): object { 32 | if(Array.isArray(query)) { 33 | query = query.map(id => new ObjectID(id)); 34 | return { _id: { $in: query } }; 35 | } 36 | return query; 37 | } 38 | 39 | export function isEmptyArray(obj: any): boolean { 40 | if(!Array.isArray(obj)) { 41 | return false; 42 | } 43 | return obj.length == 0; 44 | } 45 | -------------------------------------------------------------------------------- /server/src/routes/panel_router.ts: -------------------------------------------------------------------------------- 1 | import { GrafanaPanelTemplate, GrafanaTemplateVariables } from '../models/grafana_panel_model'; 2 | import { exportPanel, importPanel } from '../services/grafana_service'; 3 | 4 | import * as Router from 'koa-router'; 5 | 6 | 7 | async function exportGrafanaPanelTemplate(ctx: Router.IRouterContext) { 8 | const panelId = ctx.request.query.panelId as string; 9 | if(panelId === undefined) { 10 | throw new Error('Cannot export analytic units with undefined panelId'); 11 | } 12 | 13 | const panelTemplate = await exportPanel(panelId); 14 | ctx.response.body = panelTemplate; 15 | } 16 | 17 | async function importGrafanaPanelTemplate(ctx: Router.IRouterContext) { 18 | const { panelTemplate, templateVariables } = ctx.request.body as { 19 | panelTemplate: GrafanaPanelTemplate, 
20 | templateVariables: GrafanaTemplateVariables 21 | }; 22 | 23 | if(panelTemplate.analyticUnitTemplates === undefined) { 24 | throw new Error('Cannot import analytic units with undefined analyticUnitTemplates'); 25 | } 26 | await importPanel(panelTemplate, templateVariables); 27 | ctx.response.status = 200; 28 | } 29 | 30 | export var router = new Router(); 31 | 32 | router.get('/template', exportGrafanaPanelTemplate); 33 | router.post('/template', importGrafanaPanelTemplate); 34 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2.1' 2 | services: 3 | server: 4 | image: hastic/server:latest 5 | build: 6 | dockerfile: server/Dockerfile 7 | context: . 8 | environment: 9 | HASTIC_API_KEY: ${HASTIC_API_KEY?"HASTIC_API_KEY variable is required"} 10 | GRAFANA_URL: ${GRAFANA_URL?"GRAFANA_URL variable is required"} 11 | 12 | HASTIC_ALERT_TYPE: ${HASTIC_ALERT_TYPE} 13 | HASTIC_ALERTMANAGER_URL: ${HASTIC_ALERTMANAGER_URL} 14 | HASTIC_WEBHOOK_URL: ${HASTIC_WEBHOOK_URL} 15 | HASTIC_INSTANCE_NAME: ${HASTIC_INSTANCE_NAME} 16 | HASTIC_ALERT_IMAGE: ${HASTIC_ALERT_IMAGE} 17 | HASTIC_DB_CONNECTION_STRING: ${HASTIC_DB_CONNECTION_STRING} 18 | HASTIC_DB_CONNECTION_TYPE: ${HASTIC_DB_CONNECTION_TYPE} 19 | HASTIC_TIMEZONE_OFFSET: ${HASTIC_TIMEZONE_OFFSET} 20 | 21 | ports: 22 | - ${HASTIC_PORT:-8000}:8000 23 | volumes: 24 | - data-volume:/var/www/data 25 | networks: 26 | - hastic-network 27 | restart: always 28 | 29 | analytics: 30 | image: hastic/analytics:latest 31 | build: analytics 32 | environment: 33 | # TODO: use any port for server connection 34 | HASTIC_SERVER_URL: "ws://server:8002" 35 | HS_AN_LOGGING_LEVEL: ${HS_AN_LOGGING_LEVEL} 36 | networks: 37 | - hastic-network 38 | restart: always 39 | 40 | volumes: 41 | data-volume: 42 | 43 | networks: 44 | hastic-network: 45 | 
-------------------------------------------------------------------------------- /server/spec/utils/url.jest.ts: -------------------------------------------------------------------------------- 1 | import { normalizeUrl } from '../../src/utils/url'; 2 | 3 | describe('Normalize URL', function() { 4 | const cases = [ 5 | { value: '127.0.0.1:8000', expected: 'http://127.0.0.1:8000' }, 6 | { value: '127.0.0.1:8000/', expected: 'http://127.0.0.1:8000' }, 7 | { value: 'localhost:8000', expected: 'http://localhost:8000' }, 8 | { value: 'localhost:8000/', expected: 'http://localhost:8000' }, 9 | { value: 'http://localhost:3000', expected: 'http://localhost:3000' }, 10 | { value: 'http://localhost:3000/', expected: 'http://localhost:3000' }, 11 | { value: 'https://localhost:8000', expected: 'https://localhost:8000' }, 12 | { value: 'https://localhost:8000/', expected: 'https://localhost:8000' }, 13 | { value: 'http://example.com', expected: 'http://example.com' }, 14 | { value: 'http://example.com/', expected: 'http://example.com' }, 15 | { value: 'https://example.com', expected: 'https://example.com' }, 16 | { value: 'https://example.com/', expected: 'https://example.com' }, 17 | { value: 'https://example.com/grafana', expected: 'https://example.com/grafana' }, 18 | { value: 'https://example.com/grafana/', expected: 'https://example.com/grafana' }, 19 | ]; 20 | 21 | it('should normalize URLs correctly', function() { 22 | cases.forEach(testCase => { 23 | expect(normalizeUrl(testCase.value)).toBe(testCase.expected); 24 | }); 25 | }); 26 | }); 27 | -------------------------------------------------------------------------------- /server/src/routes/detections_router.ts: -------------------------------------------------------------------------------- 1 | import * as AnalyticsController from '../controllers/analytics_controller'; 2 | import { AnalyticUnitId } from '../models/analytic_units'; 3 | import { DetectionSpan } from '../models/detection_model'; 4 | 5 | import * as Router 
from 'koa-router'; 6 | 7 | 8 | declare type DetectionSpansResponse = { 9 | spans: DetectionSpan[] 10 | } 11 | 12 | export async function getDetectionSpans(ctx: Router.IRouterContext) { 13 | const id = ctx.request.query.id as string; 14 | if(id === undefined || id === '') { 15 | throw new Error('analyticUnitId (id) is missing'); 16 | } 17 | 18 | let from: number = +ctx.request.query.from; 19 | if(isNaN(from) || ctx.request.query.from === '') { 20 | throw new Error(`from is missing or corrupted (got ${ctx.request.query.from})`); 21 | } 22 | let to: number = +ctx.request.query.to; 23 | if(isNaN(to) || ctx.request.query.to === '') { 24 | throw new Error(`to is missing or corrupted (got ${ctx.request.query.to})`); 25 | } 26 | 27 | if(from >= to) { 28 | throw new Error(`'from' timestamp ${from} must be less than 'to' timestamp ${to}`); 29 | } 30 | 31 | let response: DetectionSpansResponse = { spans: [] }; 32 | // TODO: invalidate 33 | response.spans = await AnalyticsController.getDetectionSpans(id, from, to); 34 | ctx.response.body = response; 35 | } 36 | 37 | export const router = new Router(); 38 | 39 | router.get('/spans', getDetectionSpans); 40 | -------------------------------------------------------------------------------- /server/src/services/json_service.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | 3 | 4 | async function getJsonData(filename: string): Promise { 5 | var data = await new Promise((resolve, reject) => { 6 | fs.readFile(filename, 'utf8', (err, data) => { 7 | if(err) { 8 | console.error(err); 9 | reject('Can`t read file'); 10 | } else { 11 | resolve(data); 12 | } 13 | }); 14 | }); 15 | 16 | try { 17 | return JSON.parse(data); 18 | } catch(e) { 19 | console.error(e); 20 | throw new Error('Wrong file format'); 21 | } 22 | } 23 | 24 | function writeJsonData(filename: string, data: Object) { 25 | return new Promise((resolve, reject) => { 26 | fs.writeFile(filename, 
JSON.stringify(data), 'utf8', (err) => { 27 | if(err) { 28 | console.error(err); 29 | reject('Cat`t write file'); 30 | } else { 31 | resolve(); 32 | } 33 | }); 34 | }) 35 | } 36 | 37 | function getJsonDataSync(filename: string) { 38 | let data = fs.readFileSync(filename, 'utf8'); 39 | try { 40 | return JSON.parse(data); 41 | } catch(e) { 42 | console.error(e); 43 | throw new Error('Wrong file format'); 44 | } 45 | } 46 | 47 | function writeJsonDataSync(filename: string, data: Object) { 48 | fs.writeFileSync(filename, JSON.stringify(data)); 49 | } 50 | 51 | export { 52 | getJsonData, 53 | writeJsonData, 54 | getJsonDataSync, 55 | writeJsonDataSync 56 | } 57 | -------------------------------------------------------------------------------- /server/.env.example: -------------------------------------------------------------------------------- 1 | # (required) Grafana URL which can be queried from hastic-server host (e.g. http://localhost:3000), 2 | GRAFANA_URL=http://localhost:3000 3 | 4 | # (required) API-key of your Grafana instance 5 | # (e.g. eyJrIjoiVjZqMHY0dHk4UEE3eEN4MzgzRnd2aURlMWlIdXdHNW4iLCJuIjoiaGFzdGljIiwiaWQiOjF9), 6 | # see https://grafana.com/docs/grafana/latest/http_api/auth/#create-api-token 7 | HASTIC_API_KEY= 8 | 9 | # (optional) websockets URL to connect to hastic-server from analytics 10 | HASTIC_SERVER_URL=ws://localhost:8002 11 | 12 | # (optional) port you want to run server on, default: 8000 13 | HASTIC_PORT=8000 14 | 15 | # (optional) use it if you want to get webhooks on detections (e.g. http://localhost:8080) 16 | HASTIC_WEBHOOK_URL=http://localhost:8080 17 | 18 | # (optional) type of alerts (detections) receiver (e.g. 
webhook or alertmanager) 19 | HASTIC_ALERT_TYPE= 20 | 21 | # (optional) URL to send alerts if ALERT_TYPE is alertmanager (e.g http://localhost:9093) 22 | HASTIC_ALERTMANAGER_URL= 23 | 24 | # (optional) Hastic instance name which is used in alerts 25 | HASTIC_INSTANCE_NAME= 26 | 27 | # (optional) whether to send a chart in the notification 28 | HASTIC_ALERT_IMAGE= 29 | 30 | # (optional) connection-string for MongoDB (not used with nedb), (e.g. hastic:password@mongodb.example.com:27017/hastic) 31 | HASTIC_DB_CONNECTION_STRING= 32 | 33 | # (optional) database type. Can have the following values: nedb, mongodb 34 | HASTIC_DB_CONNECTION_TYPE= 35 | 36 | # (optional) timezone offset in hours from utc (e.g -3:30) 37 | HASTIC_TIMEZONE_OFFSET= 38 | -------------------------------------------------------------------------------- /server/src/routes/segments_router.ts: -------------------------------------------------------------------------------- 1 | import * as AnalyticsController from '../controllers/analytics_controller'; 2 | 3 | import { AnalyticUnitId } from '../models/analytic_units'; 4 | import * as Segment from '../models/segment_model'; 5 | 6 | import * as Router from 'koa-router'; 7 | 8 | 9 | export async function getSegments(ctx: Router.IRouterContext) { 10 | const id = ctx.request.query.id as string; 11 | if(id === undefined || id === '') { 12 | throw new Error('analyticUnitId (id) is missing'); 13 | } 14 | let from = +ctx.request.query.from; 15 | if(isNaN(from)) { 16 | from = undefined; 17 | } 18 | let to = +ctx.request.query.to; 19 | if(isNaN(to)) { 20 | to = undefined; 21 | } 22 | 23 | const segments = await Segment.findIntersectedSegments(id, from, to); 24 | ctx.response.body = { segments }; 25 | } 26 | 27 | async function updateSegments(ctx: Router.IRouterContext) { 28 | const { 29 | addedSegments, id, removedSegments: removedIds 30 | } = ctx.request.body as { 31 | addedSegments: any[], id: AnalyticUnitId, removedSegments: Segment.SegmentId[] 32 | }; 33 | 34 
| const segmentsToInsert: Segment.Segment[] = addedSegments.map( 35 | s => Segment.Segment.fromObject({ analyticUnitId: id, ...s }) 36 | ); 37 | 38 | const { addedIds } = await AnalyticsController.updateSegments( 39 | id, segmentsToInsert, removedIds 40 | ); 41 | 42 | ctx.response.body = { addedIds }; 43 | } 44 | 45 | export const router = new Router(); 46 | 47 | router.get('/', getSegments); 48 | router.patch('/', updateSegments); 49 | -------------------------------------------------------------------------------- /server/src/services/process_service.ts: -------------------------------------------------------------------------------- 1 | import * as config from '../config'; 2 | 3 | var exitHandlers: (() => void)[] = []; 4 | var exitHandled = false; 5 | 6 | /** 7 | * Add a callback for closing programm bacause of any reason 8 | * 9 | * @param callback a sync function 10 | */ 11 | export function registerExitHandler(callback: () => void) { 12 | exitHandlers.push(callback); 13 | } 14 | 15 | function exitHandler(options: any, err?: any) { 16 | if(exitHandled) { 17 | return; 18 | } 19 | exitHandled = true; 20 | for(let i = 0; i < exitHandlers.length; i++) { 21 | try { 22 | exitHandlers[i](); 23 | } catch(e) { 24 | console.error('Got error during exit: ' + e); 25 | if(!config.PRODUCTION_MODE && e instanceof Error) { 26 | console.error(e.stack); 27 | } 28 | } 29 | } 30 | console.log('process exited successfully'); 31 | process.exit(); 32 | } 33 | 34 | function catchException(options: any, err: any) { 35 | console.log('Server exception:'); 36 | console.log(err); 37 | exitHandler({ exit: true }); 38 | } 39 | 40 | //do something when app is closing 41 | process.on('exit', exitHandler.bind(null, { cleanup:true })); 42 | 43 | //catches ctrl+c event 44 | process.on('SIGINT', exitHandler.bind(null, { exit:true })); 45 | 46 | // catches "kill pid" (for example: nodemon restart) 47 | process.on('SIGUSR1', exitHandler.bind(null, { exit:true })); 48 | process.on('SIGUSR2', 
exitHandler.bind(null, { exit:true })); 49 | 50 | //catches uncaught exceptions 51 | process.on('uncaughtException', catchException.bind(null, { exit:true })); -------------------------------------------------------------------------------- /server/src/models/analytics_task_model.ts: -------------------------------------------------------------------------------- 1 | import { AnalyticUnitId } from './analytic_units'; 2 | 3 | import { uid } from "../utils/uid"; 4 | 5 | 6 | const UID_LENGTH = 16; 7 | 8 | export type AnalyticsTaskId = string; 9 | export enum AnalyticsTaskType { 10 | LEARN = 'LEARN', 11 | DETECT = 'DETECT', 12 | PUSH_DETECT = 'PUSH_DETECT', 13 | CANCEL = 'CANCEL', 14 | PUSH = 'PUSH', 15 | PROCESS = 'PROCESS' 16 | }; 17 | 18 | export class AnalyticsTask { 19 | 20 | constructor( 21 | public analyticUnitId: AnalyticUnitId, 22 | public type: AnalyticsTaskType, 23 | public payload?: any, 24 | private _id?: AnalyticsTaskId 25 | ) { 26 | if(analyticUnitId === undefined) { 27 | throw new Error('analyticUnitId is undefined'); 28 | } 29 | if(type === undefined || type === null) { 30 | throw new Error('type is undefined or null'); 31 | } 32 | 33 | } 34 | 35 | public get id(): AnalyticsTaskId { 36 | if(this._id === undefined) { 37 | this._id = uid(UID_LENGTH); 38 | } 39 | return this._id; 40 | } 41 | 42 | public toObject() { 43 | return { 44 | _id: this.id, 45 | analyticUnitId: this.analyticUnitId, 46 | type: this.type, 47 | payload: this.payload 48 | }; 49 | } 50 | 51 | static fromObject(obj: any): AnalyticsTask { 52 | if(obj === undefined) { 53 | throw new Error('obj is undefined'); 54 | } 55 | return new AnalyticsTask( 56 | obj.analyticUnitId, 57 | obj.type as AnalyticsTaskType, 58 | obj.payload, 59 | obj._id, 60 | ); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /server/src/models/analytic_units/types.ts: -------------------------------------------------------------------------------- 1 | import { Metric } 
from '@corpglory/tsdb-kit'; 2 | 3 | 4 | export type AnalyticUnitId = string; 5 | export enum AnalyticUnitStatus { 6 | READY = 'READY', 7 | PENDING = 'PENDING', 8 | LEARNING = 'LEARNING', 9 | DETECTION = 'DETECTION', 10 | SUCCESS = 'SUCCESS', 11 | FAILED = 'FAILED' 12 | }; 13 | 14 | export type FindManyQuery = { 15 | name?: string, 16 | grafanaUrl?: string, 17 | panelId?: string, 18 | type?: string, 19 | metric?: Metric, 20 | alert?: boolean, 21 | id?: AnalyticUnitId, 22 | lastDetectionTime?: number, 23 | status?: AnalyticUnitStatus, 24 | error?: string, 25 | labeledColor?: string, 26 | deletedColor?: string, 27 | detectorType?: DetectorType, 28 | visible?: boolean, 29 | collapsed?: boolean 30 | }; 31 | 32 | export const ANALYTIC_UNIT_TYPES = { 33 | pattern: [ 34 | { 35 | name: 'General', 36 | value: 'GENERAL' 37 | }, 38 | { 39 | name: 'Peak', 40 | value: 'PEAK' 41 | }, 42 | { 43 | name: 'Trough', 44 | value: 'TROUGH' 45 | }, 46 | { 47 | name: 'Jump', 48 | value: 'JUMP' 49 | }, 50 | { 51 | name: 'Drop', 52 | value: 'DROP' 53 | } 54 | ], 55 | anomaly: [ 56 | { 57 | name: 'Confidence Snake', 58 | value: 'ANOMALY' 59 | } 60 | ], 61 | threshold: [ 62 | { 63 | name: 'Threshold', 64 | value: 'THRESHOLD' 65 | } 66 | ] 67 | }; 68 | 69 | export enum DetectorType { 70 | PATTERN = 'pattern', 71 | ANOMALY = 'anomaly', 72 | THRESHOLD = 'threshold' 73 | }; 74 | -------------------------------------------------------------------------------- /server/src/routes/data_router.ts: -------------------------------------------------------------------------------- 1 | import * as AnalyticUnit from '../models/analytic_units'; 2 | import * as AnalyticsController from '../controllers/analytics_controller'; 3 | 4 | import * as Router from 'koa-router'; 5 | 6 | 7 | async function query(ctx: Router.IRouterContext) { 8 | 9 | let queryFrom = ctx.request.query.from as string; 10 | let queryTo = ctx.request.query.to as string; 11 | const analyticUnitId = ctx.request.query.analyticUnitId as string; 
12 | 13 | if(analyticUnitId === undefined) { 14 | throw new Error(`data router error: request must contain analyticUnitId`); 15 | } 16 | 17 | if(queryFrom === undefined) { 18 | throw new Error(`data router error: request must contain 'from'`) 19 | } 20 | 21 | if(queryTo === undefined) { 22 | throw new Error(`data router error: request must contain 'to'`) 23 | } 24 | 25 | const from = +queryFrom; 26 | const to = +queryTo; 27 | 28 | if(from === NaN) { 29 | throw new Error(`from must be not NaN`); 30 | } 31 | 32 | if(to === NaN) { 33 | throw new Error(`to must be not NaN`); 34 | } 35 | 36 | if(to <= from) { 37 | throw new Error(`data router error: 'to' must be greater than 'from' (from:${from} to:${to})`); 38 | } 39 | 40 | const analyticUnit = await AnalyticUnit.findById(analyticUnitId); 41 | 42 | if(analyticUnit === null) { 43 | throw new Error(`can't find analytic unit ${analyticUnitId}`); 44 | } 45 | 46 | const results = await AnalyticsController.getHSR(analyticUnit, from, to); 47 | ctx.response.body = { results }; 48 | } 49 | 50 | export const router = new Router(); 51 | 52 | router.get('/', query); 53 | -------------------------------------------------------------------------------- /server/build/webpack.base.conf.js: -------------------------------------------------------------------------------- 1 | const webpack = require('webpack'); 2 | const GitRevisionPlugin = require('git-revision-webpack-plugin'); 3 | const gitRevisionPlugin = new GitRevisionPlugin(); 4 | 5 | const path = require('path'); 6 | 7 | 8 | function resolve(p) { 9 | return path.resolve(__dirname, './../', p); 10 | } 11 | 12 | module.exports = { 13 | target: 'node', 14 | node: { 15 | __dirname: false, 16 | __filename: false, 17 | }, 18 | entry: [ 'babel-polyfill', './src/index.ts' ], 19 | output: { 20 | path: resolve('dist') 21 | }, 22 | optimization: { 23 | minimize: false 24 | }, 25 | plugins: [ 26 | new webpack.DefinePlugin({ 27 | 'GIT_VERSION': JSON.stringify(gitRevisionPlugin.version()), 28 
| 'GIT_COMMITHASH': JSON.stringify(gitRevisionPlugin.commithash()), 29 | 'GIT_BRANCH': JSON.stringify(gitRevisionPlugin.branch()), 30 | }) 31 | ], 32 | resolve: { 33 | // default `modules` value is `["node_modules"]` 34 | // we change it to resolve `underscore-lodash-wrapper` 35 | modules: [__dirname, 'node_modules'], 36 | extensions: [".ts", ".js"], 37 | alias: { 38 | 'any-promise': 'es6-promise', 39 | 'underscore': 'underscore-lodash-wrapper' 40 | } 41 | }, 42 | stats: { 43 | warningsFilter: [ 44 | 'mongodb-client-encryption', 45 | /ws\/lib/, 46 | /mongodb\/lib\/core/, 47 | /require_optional/ 48 | ] 49 | }, 50 | module: { 51 | rules: [ 52 | { 53 | test: /\.ts$/, 54 | use: [ 55 | { loader: 'babel-loader' }, 56 | { loader: 'ts-loader' } 57 | ] 58 | } 59 | ] 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /server/src/models/analytic_units/pattern_analytic_unit_model.ts: -------------------------------------------------------------------------------- 1 | import { AnalyticUnit } from './analytic_unit_model'; 2 | import { AnalyticUnitId, AnalyticUnitStatus, DetectorType } from './types'; 3 | 4 | import { Metric } from '@corpglory/tsdb-kit'; 5 | 6 | 7 | export class PatternAnalyticUnit extends AnalyticUnit { 8 | constructor( 9 | name: string, 10 | grafanaUrl: string, 11 | panelId: string, 12 | type: string, 13 | metric?: Metric, 14 | alert?: boolean, 15 | id?: AnalyticUnitId, 16 | lastDetectionTime?: number, 17 | status?: AnalyticUnitStatus, 18 | error?: string, 19 | labeledColor?: string, 20 | deletedColor?: string, 21 | visible?: boolean, 22 | collapsed?: boolean 23 | ) { 24 | super( 25 | name, 26 | grafanaUrl, 27 | panelId, 28 | type, 29 | metric, 30 | alert, 31 | id, 32 | lastDetectionTime, 33 | status, 34 | error, 35 | labeledColor, 36 | deletedColor, 37 | DetectorType.PATTERN, 38 | visible, 39 | collapsed 40 | ); 41 | } 42 | 43 | toObject() { 44 | const baseObject = super.toObject(); 45 | return { 46 | 
...baseObject 47 | }; 48 | } 49 | 50 | toPanelObject() { 51 | const baseObject = super.toPanelObject(); 52 | return { 53 | ...baseObject 54 | }; 55 | } 56 | 57 | static fromObject(obj: any) { 58 | // TODO: remove duplication 59 | let metric: Metric | undefined = undefined; 60 | if(obj.metric !== undefined) { 61 | metric = Metric.fromObject(obj.metric); 62 | } 63 | 64 | return new PatternAnalyticUnit( 65 | obj.name, 66 | obj.grafanaUrl, 67 | obj.panelId, 68 | obj.type, 69 | metric, 70 | obj.alert, 71 | obj._id, 72 | obj.lastDetectionTime, 73 | obj.status, 74 | obj.error, 75 | obj.labeledColor, 76 | obj.deletedColor, 77 | obj.visible, 78 | obj.collapsed 79 | ); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /docker-compose-mongo.yml: -------------------------------------------------------------------------------- 1 | version: '2.1' 2 | services: 3 | server: 4 | image: hastic/server:latest 5 | build: 6 | dockerfile: server/Dockerfile 7 | context: . 
8 | environment: 9 | HASTIC_API_KEY: ${HASTIC_API_KEY?"HASTIC_API_KEY variable is required"} 10 | GRAFANA_URL: ${GRAFANA_URL?"GRAFANA_URL variable is required"} 11 | 12 | HASTIC_DB_CONNECTION_STRING: ${HASTIC_MONGO_USER:-hastic}:${HASTIC_MONGO_PASSWORD:-password}@mongo:27017/${HASTIC_MONGO_DB:-hastic} 13 | 14 | HASTIC_ALERT_TYPE: ${HASTIC_ALERT_TYPE} 15 | HASTIC_ALERTMANAGER_URL: ${HASTIC_ALERTMANAGER_URL} 16 | HASTIC_WEBHOOK_URL: ${HASTIC_WEBHOOK_URL} 17 | HASTIC_INSTANCE_NAME: ${HASTIC_INSTANCE_NAME} 18 | HASTIC_ALERT_IMAGE: ${HASTIC_ALERT_IMAGE} 19 | HASTIC_TIMEZONE_OFFSET: ${HASTIC_TIMEZONE_OFFSET} 20 | 21 | HASTIC_DB_CONNECTION_TYPE: mongodb 22 | 23 | ports: 24 | - ${HASTIC_PORT:-8000}:8000 25 | networks: 26 | - hastic-network 27 | restart: always 28 | depends_on: 29 | - mongo 30 | 31 | analytics: 32 | image: hastic/analytics:latest 33 | build: analytics 34 | environment: 35 | # TODO: use any port for server connection 36 | HASTIC_SERVER_URL: "ws://server:8002" 37 | HS_AN_LOGGING_LEVEL: ${HS_AN_LOGGING_LEVEL} 38 | networks: 39 | - hastic-network 40 | restart: always 41 | 42 | mongo: 43 | image: mongo:4.2.1-bionic 44 | environment: 45 | MONGO_INITDB_ROOT_USERNAME: ${HASTIC_MONGO_ADMIN_USER:-admin} 46 | MONGO_INITDB_ROOT_PASSWORD: ${HASTIC_MONGO_ADMIN_PASSWORD:-password} 47 | MONGO_INITDB_DATABASE: ${HASTIC_MONGO_DB:-hastic} 48 | networks: 49 | - hastic-network 50 | restart: always 51 | volumes: 52 | - hastic_mongo:/data/db 53 | - hastic_mongo_config:/data/configdb 54 | - ./mongo-init.js:/docker-entrypoint-initdb.d/mongo-init.js:ro 55 | 56 | volumes: 57 | hastic_mongo: 58 | hastic_mongo_config: 59 | 60 | networks: 61 | hastic-network: 62 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ### This repo is obsolete, see new [rust version of hastic](https://code.corpglory.net/hastic/hastic) 2 | 3 |

4 | 5 | 6 | 7 | 8 |

9 | 10 | 11 | [Hastic server](https://hastic.io) 12 | ================ 13 | [Website](https://hastic.io) | 14 | [Twitter](https://twitter.com/hasticio) | 15 | `#hastic:matrix.org` 16 | 17 | [![Build Status](https://travis-ci.org/hastic/hastic-server.svg?branch=master)](https://travis-ci.org/hastic/hastic-server) 18 | 19 | 20 | Implementation of basic pattern recognition for anomaly detection. 21 | 22 | Implementation of analytics unit for Hastic. 23 | 24 | See also: 25 | * [Getting started](https://github.com/hastic/hastic-server/wiki#getting-started) 26 | * [FAQ](https://github.com/hastic/hastic-server/wiki/FAQ) 27 | * [Hastic Grafana App](https://github.com/hastic/hastic-grafana-app) —- [Grafana](https://grafana.com/) application for visualization of configurations. 28 | * [Webhooks](https://github.com/hastic/hastic-server/wiki/Webhooks) -- notifications about events. 29 | * [Installation from source](https://github.com/hastic/hastic-server/wiki/Installation-from-source) 30 | * [Changelog](https://github.com/hastic/hastic-server/wiki/Changelog) 31 | * [Roadmap](https://github.com/hastic/hastic-server/wiki/Roadmap) 32 | 33 | ## Download & Install 34 | 35 | Please visit https://hastic.io/downloads 36 | 37 | ## Support and Consulting 38 | 39 | Commercial support, professional services **or any help** — send us your inquiry at ping@hastic.io 40 | 41 | ## About CorpGlory Inc. 42 | Hastic is a project developed by [CorpGlory Inc.](https://corpglory.com/), a company that provides high-quality software development, data visualization, Grafana and monitoring consulting. 
43 | -------------------------------------------------------------------------------- /server/spec/models/segment_model.jest.ts: -------------------------------------------------------------------------------- 1 | import { TEST_ANALYTIC_UNIT_ID } from '../utils_for_tests/analytic_units'; 2 | import { buildSegments, clearSegmentsDB, convertSegmentsToTimeRanges } from '../utils_for_tests/segments'; 3 | 4 | import * as Segment from '../../src/models/segment_model'; 5 | 6 | afterEach(async () => { 7 | await clearSegmentsDB(); 8 | }); 9 | 10 | describe('mergeAndInsertSegments', function() { 11 | const initialSegments = buildSegments([[0, 1], [2, 3], [4, 5]]); 12 | 13 | beforeEach(async () => { 14 | await Segment.mergeAndInsertSegments(initialSegments); 15 | }); 16 | 17 | it('Segments should be merged before insertion', async function() { 18 | const segmentsToInsert = buildSegments([[1, 2]]); 19 | await Segment.mergeAndInsertSegments(segmentsToInsert); 20 | 21 | const actualSegments = await Segment.findMany(TEST_ANALYTIC_UNIT_ID, {}); 22 | const actualRanges = convertSegmentsToTimeRanges(actualSegments); 23 | 24 | expect(actualRanges).toEqual([[0, 3], [4, 5]]); 25 | }); 26 | }); 27 | 28 | describe('findIntersectedSegments', () => { 29 | const initialSegments = buildSegments([[0, 3], [5, 6], [10, 13]]); 30 | 31 | beforeEach(async () => { 32 | await Segment.mergeAndInsertSegments(initialSegments); 33 | }); 34 | 35 | it('should find intersected segments', async () => { 36 | const testCases = [ 37 | { from: 1, to: 4, expected: [[0, 3]] }, 38 | { from: 11, to: 12, expected: [[10, 13]] }, 39 | { from: 6, to: 10, expected: [[5, 6], [10, 13]] }, 40 | { from: 16, to: 17, expected: [] }, 41 | { from: 5, expected: [[5, 6], [10, 13]] }, 42 | { to: 5, expected: [[0, 3], [5, 6]] }, 43 | { expected: [[0, 3], [5, 6], [10, 13]] } 44 | ]; 45 | 46 | for(let testCase of testCases) { 47 | const foundSegments = await Segment.findIntersectedSegments( 48 | TEST_ANALYTIC_UNIT_ID, testCase.from, 
testCase.to 49 | ); 50 | const foundRanges = convertSegmentsToTimeRanges(foundSegments); 51 | expect(foundRanges).toEqual(testCase.expected); 52 | } 53 | }); 54 | }); 55 | -------------------------------------------------------------------------------- /server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hastic-server", 3 | "version": "0.5.1", 4 | "description": "REST server for managing data for analytics", 5 | "scripts": { 6 | "start": "node dist/server.js", 7 | "dev": "NODE_ENV=development node build/dev-server.js", 8 | "build": "webpack --config build/webpack.prod.conf.js", 9 | "test": "jest --config jest.config.js" 10 | }, 11 | "repository": { 12 | "type": "git", 13 | "url": "git+https://github.com/hastic/hastic-server.git" 14 | }, 15 | "author": "CorpGlory Inc.", 16 | "license": "GPL-3.0-or-later", 17 | "bugs": { 18 | "url": "https://github.com/hastic/hastic-server/issues" 19 | }, 20 | "homepage": "https://github.com/hastic/hastic-server#readme", 21 | "dependencies": {}, 22 | "devDependencies": { 23 | "@corpglory/tsdb-kit": "^1.1.1", 24 | "@slack/web-api": "^6.0.0", 25 | "@types/jest": "^23.3.14", 26 | "@types/koa": "^2.0.46", 27 | "@types/koa-bodyparser": "^4.2.0", 28 | "@types/koa-router": "^7.0.31", 29 | "@types/lodash": "^4.14.116", 30 | "@types/mongodb": "^3.3.1", 31 | "@types/nedb": "^1.8.0", 32 | "@types/ws": "^6.0.4", 33 | "axios": "^0.18.0", 34 | "babel-core": "^6.26.3", 35 | "babel-jest": "^23.4.2", 36 | "babel-loader": "^7.1.4", 37 | "babel-plugin-transform-object-rest-spread": "^6.26.0", 38 | "babel-polyfill": "^6.26.0", 39 | "babel-preset-env": "^1.7.0", 40 | "babel-preset-es2015": "^6.24.1", 41 | "dotenv": "^8.2.0", 42 | "es6-promise": "^4.2.4", 43 | "event-stream": "3.3.4", 44 | "file-loader": "^1.1.11", 45 | "git-revision-webpack-plugin": "^3.0.4", 46 | "jest": "^23.1.1", 47 | "koa": "^2.0.46", 48 | "koa-bodyparser": "^4.2.0", 49 | "koa-router": "^7.0.31", 50 | "lodash": 
"^4.17.10", 51 | "mongodb": "3.3.2", 52 | "nedb": "^1.8.0", 53 | "node-loader": "^0.6.0", 54 | "nodemon": "^1.17.5", 55 | "ts-jest": "^23.1.1", 56 | "ts-loader": "^4.4.1", 57 | "typescript": "^4.3.2", 58 | "url": "^0.11.0", 59 | "webpack": "^4.12.0", 60 | "webpack-cli": "^3.0.8", 61 | "ws": "^7.2.1" 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /server/src/services/data_service/db_connector/nedb_connector.ts: -------------------------------------------------------------------------------- 1 | import { Collection } from '../collection'; 2 | import { DbConnector } from './index'; 3 | import { dbCollection } from '../../data_layer'; 4 | import * as config from '../../../config'; 5 | 6 | import * as nedb from 'nedb'; 7 | import * as fs from 'fs'; 8 | 9 | 10 | type NedbCollectionConfig = { 11 | filename: string, 12 | timestampData?: boolean 13 | }; 14 | 15 | function maybeCreateDir(path: string): void { 16 | if (fs.existsSync(path)) { 17 | return; 18 | } 19 | console.log('data service: mkdir: ' + path); 20 | fs.mkdirSync(path); 21 | } 22 | 23 | function checkDataFolders(): void { 24 | [ 25 | config.DATA_PATH 26 | ].forEach(maybeCreateDir); 27 | } 28 | 29 | export class NedbConnector implements DbConnector { 30 | private static _instance: NedbConnector; 31 | private static COLLECTION_TO_CONFIG_MAPPING = new Map([ 32 | [Collection.ANALYTIC_UNITS, { filename: config.ANALYTIC_UNITS_DATABASE_PATH, timestampData: true }], 33 | [Collection.ANALYTIC_UNIT_CACHES, { filename: config.ANALYTIC_UNIT_CACHES_DATABASE_PATH }], 34 | [Collection.SEGMENTS, { filename: config.SEGMENTS_DATABASE_PATH }], 35 | [Collection.THRESHOLD, { filename: config.THRESHOLD_DATABASE_PATH }], 36 | [Collection.DETECTION_SPANS, { filename: config.DETECTION_SPANS_DATABASE_PATH }], 37 | [Collection.DB_META, { filename: config.DB_META_PATH }], 38 | ]); 39 | 40 | private _db = new Map(); 41 | 42 | private constructor() { 43 | if(NedbConnector._instance !== 
undefined) { 44 | throw new Error(`Can't create 2nd instance of singleton MongodbConnector class`); 45 | } 46 | } 47 | 48 | async init(): Promise { 49 | checkDataFolders(); 50 | 51 | const inMemoryOnly = config.HASTIC_DB_IN_MEMORY; 52 | // TODO: it can throw an error, so we should catch it 53 | NedbConnector.COLLECTION_TO_CONFIG_MAPPING.forEach( 54 | (config: NedbCollectionConfig, collection: Collection) => { 55 | this._db.set(collection, new nedb({ ...config, autoload: true, inMemoryOnly })); 56 | } 57 | ); 58 | } 59 | 60 | get db(): Map { 61 | return this._db; 62 | } 63 | 64 | static get instance(): NedbConnector { 65 | if (this._instance === undefined) { 66 | this._instance = new this(); 67 | } 68 | return this._instance; 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /tools/prometheus-hastic-exporter/prometheus-hastic-exporter.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from prometheus_client import start_http_server, Metric, REGISTRY 3 | import json 4 | import requests 5 | import sys 6 | import time 7 | import dateutil.parser as dt 8 | 9 | class JsonCollector(object): 10 | 11 | def __init__(self, endpoint): 12 | self._endpoint = endpoint 13 | 14 | def collect(self): 15 | 16 | response = None 17 | try: 18 | resp = requests.get(self._endpoint).content.decode('UTF-8') 19 | response = json.loads(resp) 20 | except Exception as e: 21 | print('got exception, skip polling loop {}'.format(e)) 22 | return 23 | 24 | commitHash = response.get('git', {}).get('commitHash') 25 | packageVersion = response.get('packageVersion') 26 | 27 | metrics = { 28 | 'activeWebhooks': response.get('activeWebhooks'), 29 | 'ready': int(response.get('analytics', {}).get('ready', 0)), 30 | 'tasksQueueLength': response.get('analytics', {}).get('tasksQueueLength'), 31 | 'awaitedTasksNumber': response.get('awaitedTasksNumber'), 32 | 'detectionsCount': 
response.get('detectionsCount') 33 | } 34 | 35 | for name, value in metrics.items(): 36 | if value is not None: 37 | metric = Metric(name, name, 'gauge') 38 | metric.add_sample(name, value=value, labels={'commitHash': commitHash, 'packageVersion': packageVersion}) 39 | yield metric 40 | else: 41 | print('{} value is {}, skip metric'.format(name, value)) 42 | 43 | lastAlive = response.get('analytics', {}).get('lastAlive') 44 | if lastAlive: 45 | lastAlive = int(dt.parse(lastAlive).timestamp()) * 1000 #ms 46 | metric = Metric('lastAlive', 'lastAlive', 'gauge') 47 | metric.add_sample('lastAlive', value=lastAlive, labels={'commitHash': commitHash, 'packageVersion': packageVersion}) 48 | yield metric 49 | 50 | timestamp = response.get('timestamp') 51 | if timestamp: 52 | timestamp = int(dt.parse(timestamp).timestamp()) * 1000 #ms 53 | metric = Metric('timestamp', 'timestamp', 'gauge') 54 | metric.add_sample('timestamp', value=timestamp, labels={'commitHash': commitHash, 'packageVersion': packageVersion}) 55 | yield metric 56 | 57 | 58 | if __name__ == '__main__': 59 | hastic_url = sys.argv[1] 60 | exporter_port = int(sys.argv[2]) 61 | 62 | start_http_server(exporter_port) 63 | REGISTRY.register(JsonCollector(hastic_url)) 64 | 65 | while True: time.sleep(1) 66 | -------------------------------------------------------------------------------- /server/src/services/data_service/db_connector/mongodb_connector.ts: -------------------------------------------------------------------------------- 1 | import { Collection } from '../collection'; 2 | import { DbConnector } from './index'; 3 | import { dbCollection } from '../../data_layer'; 4 | import * as config from '../../../config'; 5 | 6 | import * as mongodb from 'mongodb'; 7 | 8 | 9 | export class MongodbConnector implements DbConnector { 10 | private static _instance: MongodbConnector; 11 | private static COLLECTION_TO_NAME_MAPPING = new Map([ 12 | [Collection.ANALYTIC_UNITS, 'analytic_units'], 13 | 
[Collection.ANALYTIC_UNIT_CACHES, 'analytic_unit_caches'], 14 | [Collection.SEGMENTS, 'segments'], 15 | [Collection.THRESHOLD, 'threshold'], 16 | [Collection.DETECTION_SPANS, 'detection_spans'], 17 | [Collection.DB_META, 'db_meta'] 18 | ]); 19 | 20 | private _db = new Map(); 21 | private _client: mongodb.MongoClient; 22 | 23 | private constructor() { 24 | if(MongodbConnector._instance !== undefined) { 25 | throw new Error(`Can't create 2nd instance of singleton MongodbConnector class`); 26 | } 27 | } 28 | 29 | async init(): Promise { 30 | const dbConfig = config.HASTIC_DB_CONFIG; 31 | const uri = `mongodb://${dbConfig.user}:${dbConfig.password}@${dbConfig.url}`; 32 | const auth = { 33 | user: dbConfig.user, 34 | password: dbConfig.password 35 | }; 36 | this._client = new mongodb.MongoClient(uri, { 37 | useNewUrlParser: true, 38 | auth, 39 | autoReconnect: true, 40 | useUnifiedTopology: true, 41 | // TODO: it should be configurable 42 | authMechanism: 'SCRAM-SHA-1', 43 | authSource: dbConfig.dbName 44 | }); 45 | 46 | try { 47 | const client: mongodb.MongoClient = await this._client.connect(); 48 | const hasticDb: mongodb.Db = client.db(dbConfig.dbName); 49 | MongodbConnector.COLLECTION_TO_NAME_MAPPING.forEach( 50 | (name: string, collection: Collection) => { 51 | this._db.set(collection, hasticDb.collection(name)); 52 | } 53 | ); 54 | } catch(err) { 55 | console.log(`got error while connecting to MongoDB: ${err}`); 56 | // TODO: throw a better error, e.g.: ServiceInitializationError 57 | throw err; 58 | } 59 | } 60 | 61 | get db(): Map { 62 | return this._db; 63 | } 64 | 65 | static get instance(): MongodbConnector { 66 | if(this._instance === undefined) { 67 | this._instance = new this(); 68 | } 69 | return this._instance; 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /server/src/models/analytic_units/threshold_analytic_unit_model.ts: 
-------------------------------------------------------------------------------- 1 | import { AnalyticUnit } from './analytic_unit_model'; 2 | import { AnalyticUnitId, AnalyticUnitStatus, DetectorType } from './types'; 3 | 4 | import { Metric } from '@corpglory/tsdb-kit'; 5 | 6 | 7 | export enum Condition { 8 | ABOVE = '>', 9 | ABOVE_OR_EQUAL = '>=', 10 | EQUAL = '=', 11 | LESS_OR_EQUAL = '<=', 12 | LESS = '<', 13 | NO_DATA = 'NO_DATA' 14 | }; 15 | 16 | export class ThresholdAnalyticUnit extends AnalyticUnit { 17 | 18 | public learningAfterUpdateRequired = true; 19 | 20 | constructor( 21 | name: string, 22 | grafanaUrl: string, 23 | panelId: string, 24 | type: string, 25 | public value: number, 26 | public condition: Condition, 27 | metric?: Metric, 28 | alert?: boolean, 29 | id?: AnalyticUnitId, 30 | lastDetectionTime?: number, 31 | status?: AnalyticUnitStatus, 32 | error?: string, 33 | labeledColor?: string, 34 | deletedColor?: string, 35 | visible?: boolean, 36 | collapsed?: boolean 37 | ) { 38 | super( 39 | name, 40 | grafanaUrl, 41 | panelId, 42 | type, 43 | metric, 44 | alert, 45 | id, 46 | lastDetectionTime, 47 | status, 48 | error, 49 | labeledColor, 50 | deletedColor, 51 | DetectorType.THRESHOLD, 52 | visible, 53 | collapsed 54 | ); 55 | } 56 | 57 | toObject() { 58 | const baseObject = super.toObject(); 59 | return { 60 | ...baseObject, 61 | ...this.analyticProps 62 | }; 63 | } 64 | 65 | toPanelObject() { 66 | const baseObject = super.toPanelObject(); 67 | return { 68 | ...baseObject, 69 | ...this.analyticProps 70 | }; 71 | } 72 | 73 | static fromObject(obj: any) { 74 | // TODO: remove duplication 75 | let metric: Metric | undefined = undefined; 76 | if (obj.metric !== undefined) { 77 | metric = Metric.fromObject(obj.metric); 78 | } 79 | 80 | return new ThresholdAnalyticUnit( 81 | obj.name, 82 | obj.grafanaUrl, 83 | obj.panelId, 84 | obj.type, 85 | obj.value, 86 | obj.condition, 87 | metric, 88 | obj.alert, 89 | obj._id, 90 | obj.lastDetectionTime, 91 | 
obj.status, 92 | obj.error, 93 | obj.labeledColor, 94 | obj.deletedColor, 95 | obj.visible, 96 | obj.collapsed 97 | ); 98 | } 99 | 100 | get analyticProps() { 101 | return { 102 | value: this.value, 103 | condition: this.condition 104 | } 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /server/spec/utils_for_tests/analytic_units.ts: -------------------------------------------------------------------------------- 1 | import * as AnalyticUnit from '../../src/models/analytic_units'; 2 | import * as AnalyticUnitCache from '../../src/models/analytic_unit_cache_model'; 3 | 4 | 5 | import { Metric } from '@corpglory/tsdb-kit'; 6 | 7 | import * as _ from 'lodash'; 8 | 9 | 10 | export const TEST_ANALYTIC_UNIT_ID: AnalyticUnit.AnalyticUnitId = 'testid'; 11 | 12 | const DEFAULT_DATASOURCE_STRUCTURE = { 13 | url: "api/datasources/proxy/5/query", 14 | data: null, 15 | params: { 16 | db:"dbname", 17 | q: "SELECT mean(\"value\") FROM \"autogen\".\"tcpconns_value\" WHERE time >= now() - 6h GROUP BY time(20s) fill(null)", 18 | epoch: "ms" 19 | }, 20 | type: "influxdb" 21 | }; 22 | 23 | const DEFAULT_TARGETS_STRUCTURE = [ 24 | { 25 | groupBy: [ 26 | { 27 | params: ["$__interval"], 28 | type: "time" 29 | }, 30 | { 31 | params: ["null"], 32 | type: "fill" 33 | } 34 | ], 35 | measurement: "tcpconns_value", 36 | orderByTime: "ASC", 37 | policy: "autogen", 38 | refId: "A", 39 | resultFormat: "time_series", 40 | select: [[{"params":["value"],"type":"field"},{"params":[],"type":"mean"}]],"tags":[] 41 | } 42 | ]; 43 | 44 | export const DEFAULT_METRIC = new Metric( 45 | DEFAULT_DATASOURCE_STRUCTURE, 46 | DEFAULT_TARGETS_STRUCTURE 47 | ); 48 | 49 | export async function createTestDB(createCache = true) { 50 | const analyticUnitObject = AnalyticUnitObject.getAnalyticUnitObject(); 51 | const unit = AnalyticUnit.createAnalyticUnitFromObject(analyticUnitObject); 52 | await AnalyticUnit.create(unit); 53 | 54 | if(createCache) { 55 | await 
AnalyticUnitCache.create(TEST_ANALYTIC_UNIT_ID); 56 | await AnalyticUnitCache.setData(TEST_ANALYTIC_UNIT_ID, { timeStep: 1 }); 57 | } 58 | return unit; 59 | } 60 | 61 | export async function clearTestDB() { 62 | await AnalyticUnit.remove(TEST_ANALYTIC_UNIT_ID); 63 | await AnalyticUnitCache.remove(TEST_ANALYTIC_UNIT_ID); 64 | } 65 | 66 | export class AnalyticUnitObject { 67 | 68 | constructor( 69 | public _id: AnalyticUnit.AnalyticUnitId = TEST_ANALYTIC_UNIT_ID, 70 | public name: string = 'name', 71 | public grafanaUrl: string = 'grafanaUrl', 72 | public panelId: string = 'panelId', 73 | public type: string = 'type', 74 | public metric: Metric = DEFAULT_METRIC, 75 | public alert: boolean = false, 76 | public labeledColor: string = '#FF99FF', 77 | public deletedColor: string = '#00f0ff', 78 | public detectorType: AnalyticUnit.DetectorType = AnalyticUnit.DetectorType.ANOMALY, 79 | public visible: boolean = true, 80 | public collapsed: boolean = false 81 | ){}; 82 | 83 | static getAnalyticUnitObject(): AnalyticUnitObject { 84 | return new AnalyticUnitObject(); 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /server/src/models/analytic_units/analytic_unit_model.ts: -------------------------------------------------------------------------------- 1 | import { 2 | AnalyticUnitId, AnalyticUnitStatus, DetectorType 3 | } from './types'; 4 | 5 | import { Metric } from '@corpglory/tsdb-kit'; 6 | 7 | import * as _ from 'lodash'; 8 | 9 | export abstract class AnalyticUnit { 10 | 11 | public learningAfterUpdateRequired = false; 12 | 13 | constructor( 14 | public name: string, 15 | public grafanaUrl: string, 16 | public panelId: string, 17 | // TODO: enum type 18 | // TODO: type -> subType 19 | public type: string, 20 | public metric?: Metric, 21 | public alert?: boolean, 22 | public id?: AnalyticUnitId, 23 | public lastDetectionTime?: number, 24 | public status?: AnalyticUnitStatus, 25 | public error?: string, 26 | public 
labeledColor?: string, 27 | public deletedColor?: string, 28 | // TODO: detectorType -> type 29 | public detectorType?: DetectorType, 30 | public visible?: boolean, 31 | public collapsed?: boolean 32 | ) { 33 | 34 | if(name === undefined) { 35 | throw new Error(`Missing field "name"`); 36 | } 37 | if(grafanaUrl === undefined) { 38 | throw new Error(`Missing field "grafanaUrl"`); 39 | } 40 | if(type === undefined) { 41 | throw new Error(`Missing field "type"`); 42 | } 43 | } 44 | 45 | public toObject(): any { 46 | let metric; 47 | if(this.metric !== undefined) { 48 | metric = this.metric.toObject(); 49 | } 50 | 51 | return { 52 | _id: this.id, 53 | name: this.name, 54 | grafanaUrl: this.grafanaUrl, 55 | panelId: this.panelId, 56 | type: this.type, 57 | metric, 58 | alert: this.alert, 59 | lastDetectionTime: this.lastDetectionTime, 60 | status: this.status, 61 | error: this.error, 62 | labeledColor: this.labeledColor, 63 | deletedColor: this.deletedColor, 64 | detectorType: this.detectorType, 65 | visible: this.visible, 66 | collapsed: this.collapsed 67 | }; 68 | } 69 | 70 | public toPanelObject(): any { 71 | return { 72 | id: this.id, 73 | name: this.name, 74 | type: this.type, 75 | alert: this.alert, 76 | labeledColor: this.labeledColor, 77 | deletedColor: this.deletedColor, 78 | detectorType: this.detectorType, 79 | visible: this.visible, 80 | collapsed: this.collapsed 81 | }; 82 | } 83 | 84 | public toTemplate(): any { 85 | const obj = _.cloneDeep(this.toObject()); 86 | 87 | delete obj._id; 88 | obj.grafanaUrl = '${GRAFANA_URL}'; 89 | obj.panelId = '${PANEL_ID}'; 90 | obj.metric.datasource.url = '${DATASOURCE_URL}'; 91 | 92 | return obj; 93 | } 94 | 95 | get analyticProps () { 96 | return {}; 97 | } 98 | 99 | } 100 | -------------------------------------------------------------------------------- /server/src/models/analytic_units/db.ts: -------------------------------------------------------------------------------- 1 | import { createAnalyticUnitFromObject } 
from './utils'; 2 | import { AnalyticUnit } from './analytic_unit_model'; 3 | import { AnalyticUnitId, FindManyQuery } from './types'; 4 | import { Collection } from '../../services/data_service/collection'; 5 | import { DataService, SortingOrder } from '../../services/data_service'; 6 | 7 | import { Metric } from '@corpglory/tsdb-kit'; 8 | 9 | import * as _ from 'lodash'; 10 | 11 | 12 | const db = DataService.getInstance().makeDBQ(Collection.ANALYTIC_UNITS); 13 | 14 | export async function findById(id: AnalyticUnitId): Promise { 15 | let obj = await db.findOne(id); 16 | if (obj === null) { 17 | return null; 18 | } 19 | return createAnalyticUnitFromObject(obj); 20 | } 21 | 22 | export async function findMany(query: FindManyQuery): Promise { 23 | const analyticUnits = await db.findMany(query, { 24 | createdAt: SortingOrder.ASCENDING, 25 | name: SortingOrder.ASCENDING 26 | }); 27 | if (analyticUnits === null) { 28 | return []; 29 | } 30 | return analyticUnits.map(createAnalyticUnitFromObject); 31 | } 32 | 33 | 34 | /** 35 | * Creates and updates new unit.id 36 | * 37 | * @param unit to create 38 | * @returns unit.id 39 | */ 40 | export async function create(unit: AnalyticUnit): Promise { 41 | let obj = unit.toObject(); 42 | return db.insertOne(obj); 43 | } 44 | 45 | export async function insertMany(analyticUnits: AnalyticUnit[]): Promise { 46 | return db.insertMany(analyticUnits); 47 | } 48 | 49 | export async function remove(id: AnalyticUnitId): Promise { 50 | // TODO: remove it`s segments 51 | // TODO: remove it`s cache 52 | await db.removeOne(id); 53 | } 54 | 55 | /** 56 | * Changes values of analytic unit fields to according values of obj 57 | * 58 | * @param id analytic unit id 59 | * @param obj object with keys and values which need to be updated in analytic unit 60 | */ 61 | export async function update(id: AnalyticUnitId, obj: any) { 62 | const analyticUnitObj = await db.findOne(id); 63 | if(analyticUnitObj === null) { 64 | throw new Error(`Analytic unit 
${id} doesn't exist`); 65 | } 66 | 67 | const analyticUnit = createAnalyticUnitFromObject(analyticUnitObj); 68 | let updateObj: any = analyticUnit.toPanelObject(); 69 | delete updateObj.id; 70 | updateObj = _.mapValues(updateObj, (value, key) => { 71 | if(_.has(obj, key)) { 72 | return obj[key]; 73 | } 74 | return value; 75 | }); 76 | 77 | return db.updateOne(id, updateObj); 78 | } 79 | 80 | export async function setStatus(id: AnalyticUnitId, status: string, error?: string) { 81 | return db.updateOne(id, { status, error }); 82 | } 83 | 84 | export async function setDetectionTime(id: AnalyticUnitId, lastDetectionTime: number) { 85 | return db.updateOne(id, { lastDetectionTime }); 86 | } 87 | 88 | export async function setAlert(id: AnalyticUnitId, alert: boolean) { 89 | return db.updateOne(id, { alert }); 90 | } 91 | 92 | export async function setMetric(id: AnalyticUnitId, metric: Metric) { 93 | return db.updateOne(id, { metric }); 94 | } 95 | -------------------------------------------------------------------------------- /server/src/models/analytic_units/anomaly_analytic_unit_model.ts: -------------------------------------------------------------------------------- 1 | import { AnalyticUnit } from './analytic_unit_model'; 2 | import { AnalyticUnitId, AnalyticUnitStatus, DetectorType } from './types'; 3 | 4 | import { Metric } from '@corpglory/tsdb-kit'; 5 | 6 | type SeasonalityPeriod = { 7 | unit: string, 8 | value: number 9 | } 10 | 11 | export enum Bound { 12 | ALL = 'ALL', 13 | UPPER = 'UPPER', 14 | LOWER = 'LOWER' 15 | }; 16 | export class AnomalyAnalyticUnit extends AnalyticUnit { 17 | 18 | public learningAfterUpdateRequired = true; 19 | 20 | constructor( 21 | name: string, 22 | grafanaUrl: string, 23 | panelId: string, 24 | type: string, 25 | public alpha: number, 26 | public confidence: number, 27 | public seasonality: number, //seasonality in ms 28 | private seasonalityPeriod: SeasonalityPeriod, 29 | public enableBounds: Bound, 30 | metric?: Metric, 31 | 
alert?: boolean, 32 | id?: AnalyticUnitId, 33 | lastDetectionTime?: number, 34 | status?: AnalyticUnitStatus, 35 | error?: string, 36 | labeledColor?: string, 37 | deletedColor?: string, 38 | visible?: boolean, 39 | collapsed?: boolean 40 | ) { 41 | super( 42 | name, 43 | grafanaUrl, 44 | panelId, 45 | type, 46 | metric, 47 | alert, 48 | id, 49 | lastDetectionTime, 50 | status, 51 | error, 52 | labeledColor, 53 | deletedColor, 54 | DetectorType.ANOMALY, 55 | visible, 56 | collapsed 57 | ); 58 | } 59 | 60 | toObject() { 61 | const baseObject = super.toObject(); 62 | return { 63 | ...baseObject, 64 | ...this.analyticProps 65 | }; 66 | } 67 | 68 | toPanelObject() { 69 | const baseObject = super.toPanelObject(); 70 | return { 71 | ...baseObject, 72 | ...this.analyticProps 73 | }; 74 | } 75 | 76 | static fromObject(obj: any) { 77 | // TODO: remove duplication 78 | let metric: Metric | undefined = undefined; 79 | if (obj.metric !== undefined) { 80 | metric = Metric.fromObject(obj.metric); 81 | } 82 | 83 | return new AnomalyAnalyticUnit( 84 | obj.name, 85 | obj.grafanaUrl, 86 | obj.panelId, 87 | obj.type, 88 | obj.alpha, 89 | obj.confidence, 90 | obj.seasonality, 91 | obj.seasonalityPeriod, 92 | obj.enableBounds, 93 | metric, 94 | obj.alert, 95 | obj._id, 96 | obj.lastDetectionTime, 97 | obj.status, 98 | obj.error, 99 | obj.labeledColor, 100 | obj.deletedColor, 101 | obj.visible, 102 | obj.collapsed 103 | ); 104 | } 105 | 106 | get analyticProps() { 107 | return { 108 | alpha: this.alpha, 109 | confidence: this.confidence, 110 | seasonality: this.seasonality, 111 | seasonalityPeriod: this.seasonalityPeriod, 112 | enableBounds: this.enableBounds 113 | }; 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /server/src/models/analytic_unit_cache_model.ts: -------------------------------------------------------------------------------- 1 | import { AnalyticUnitId, AnalyticUnit } from './analytic_units'; 2 | import { 
Collection } from '../services/data_service/collection'; 3 | import { DataService } from '../services/data_service'; 4 | 5 | import * as _ from 'lodash'; 6 | 7 | 8 | const db = DataService.getInstance().makeDBQ(Collection.ANALYTIC_UNIT_CACHES); 9 | // TODO: count milliseconds in index from dataset 10 | const MILLISECONDS_IN_INDEX = 60000; 11 | 12 | type FindManyQuery = { 13 | _id: { $in: AnalyticUnitId[] } 14 | }; 15 | 16 | export class AnalyticUnitCache { 17 | public constructor( 18 | public id: AnalyticUnitId, 19 | public data?: any 20 | ) { 21 | if(id === undefined) { 22 | throw new Error(`Missing field "id"`); 23 | } 24 | } 25 | 26 | public toObject() { 27 | return { 28 | data: this.data || null, 29 | _id: this.id 30 | }; 31 | } 32 | 33 | public toTemplate(): any { 34 | return { 35 | ...this.toObject(), 36 | _id: undefined 37 | }; 38 | } 39 | 40 | static fromObject(obj: any): AnalyticUnitCache { 41 | return new AnalyticUnitCache( 42 | obj._id, 43 | obj.data, 44 | ); 45 | } 46 | 47 | public getIntersection(): number { 48 | if( 49 | this.data !== undefined && 50 | this.data !== null && 51 | this.data.windowSize !== undefined 52 | ) { 53 | //TODO: return one window size after resolving https://github.com/hastic/hastic-server/issues/508 54 | if(this.data.timeStep !== undefined) { 55 | return this.data.windowSize * 2 * this.data.timeStep; 56 | } else { 57 | return this.data.windowSize * 2 * MILLISECONDS_IN_INDEX; 58 | } 59 | } 60 | // TODO: default window size 61 | return 3 * MILLISECONDS_IN_INDEX; 62 | } 63 | 64 | public get timeStep(): number | undefined { 65 | if(this.data !== undefined) { 66 | return this.data.timeStep; 67 | } 68 | return undefined; 69 | } 70 | } 71 | 72 | export async function findById(id: AnalyticUnitId): Promise { 73 | let obj = await db.findOne(id); 74 | if(obj === null) { 75 | return null; 76 | } 77 | return AnalyticUnitCache.fromObject(obj); 78 | } 79 | 80 | export async function findMany(query: FindManyQuery): Promise { 81 | let caches = 
await db.findMany(query); 82 | if(caches === null) { 83 | return []; 84 | } 85 | return caches.map(cache => AnalyticUnitCache.fromObject(cache)); 86 | } 87 | 88 | export async function create(id: AnalyticUnitId): Promise { 89 | let cache = new AnalyticUnitCache(id); 90 | return db.insertOne(cache.toObject()); 91 | } 92 | 93 | export async function insertMany(caches: any[]): Promise { 94 | return db.insertMany(caches); 95 | } 96 | 97 | export async function setData(id: AnalyticUnitId, data: any) { 98 | return db.updateOne(id, { data }); 99 | } 100 | 101 | export async function remove(id: AnalyticUnitId): Promise { 102 | await db.removeOne(id); 103 | } 104 | -------------------------------------------------------------------------------- /server/src/services/grafana_service.ts: -------------------------------------------------------------------------------- 1 | import { GrafanaPanelTemplate, GrafanaTemplateVariables } from '../models/grafana_panel_model'; 2 | 3 | import * as AnalyticUnit from '../models/analytic_units'; 4 | import * as AnalyticUnitCache from '../models/analytic_unit_cache_model'; 5 | import * as DetectionSpan from '../models/detection_model'; 6 | import * as Segment from '../models/segment_model'; 7 | 8 | import * as _ from 'lodash'; 9 | 10 | 11 | export async function exportPanel(panelId: string): Promise { 12 | const analyticUnits = await AnalyticUnit.findMany({ panelId }); 13 | const analyticUnitIds = analyticUnits.map(analyticUnit => analyticUnit.id); 14 | 15 | const [caches, detectionSpans, segments] = await Promise.all([ 16 | AnalyticUnitCache.findMany({ _id: { $in: analyticUnitIds } }), 17 | DetectionSpan.findByAnalyticUnitIds(analyticUnitIds), 18 | Segment.findByAnalyticUnitIds(analyticUnitIds) 19 | ]); 20 | 21 | // TODO: not any 22 | let analyticUnitTemplates: any[] = []; 23 | 24 | analyticUnits.forEach(analyticUnit => { 25 | const analyticUnitTemplate = analyticUnit.toTemplate(); 26 | 27 | let analyticUnitCache = _.find(caches, cache => 
cache.id === analyticUnit.id) || null; 28 | if(analyticUnitCache !== null) { 29 | analyticUnitCache = analyticUnitCache.toTemplate(); 30 | } 31 | 32 | const analyticUnitSegments = segments 33 | .filter(segment => segment.analyticUnitId === analyticUnit.id) 34 | .map(segment => segment.toTemplate()); 35 | 36 | const analyticUnitSpans = detectionSpans 37 | .filter(span => span.analyticUnitId === analyticUnit.id) 38 | .map(span => span.toTemplate()); 39 | 40 | analyticUnitTemplates.push({ 41 | ...analyticUnitTemplate, 42 | cache: analyticUnitCache, 43 | segments: analyticUnitSegments, 44 | detectionSpans: analyticUnitSpans 45 | }); 46 | }); 47 | 48 | return { analyticUnitTemplates }; 49 | } 50 | 51 | export async function importPanel( 52 | panelTemplate: GrafanaPanelTemplate, 53 | variables: GrafanaTemplateVariables 54 | ): Promise { 55 | await Promise.all(panelTemplate.analyticUnitTemplates.map( 56 | template => _importAnalyticUnitTemplate(template, variables) 57 | )); 58 | } 59 | 60 | export async function _importAnalyticUnitTemplate(analyticUnitTemplate: any, variables: GrafanaTemplateVariables) { 61 | analyticUnitTemplate.grafanaUrl = variables.grafanaUrl; 62 | analyticUnitTemplate.panelId = variables.panelId; 63 | analyticUnitTemplate.metric.datasource.url = variables.datasourceUrl; 64 | 65 | const cache = _.clone(analyticUnitTemplate.cache); 66 | const segments = _.clone(analyticUnitTemplate.segments); 67 | const detectionSpans = _.clone(analyticUnitTemplate.detectionSpans); 68 | 69 | delete analyticUnitTemplate.cache; 70 | delete analyticUnitTemplate.segments; 71 | delete analyticUnitTemplate.detectionSpans; 72 | 73 | const [ newAnalyticUnitId ] = await AnalyticUnit.insertMany([analyticUnitTemplate]); 74 | 75 | if(cache !== null) { 76 | cache._id = newAnalyticUnitId; 77 | } 78 | 79 | segments.forEach(segment => segment.analyticUnitId = newAnalyticUnitId); 80 | detectionSpans.forEach(detectionSpan => detectionSpan.analyticUnitId = newAnalyticUnitId); 81 | 82 | 
return Promise.all([ 83 | AnalyticUnitCache.insertMany([cache]), 84 | Segment.insertMany(segments), 85 | DetectionSpan.insertMany(detectionSpans) 86 | ]); 87 | } 88 | -------------------------------------------------------------------------------- /server/spec/models/detection_model.jest.ts: -------------------------------------------------------------------------------- 1 | import { TEST_ANALYTIC_UNIT_ID } from '../utils_for_tests/analytic_units'; 2 | import { insertSpans, clearSpansDB, convertSpansToOptions } from '../utils_for_tests/detection_spans'; 3 | 4 | import * as Detection from '../../src/models/detection_model'; 5 | 6 | import * as _ from 'lodash'; 7 | 8 | afterEach(clearSpansDB); 9 | 10 | describe('insertSpan', () => { 11 | it('should merge spans with the same status', async () => { 12 | /* 13 | * Config for test 14 | * insert -- what we want to insert in our test database 15 | * expectedAfterInsertion -- expected database state after insertion 16 | */ 17 | const insertSteps = [ 18 | { 19 | insert: [ 20 | { from: 1, to: 3, status: Detection.DetectionStatus.READY }, 21 | { from: 4, to: 5, status: Detection.DetectionStatus.RUNNING } 22 | ], 23 | expectedAfterInsertion: [ 24 | { from: 1, to: 3, status: Detection.DetectionStatus.READY }, 25 | { from: 4, to: 5, status: Detection.DetectionStatus.RUNNING } 26 | ] 27 | }, 28 | { 29 | insert: [ { from: 5, to: 9, status: Detection.DetectionStatus.RUNNING } ], 30 | expectedAfterInsertion: [ 31 | { from: 1, to: 3, status: Detection.DetectionStatus.READY }, 32 | { from: 4, to: 9, status: Detection.DetectionStatus.RUNNING } 33 | ] 34 | }, 35 | { 36 | insert: [ { from: 2, to: 5, status: Detection.DetectionStatus.READY } ], 37 | expectedAfterInsertion: [ 38 | { from: 1, to: 5, status: Detection.DetectionStatus.READY }, 39 | { from: 4, to: 9, status: Detection.DetectionStatus.RUNNING } 40 | ] 41 | }, 42 | ]; 43 | 44 | for(let step of insertSteps) { 45 | await insertSpans(step.insert); 46 | const spansInDB = await 
Detection.findMany(TEST_ANALYTIC_UNIT_ID, {}); 47 | const spansOptions = convertSpansToOptions(spansInDB); 48 | expect(spansOptions).toEqual(step.expectedAfterInsertion); 49 | } 50 | }); 51 | 52 | 53 | it('should merge spans if existing span is inside the one being inserted', async () => { 54 | await insertSpans([ 55 | { from: 1, to: 6, status: Detection.DetectionStatus.RUNNING } 56 | ]); 57 | 58 | const expectedSpans = [ 59 | { from: 1, to: 6, status: Detection.DetectionStatus.RUNNING } 60 | ]; 61 | const spansInDB = await Detection.findMany(TEST_ANALYTIC_UNIT_ID, {}); 62 | const spansOptions = convertSpansToOptions(spansInDB); 63 | expect(spansOptions).toEqual(expectedSpans); 64 | }); 65 | }); 66 | 67 | describe('getIntersectedSpans', () => { 68 | it('should find all intersections with the inserted span', async () => { 69 | await insertSpans([ 70 | { from: 1, to: 3, status: Detection.DetectionStatus.READY }, 71 | { from: 4, to: 5, status: Detection.DetectionStatus.RUNNING } 72 | ]); 73 | 74 | const testCases = [ 75 | { 76 | from: 1, to: 5, 77 | expected: [ 78 | { from: 1, to: 3, status: Detection.DetectionStatus.READY }, 79 | { from: 4, to: 5, status: Detection.DetectionStatus.RUNNING } 80 | ] 81 | }, 82 | { from: 4, to: 5, expected: [{ from: 4, to: 5, status: Detection.DetectionStatus.RUNNING }] }, 83 | { from: 6, to: 7, expected: [] } 84 | ] 85 | 86 | for(let testCase of testCases) { 87 | const intersectedSpans = await Detection.getIntersectedSpans(TEST_ANALYTIC_UNIT_ID, testCase.from, testCase.to); 88 | const intersectedSpansOptions = convertSpansToOptions(intersectedSpans); 89 | expect(intersectedSpansOptions).toEqual(testCase.expected); 90 | } 91 | }); 92 | }); 93 | 94 | -------------------------------------------------------------------------------- /server/src/index.ts: -------------------------------------------------------------------------------- 1 | import { router as analyticUnitsRouter } from './routes/analytic_units_router'; 2 | import { router as 
segmentsRouter } from './routes/segments_router'; 3 | import { router as dataRouter } from './routes/data_router'; 4 | import { router as detectionsRouter } from './routes/detections_router'; 5 | import { router as panelRouter } from './routes/panel_router'; 6 | 7 | import * as AnalyticsController from './controllers/analytics_controller'; 8 | 9 | import * as ProcessService from './services/process_service'; 10 | 11 | import { HASTIC_PORT, PACKAGE_VERSION, GIT_INFO, HASTIC_INSTANCE_NAME } from './config'; 12 | 13 | import { applyDBMigrations } from './services/data_service/migrations'; 14 | 15 | import * as Koa from 'koa'; 16 | import * as Router from 'koa-router'; 17 | import * as bodyParser from 'koa-bodyparser'; 18 | 19 | import { createServer } from 'http'; 20 | 21 | init(); 22 | 23 | async function init() { 24 | await applyDBMigrations(); 25 | 26 | const app = new Koa(); 27 | let httpServer = createServer(app.callback()); 28 | 29 | AnalyticsController.init(); 30 | ProcessService.registerExitHandler(AnalyticsController.terminate); 31 | 32 | app.on('error', (err, ctx) => { 33 | console.log('got server error:'); 34 | console.log(err); 35 | }); 36 | 37 | app.use(bodyParser()); 38 | 39 | app.use(async function(ctx, next) { 40 | ctx.set('Access-Control-Allow-Origin', '*'); 41 | ctx.set('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, PATCH, OPTIONS'); 42 | ctx.set('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept'); 43 | await next(); 44 | }); 45 | app.use(async function(ctx, next) { 46 | try { 47 | await next(); 48 | } catch (e) { 49 | console.error(e); 50 | ctx.response.status = 500; 51 | ctx.response.body = { 52 | code: 500, 53 | message: `${ctx.method} ${ctx.url} error: ${e.message}` 54 | }; 55 | } 56 | }); 57 | 58 | 59 | const rootRouter = new Router(); 60 | rootRouter.use('/analyticUnits', analyticUnitsRouter.routes(), analyticUnitsRouter.allowedMethods()); 61 | rootRouter.use('/segments', segmentsRouter.routes(), 
segmentsRouter.allowedMethods()); 62 | rootRouter.use('/query', dataRouter.routes(), dataRouter.allowedMethods()); 63 | rootRouter.use('/detections', detectionsRouter.routes(), detectionsRouter.allowedMethods()); 64 | rootRouter.use('/panels', panelRouter.routes(), panelRouter.allowedMethods()); 65 | 66 | rootRouter.get('/', async (ctx) => { 67 | const activeWebhooks = await AnalyticsController.getActiveWebhooks(); 68 | 69 | ctx.response.body = { 70 | server: 'OK', 71 | analytics: { 72 | ready: AnalyticsController.isAnalyticReady(), 73 | lastAlive: AnalyticsController.analyticsLastAlive(), 74 | tasksQueueLength: AnalyticsController.getQueueLength() 75 | }, 76 | instanceName: HASTIC_INSTANCE_NAME, 77 | awaitedTasksNumber: AnalyticsController.getTaskResolversLength(), 78 | detectionsCount: AnalyticsController.getDetectionsCount(), 79 | nodeVersion: process.version, 80 | packageVersion: PACKAGE_VERSION, 81 | npmUserAgent: process.env.npm_config_user_agent, 82 | docker: process.env.INSIDE_DOCKER !== undefined, 83 | serverPort: HASTIC_PORT, 84 | git: GIT_INFO, 85 | activeWebhooks: activeWebhooks.length, 86 | timestamp: new Date(Date.now()) 87 | }; 88 | }); 89 | 90 | app 91 | .use(rootRouter.routes()) 92 | .use(rootRouter.allowedMethods()); 93 | 94 | httpServer.listen({ port: HASTIC_PORT, exclusive: true }, () => { 95 | console.log(`Server is running on :${HASTIC_PORT}`); 96 | }); 97 | 98 | httpServer.on('error', (err) => { 99 | console.error(`Http server error: ${err.message}`) 100 | }); 101 | 102 | ProcessService.registerExitHandler(() => { 103 | httpServer.close(); 104 | }); 105 | 106 | } 107 | -------------------------------------------------------------------------------- /server/spec/analytic_controller.jest.ts: -------------------------------------------------------------------------------- 1 | import { queryByMetric } from '@corpglory/tsdb-kit'; 2 | 3 | jest.mock('@corpglory/tsdb-kit', () => ( 4 | { 5 | ...(jest.requireActual('@corpglory/tsdb-kit')), 6 | 
queryByMetric: jest.fn((metric, url, from, to, apiKey) => {
      // Stubbed TSDB query: returns an empty dataset so detections can run
      // without a live Grafana instance; call args are still recorded.
      return { values:[], columns:[] }
    })
  }
));

import { runDetect, onDetect, getHSR } from '../src/controllers/analytics_controller';
import * as AnalyticUnitCache from '../src/models/analytic_unit_cache_model';
import * as Segment from '../src/models/segment_model';
import { TEST_ANALYTIC_UNIT_ID, createTestDB, clearTestDB, DEFAULT_METRIC } from './utils_for_tests/analytic_units';
import { buildSegments, clearSegmentsDB, convertSegmentsToTimeRanges } from './utils_for_tests/segments';
import { HASTIC_API_KEY, GRAFANA_URL } from '../src/config';

import * as _ from 'lodash';
import * as AnalyticUnit from '../src/models/analytic_units';

const WINDOW_SIZE = 10;
const TIME_STEP = 1000;

// Each test starts from a freshly-seeded test DB.
beforeEach(async () => {
  await clearTestDB();
  await createTestDB();
});

describe('Check detection range', function() {
  it('range should be >= 2 * windowSize * timeStep', async () => {
    const from = 1500000000000;
    const to = 1500000000001;
    // runDetect is expected to widen a too-narrow range backwards from `to`.
    const expectedFrom = to - WINDOW_SIZE * TIME_STEP * 2;

    await AnalyticUnitCache.setData(TEST_ANALYTIC_UNIT_ID, { timeStep: TIME_STEP, windowSize: WINDOW_SIZE });
    // NOTE(review): leftover debug output — consider removing.
    console.log(await AnalyticUnitCache.findById(TEST_ANALYTIC_UNIT_ID));
    await runDetect(TEST_ANALYTIC_UNIT_ID, from, to);
    // The mocked queryByMetric records the widened range it was asked for.
    expect(queryByMetric).toBeCalledWith(DEFAULT_METRIC, GRAFANA_URL, expectedFrom, to, HASTIC_API_KEY);
  });
});

describe('onDetect', () => {
  const INITIAL_SEGMENTS = buildSegments([[0, 1], [2, 3], [4, 5]]);

  beforeEach(async () => {
    await Segment.mergeAndInsertSegments(INITIAL_SEGMENTS);
  });

  afterEach(async () => {
    await clearSegmentsDB();
  });

  it('should not send a webhook after merging', async () => {
    // [5, 6] touches the stored [4, 5] segment, so it is merged into it
    // rather than reported as a brand-new detection.
    const detectedSegmentIds = await onDetect({
      analyticUnitId: TEST_ANALYTIC_UNIT_ID,
      segments: buildSegments([[5, 6]]),
      lastDetectionTime: 0,
      cache: {
        data: {
          timeStep: 1
        }
      }
    });
    const detectedSegments = await Promise.all(
      detectedSegmentIds.map(id => Segment.findOne(id))
    );

    const detectedRanges = convertSegmentsToTimeRanges(detectedSegments);
    expect(detectedRanges).toEqual([]);
  });

  it('should send a webhook when there was no merging', async () => {
    // [7, 8] does not intersect any stored segment, so it is reported as-is.
    const detectedSegmentIds = await onDetect({
      analyticUnitId: TEST_ANALYTIC_UNIT_ID,
      segments: buildSegments([[7, 8]]),
      lastDetectionTime: 0
    });

    const detectedSegments = await Promise.all(
      detectedSegmentIds.map(id => Segment.findOne(id))
    );

    const detectedRanges = convertSegmentsToTimeRanges(detectedSegments);
    expect(detectedRanges).toEqual([[7, 8]]);
  });
});

describe('getHSR', function() {
  let cacheToSave: AnalyticUnitCache.AnalyticUnitCache;

  beforeAll(async () => {
    // Re-seed without a cache so the unit starts in a non-ready state.
    await clearTestDB();
    await createTestDB(false);
  });

  afterAll(async () => {
    // Restore the cache for subsequent suites.
    // NOTE(review): `cacheToSave` is never assigned in this suite — verify
    // this teardown actually restores meaningful data.
    await AnalyticUnitCache.create(TEST_ANALYTIC_UNIT_ID);
    await AnalyticUnitCache.setData(TEST_ANALYTIC_UNIT_ID, cacheToSave.data);
  });

  it('should return nothing if unit state is LEARNING', async () => {
    const unit = await AnalyticUnit.findById(TEST_ANALYTIC_UNIT_ID);
    unit.status = AnalyticUnit.AnalyticUnitStatus.LEARNING;
    const result = await getHSR(unit, 9000, 100000);
    expect(result).toEqual({"hsr": {"columns": [], "values": []}});
  });
});

import { Collection } from './collection';
import { getDbQueryWrapper, dbCollection } from '../data_layer';
import { DbConnector } from
'./db_connector'; 4 | import { DbConnectorFactory } from './db_connector/factory'; 5 | 6 | 7 | export enum SortingOrder { ASCENDING = 1, DESCENDING = -1 }; 8 | 9 | /** 10 | * Class which helps to make queries to your collection 11 | * 12 | * @param { string | object } query: a key as a string or mongodb-style query 13 | */ 14 | export type DBQ = { 15 | findOne: (query: string | object) => Promise, 16 | findMany: (query: string[] | object, sortQuery?: object) => Promise, 17 | insertOne: (document: object) => Promise, 18 | insertMany: (documents: object[]) => Promise, 19 | updateOne: (query: string | object, updateQuery: any) => Promise, 20 | updateMany: (query: string[] | object, updateQuery: any) => Promise, 21 | removeOne: (query: string) => Promise 22 | removeMany: (query: string[] | object) => Promise 23 | } 24 | 25 | export class DataService { 26 | private static _instance: DataService; 27 | private _queryWrapper = getDbQueryWrapper(); 28 | 29 | private constructor() { 30 | if(DataService._instance !== undefined) { 31 | throw new Error(`Can't create 2nd instance of singleton class`); 32 | } 33 | } 34 | 35 | public static getInstance(): DataService { 36 | if(DataService._instance === undefined) { 37 | DataService._instance = new DataService(); 38 | } 39 | return DataService._instance; 40 | } 41 | 42 | public makeDBQ(collection: Collection): DBQ { 43 | return { 44 | findOne: async (query: object | string) => { 45 | const dbCollection = await this._getDbCollectionFromCollection(collection); 46 | return this._queryWrapper.dbFindOne(dbCollection, query); 47 | }, 48 | findMany: async (query: object | string[], sortQuery: object) => { 49 | const dbCollection = await this._getDbCollectionFromCollection(collection); 50 | return this._queryWrapper.dbFindMany(dbCollection, query, sortQuery); 51 | }, 52 | insertOne: async (doc: object) => { 53 | const dbCollection = await this._getDbCollectionFromCollection(collection); 54 | return 
this._queryWrapper.dbInsertOne(dbCollection, doc); 55 | }, 56 | insertMany: async (docs: object[]) => { 57 | const dbCollection = await this._getDbCollectionFromCollection(collection); 58 | return this._queryWrapper.dbInsertMany(dbCollection, docs); 59 | }, 60 | updateOne: async(query: object | string, updateQuery: object) => { 61 | const dbCollection = await this._getDbCollectionFromCollection(collection); 62 | return this._queryWrapper.dbUpdateOne(dbCollection, query, updateQuery); 63 | }, 64 | updateMany: async (query: object | string[], updateQuery: object) => { 65 | const dbCollection = await this._getDbCollectionFromCollection(collection); 66 | return this._queryWrapper.dbUpdateMany(dbCollection, query, updateQuery); 67 | }, 68 | removeOne: async (query: string | object) => { 69 | const dbCollection = await this._getDbCollectionFromCollection(collection); 70 | return this._queryWrapper.dbRemoveOne(dbCollection, query); 71 | }, 72 | removeMany: async (query: object | string[]) => { 73 | const dbCollection = await this._getDbCollectionFromCollection(collection); 74 | return this._queryWrapper.dbRemoveMany(dbCollection, query); 75 | } 76 | }; 77 | } 78 | 79 | private async _getConnector(): Promise { 80 | try { 81 | const connector = await DbConnectorFactory.getDbConnector(); 82 | return connector; 83 | } catch (err) { 84 | console.log(`data service got an error while connecting to database: ${err}`); 85 | throw err; 86 | } 87 | } 88 | 89 | private async _getDbCollectionFromCollection(collection: Collection): Promise { 90 | const connector = await this._getConnector(); 91 | const db = connector.db; 92 | 93 | let dbCollection = db.get(collection); 94 | if(dbCollection === undefined) { 95 | throw new Error('Can`t find collection ' + collection); 96 | } 97 | return dbCollection; 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /server/src/services/data_layer/nedb.ts: 
-------------------------------------------------------------------------------- 1 | import { DbQueryWrapper } from './basedb'; 2 | import { wrapIdToQuery, wrapIdsToQuery, isEmptyArray } from './utils'; 3 | 4 | import * as nedb from 'nedb'; 5 | 6 | 7 | export class NeDbQueryWrapper implements DbQueryWrapper { 8 | async dbInsertOne(nd: nedb, doc: object): Promise { 9 | return new Promise((resolve, reject) => { 10 | nd.insert(doc, (err, newDoc: any) => { 11 | if(err) { 12 | reject(err); 13 | } else { 14 | resolve(newDoc._id); 15 | } 16 | }) 17 | }); 18 | } 19 | 20 | async dbInsertMany(nd: nedb, docs: object[]): Promise { 21 | if(docs.length === 0) { 22 | return Promise.resolve([]); 23 | } 24 | return new Promise((resolve, reject) => { 25 | nd.insert(docs, (err, newDocs: any[]) => { 26 | if(err) { 27 | reject(err); 28 | } else { 29 | resolve(newDocs.map(d => d._id)); 30 | } 31 | }); 32 | }); 33 | } 34 | 35 | async dbUpdateOne(nd: nedb, query: string | object, updateQuery: object): Promise { 36 | // https://github.com/louischatriot/nedb#updating-documents 37 | let nedbUpdateQuery = { $set: updateQuery } 38 | query = wrapIdToQuery(query); 39 | return new Promise((resolve, reject) => { 40 | nd.update( 41 | query, 42 | nedbUpdateQuery, 43 | { returnUpdatedDocs: true }, 44 | (err: Error, numAffected: number, affectedDocument: any) => { 45 | if(err) { 46 | reject(err); 47 | } else { 48 | resolve(); 49 | } 50 | } 51 | ); 52 | }); 53 | } 54 | 55 | async dbUpdateMany(nd: nedb, query: string[] | object, updateQuery: object): Promise { 56 | // https://github.com/louischatriot/nedb#updating-documents 57 | if(isEmptyArray(query)) { 58 | return; 59 | } 60 | let nedbUpdateQuery = { $set: updateQuery }; 61 | query = wrapIdsToQuery(query); 62 | return new Promise((resolve, reject) => { 63 | nd.update( 64 | query, 65 | nedbUpdateQuery, 66 | { returnUpdatedDocs: true, multi: true }, 67 | (err: Error, numAffected: number, affectedDocuments: any[]) => { 68 | if(err) { 69 | reject(err); 70 
| } else { 71 | resolve(); 72 | } 73 | } 74 | ); 75 | }); 76 | } 77 | 78 | async dbFindOne(nd: nedb, query: string | object): Promise { 79 | query = wrapIdToQuery(query); 80 | return new Promise((resolve, reject) => { 81 | nd.findOne(query, (err, doc) => { 82 | if(err) { 83 | reject(err); 84 | } else { 85 | resolve(doc); 86 | } 87 | }); 88 | }); 89 | } 90 | 91 | async dbFindMany(nd: nedb, query: string[] | object, sortQuery: object = {}): Promise { 92 | if(isEmptyArray(query)) { 93 | return Promise.resolve([]); 94 | } 95 | query = wrapIdsToQuery(query); 96 | return new Promise((resolve, reject) => { 97 | nd.find(query).sort(sortQuery).exec((err, docs: any[]) => { 98 | if(err) { 99 | reject(err); 100 | } else { 101 | resolve(docs); 102 | } 103 | }); 104 | }); 105 | } 106 | 107 | async dbRemoveOne(nd: nedb, query: string | object): Promise { 108 | query = wrapIdToQuery(query); 109 | return new Promise((resolve, reject) => { 110 | nd.remove(query, { /* options */ }, (err, numRemoved) => { 111 | if(err) { 112 | reject(err); 113 | } else { 114 | if(numRemoved > 1) { 115 | throw new Error(`Removed ${numRemoved} elements with query: ${JSON.stringify(query)}. 
Only one is Ok.`); 116 | } else { 117 | resolve(numRemoved == 1); 118 | } 119 | } 120 | }); 121 | }); 122 | } 123 | 124 | async dbRemoveMany(nd: nedb, query: string[] | object): Promise { 125 | if(isEmptyArray(query)) { 126 | return Promise.resolve(0); 127 | } 128 | query = wrapIdsToQuery(query); 129 | return new Promise((resolve, reject) => { 130 | nd.remove(query, { multi: true }, (err, numRemoved) => { 131 | if(err) { 132 | reject(err); 133 | } else { 134 | resolve(numRemoved); 135 | } 136 | }); 137 | }); 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /server/src/services/data_layer/mongodb.ts: -------------------------------------------------------------------------------- 1 | import { DbQueryWrapper, QueryExecutionError } from './basedb'; 2 | 3 | import { Collection, FilterQuery, ObjectID } from 'mongodb'; 4 | import { wrapIdToMongoDbQuery, wrapIdsToMongoDbQuery, isEmptyArray } from './utils'; 5 | 6 | import * as _ from 'lodash'; 7 | 8 | 9 | export class MongoDbQueryWrapper implements DbQueryWrapper { 10 | 11 | async dbInsertOne(collection: Collection, doc: any): Promise { 12 | // http://mongodb.github.io/node-mongodb-native/3.1/api/Collection.html#insertOne 13 | 14 | // TODO: move to utils 15 | if(doc._id !== undefined) { 16 | doc._id = new ObjectID(doc._id); 17 | } 18 | const newDoc = await collection.insertOne(doc); 19 | return newDoc.insertedId.toString(); 20 | } 21 | 22 | async dbInsertMany(collection: Collection, docs: any[]): Promise { 23 | // http://mongodb.github.io/node-mongodb-native/3.1/api/Collection.html#insertMany 24 | if(docs.length === 0) { 25 | return []; 26 | } 27 | 28 | // TODO: move to utils 29 | docs.forEach(doc => { 30 | if(doc._id !== undefined) { 31 | doc._id = new ObjectID(doc._id); 32 | } 33 | }); 34 | const newDocs = await collection.insertMany(docs); 35 | return _.map(newDocs.insertedIds, (id: ObjectID) => id.toString()); 36 | } 37 | 38 | async dbUpdateOne(collection: 
Collection, query: FilterQuery, updateQuery: any): Promise {
    // http://mongodb.github.io/node-mongodb-native/3.1/api/Collection.html#updateOne

    // "_id" is immutable. Mongo throws an exception if updateQuery contains "_id" field.
    if(updateQuery._id !== undefined) {
      delete updateQuery._id;
    }
    let mongodbUpdateQuery = { $set: updateQuery }
    query = wrapIdToMongoDbQuery(query);

    await collection.updateOne(
      query,
      mongodbUpdateQuery
    );
  }

  async dbUpdateMany(collection: Collection, query: string[] | object, updateQuery: any): Promise {
    // http://mongodb.github.io/node-mongodb-native/3.1/api/Collection.html#updateMany
    // No-op for an empty id list: nothing to update.
    if(isEmptyArray(query)) {
      return;
    }
    // "_id" is immutable. Mongo throws an exception if updateQuery contains "_id" field.
    if(updateQuery._id !== undefined) {
      delete updateQuery._id;
    }
    let mongodbUpdateQuery = { $set: updateQuery };
    query = wrapIdsToMongoDbQuery(query);
    await collection.updateMany(
      query,
      mongodbUpdateQuery
    );
  }

  async dbFindOne(collection: Collection, query: FilterQuery): Promise {
    // http://mongodb.github.io/node-mongodb-native/3.1/api/Collection.html#findOne
    query = wrapIdToMongoDbQuery(query);
    let doc = await collection.findOne(query);
    // TODO: move to utils
    // Callers expect string ids, not ObjectID instances.
    if(doc !== null) {
      doc._id = doc._id.toString();
    }
    return doc;
  }

  async dbFindMany(collection: Collection, query: string[] | object, sortQuery: object = {}): Promise {
    // http://mongodb.github.io/node-mongodb-native/3.1/api/Collection.html#find
    if(isEmptyArray(query)) {
      return [];
    }
    query = wrapIdsToMongoDbQuery(query);
    try {
      const docs = await collection.find(query).sort(sortQuery).toArray();
      // Normalize ObjectID -> string for every returned document.
      docs.forEach(doc => {
        if (doc !== null) {
          doc._id = doc._id.toString();
        }
      });
      return docs;
    } catch(error) {
      // Wrap driver errors so callers can distinguish query failures.
      console.error(`Can't get query result for query ${JSON.stringify(query)} in collection: ${collection.namespace}`);
      throw new QueryExecutionError(`MongoDB query error: ${error.message}`);
    }
  }

  async dbRemoveOne(collection: Collection, query: FilterQuery): Promise {
    // http://mongodb.github.io/node-mongodb-native/3.1/api/Collection.html#deleteOne
    query = wrapIdToMongoDbQuery(query);
    const deleted = await collection.deleteOne(query);
    if(deleted.deletedCount > 1) {
      throw new Error(`Removed ${deleted.deletedCount} elements with query: ${JSON.stringify(query)}. Only one is Ok.`);
    }
    return deleted.deletedCount === 1;
  }

  async dbRemoveMany(collection: Collection, query: string[] | object): Promise {
    // http://mongodb.github.io/node-mongodb-native/3.1/api/Collection.html#deleteMany
    if(isEmptyArray(query)) {
      return 0;
    }
    query = wrapIdsToMongoDbQuery(query);
    const deleted = await collection.deleteMany(query);
    return deleted.deletedCount;
  }
}

import * as AnalyticsController from '../controllers/analytics_controller';
import * as AnalyticUnit from '../models/analytic_units';

import { saveAnalyticUnitFromObject } from '../controllers/analytics_controller';

import * as Router from 'koa-router';
import * as _ from 'lodash';


// Returns the status of a single analytic unit; the `id` query param is required.
async function getStatus(ctx: Router.IRouterContext) {
  let analyticUnitId = ctx.request.query.id as string;
  if(analyticUnitId === undefined) {
    throw new Error('Cannot get status of undefined id');
  }

  let analyticUnit = await AnalyticUnit.findById(analyticUnitId);
  if(analyticUnit === null) {
    throw new Error(`Cannot find analytic
unit with id ${analyticUnitId}`); 19 | } 20 | 21 | ctx.response.body = { 22 | status: analyticUnit.status 23 | }; 24 | 25 | if(analyticUnit.status === AnalyticUnit.AnalyticUnitStatus.FAILED) { 26 | ctx.response.body = { 27 | errorMessage: analyticUnit.error 28 | }; 29 | } 30 | } 31 | 32 | async function getUnits(ctx: Router.IRouterContext) { 33 | const panelId = ctx.request.query.panelId as string; 34 | if(panelId === undefined) { 35 | throw new Error('Cannot get units of undefined panelId'); 36 | } 37 | 38 | let analyticUnits = await AnalyticUnit.findMany({ panelId }); 39 | if(analyticUnits === null) { 40 | analyticUnits = []; 41 | } 42 | 43 | const analyticUnitObjects = analyticUnits.map(analyticUnit => analyticUnit.toPanelObject()); 44 | 45 | ctx.response.body = { 46 | analyticUnits: analyticUnitObjects 47 | }; 48 | } 49 | 50 | function getTypes(ctx: Router.IRouterContext) { 51 | ctx.response.body = AnalyticUnit.ANALYTIC_UNIT_TYPES; 52 | } 53 | 54 | async function createUnit(ctx: Router.IRouterContext) { 55 | const id = await saveAnalyticUnitFromObject(ctx.request.body); 56 | 57 | ctx.response.body = { id }; 58 | } 59 | 60 | async function updateUnit(ctx: Router.IRouterContext) { 61 | const analyticUnitObj = ctx.request.body as unknown as AnalyticUnit.AnalyticUnit; 62 | if(analyticUnitObj.id === undefined) { 63 | throw new Error('Cannot update undefined id'); 64 | } 65 | 66 | await AnalyticUnit.update(analyticUnitObj.id, analyticUnitObj); 67 | // TODO: check if learning is necessary without database query 68 | const analyticUnit = await AnalyticUnit.findById(analyticUnitObj.id); 69 | 70 | if(analyticUnit.learningAfterUpdateRequired) { 71 | await AnalyticsController.runLearning(analyticUnitObj.id); 72 | } 73 | 74 | ctx.response.body = { 75 | code: 200, 76 | message: 'Success' 77 | }; 78 | } 79 | 80 | async function updateMetric(ctx: Router.IRouterContext) { 81 | const { analyticUnitId, metric, datasource } = ctx.request.body as { 82 | analyticUnitId: 
AnalyticUnit.AnalyticUnitId, metric: any, datasource: any 83 | }; 84 | if(analyticUnitId === undefined) { 85 | throw new Error('Cannot update undefined id'); 86 | } 87 | if(metric === undefined) { 88 | throw new Error('Cannot set undefined metric'); 89 | } 90 | if(datasource === undefined) { 91 | throw new Error('Cannot set undefined datasource'); 92 | } 93 | 94 | await AnalyticsController.setMetric(analyticUnitId, metric, datasource); 95 | 96 | ctx.response.body = { 97 | code: 200, 98 | message: 'Success' 99 | }; 100 | } 101 | 102 | async function updateAlert(ctx: Router.IRouterContext) { 103 | const { analyticUnitId, alert } = ctx.request.body as { 104 | analyticUnitId: AnalyticUnit.AnalyticUnitId, alert: boolean 105 | }; 106 | if(analyticUnitId === undefined) { 107 | throw new Error('Cannot update undefined id'); 108 | } 109 | if(alert === undefined) { 110 | throw new Error('Cannot set undefined alert status'); 111 | } 112 | 113 | await AnalyticsController.setAlert(analyticUnitId, alert); 114 | 115 | ctx.response.body = { 116 | code: 200, 117 | message: 'Success' 118 | }; 119 | 120 | } 121 | 122 | async function deleteUnit(ctx: Router.IRouterContext) { 123 | const analyticUnitId = ctx.request.query.id as string; 124 | if(analyticUnitId === undefined) { 125 | throw new Error('Cannot delete undefined id'); 126 | } 127 | await AnalyticsController.remove(analyticUnitId); 128 | ctx.response.body = { 129 | code: 200, 130 | message: 'Success' 131 | }; 132 | } 133 | 134 | async function runDetect(ctx: Router.IRouterContext) { 135 | const { ids, from, to } = ctx.request.body as { 136 | ids: AnalyticUnit.AnalyticUnitId[], from: number, to: number 137 | }; 138 | 139 | await Promise.all(ids.map(id => AnalyticsController.runLearningWithDetection(id, from, to))); 140 | 141 | ctx.response.body = { 142 | code: 200, 143 | message: 'Success' 144 | }; 145 | } 146 | 147 | 148 | export var router = new Router(); 149 | 150 | router.get('/units', getUnits); 151 | router.get('/status', 
getStatus); 152 | router.get('/types', getTypes); 153 | router.patch('/metric', updateMetric); 154 | router.patch('/alert', updateAlert); 155 | 156 | router.post('/', createUnit); 157 | router.delete('/', deleteUnit); 158 | router.patch('/', updateUnit); 159 | 160 | router.post('/detect', runDetect); 161 | -------------------------------------------------------------------------------- /server/src/models/detection_model.ts: -------------------------------------------------------------------------------- 1 | import { AnalyticUnitId } from './analytic_units'; 2 | import { Collection } from '../services/data_service/collection'; 3 | import { DataService } from '../services/data_service'; 4 | 5 | import * as _ from 'lodash'; 6 | 7 | 8 | const db = DataService.getInstance().makeDBQ(Collection.DETECTION_SPANS); 9 | 10 | export enum DetectionStatus { 11 | READY = 'READY', 12 | RUNNING = 'RUNNING', 13 | FAILED = 'FAILED' 14 | } 15 | 16 | export type SpanId = string; 17 | 18 | /** 19 | * Detection-span represents the state of dataset segment: 20 | * - READY: detection is done 21 | * - RUNNING: detection is running 22 | * - FAILED: detection failed 23 | */ 24 | export class DetectionSpan { 25 | constructor( 26 | public analyticUnitId: AnalyticUnitId, 27 | public from: number, 28 | public to: number, 29 | public status: DetectionStatus, 30 | public id?: SpanId, 31 | ) { 32 | if(analyticUnitId === undefined) { 33 | throw new Error('AnalyticUnitId is undefined'); 34 | } 35 | if(from === undefined) { 36 | throw new Error('from is undefined'); 37 | } 38 | if(isNaN(from)) { 39 | throw new Error('from is NaN'); 40 | } 41 | if(to === undefined) { 42 | throw new Error('to is undefined'); 43 | } 44 | if(isNaN(to)) { 45 | throw new Error('to is NaN'); 46 | } 47 | if(status === undefined) { 48 | throw new Error('status is undefined'); 49 | } 50 | } 51 | 52 | public toObject() { 53 | return { 54 | _id: this.id, 55 | analyticUnitId: this.analyticUnitId, 56 | from: this.from, 57 | to: 
this.to, 58 | status: this.status 59 | }; 60 | } 61 | 62 | public toTemplate(): any { 63 | return { 64 | ...this.toObject(), 65 | _id: undefined, 66 | analyticUnitId: undefined 67 | }; 68 | } 69 | 70 | static fromObject(obj: any): DetectionSpan { 71 | if(obj === undefined) { 72 | throw new Error('obj is undefined'); 73 | } 74 | return new DetectionSpan( 75 | obj.analyticUnitId, 76 | +obj.from, +obj.to, 77 | obj.status, 78 | obj._id 79 | ); 80 | } 81 | } 82 | 83 | export type FindManyQuery = { 84 | status?: DetectionStatus, 85 | // TODO: 86 | // from?: { $gte?: number, $lte?: number } 87 | // to?: { $gte?: number, $lte?: number } 88 | timeFromLTE?: number, 89 | timeToGTE?: number, 90 | timeFromGTE?: number, 91 | timeToLTE?: number, 92 | } 93 | 94 | export async function findMany(id: AnalyticUnitId, query?: FindManyQuery): Promise { 95 | let dbQuery: any = { analyticUnitId: id }; 96 | if(query.status !== undefined) { 97 | dbQuery.status = query.status; 98 | } 99 | if(query.timeFromLTE !== undefined) { 100 | dbQuery.from = { $lte: query.timeFromLTE }; 101 | } 102 | if(query.timeToGTE !== undefined) { 103 | dbQuery.to = { $gte: query.timeToGTE }; 104 | } 105 | if(query.timeFromGTE !== undefined) { 106 | dbQuery.from = { $gte: query.timeFromGTE }; 107 | } 108 | if(query.timeToLTE !== undefined) { 109 | dbQuery.to = { $lte: query.timeToLTE }; 110 | } 111 | 112 | const spans = await db.findMany(dbQuery); 113 | if(spans === null) { 114 | return []; 115 | } 116 | return spans.map(DetectionSpan.fromObject); 117 | } 118 | 119 | // TODO: maybe it could have a better name 120 | export async function findByAnalyticUnitIds(analyticUnitIds: AnalyticUnitId[]): Promise { 121 | const spans = await db.findMany({ analyticUnitId: { $in: analyticUnitIds } }); 122 | 123 | if(spans === null) { 124 | return []; 125 | } 126 | return spans.map(DetectionSpan.fromObject); 127 | } 128 | 129 | export async function getIntersectedSpans( 130 | analyticUnitId: AnalyticUnitId, 131 | from: number, 132 
  to: number,
  status?: DetectionStatus
): Promise {
  return findMany(analyticUnitId, { status, timeFromLTE: to, timeToGTE: from });
}

/**
 * Inserts a span, coalescing it with already-stored spans:
 * intersecting spans of the *same* status are merged into one enclosing span,
 * and spans fully contained in the new one are removed regardless of status.
 * Resolves with the id of the stored (possibly merged) span.
 */
export async function insertSpan(span: DetectionSpan): Promise {
  let spanToInsert = span.toObject();

  const intersections = await getIntersectedSpans(span.analyticUnitId, span.from, span.to);
  if(_.isEmpty(intersections)) {
    return db.insertOne(spanToInsert);
  }
  const spansWithSameStatus = intersections.filter(
    intersectedSpan => intersectedSpan.status === span.status
  );

  let from = span.from;
  let to = span.to;

  // Grow the new span to cover every same-status span it touches.
  if(!_.isEmpty(spansWithSameStatus)) {
    let minFrom = _.minBy(spansWithSameStatus, s => s.from).from;
    from = Math.min(from, minFrom);

    let maxTo = _.maxBy(spansWithSameStatus, s => s.to).to;
    to = Math.max(to, maxTo);
  }

  // Spans fully inside the new one are superseded, whatever their status.
  const spansInside = intersections.filter(
    intersectedSpan => intersectedSpan.from >= span.from && intersectedSpan.to <= span.to
  );
  // NOTE(review): ids may appear in both lists; removeMany tolerates duplicates.
  const spanIdsToRemove = _.concat(
    spansWithSameStatus.map(s => s.id),
    spansInside.map(s => s.id)
  );

  await db.removeMany(spanIdsToRemove);

  spanToInsert = new DetectionSpan(span.analyticUnitId, from, to, span.status).toObject();

  return db.insertOne(spanToInsert);
}

export async function insertMany(detectionSpans: any[]): Promise {
  return db.insertMany(detectionSpans);
}

// Removes every span belonging to the given analytic unit.
export function clearSpans(analyticUnitId: AnalyticUnitId) {
  return db.removeMany({ analyticUnitId });
}

/*
  How to add a migration:
  - create migration function
  - add it with the next revision number to REVISIONS
Map 5 | It will be automatically applied if actual DB revision < added revision 6 | 7 | Note: do not import code from other modules here because it can be changed 8 | */ 9 | 10 | import { Collection } from './collection'; 11 | import { DataService } from './index'; 12 | 13 | import * as _ from 'lodash'; 14 | 15 | 16 | const metaDB = DataService.getInstance().makeDBQ(Collection.DB_META); 17 | const analyticUnitsDB = DataService.getInstance().makeDBQ(Collection.ANALYTIC_UNITS); 18 | const analyticUnitCachesDB = DataService.getInstance().makeDBQ(Collection.ANALYTIC_UNIT_CACHES); 19 | const thresholdsDB = DataService.getInstance().makeDBQ(Collection.THRESHOLD); 20 | 21 | const DB_META_ID = '000000000000000000000001'; //24 symbols for mongodb 22 | 23 | type DbMeta = { 24 | revision: number 25 | }; 26 | 27 | const REVISIONS = new Map([ 28 | [1, convertPanelUrlToPanelId], 29 | [2, convertUnderscoreToCamelCase], 30 | [3, integrateThresholdsIntoAnalyticUnits], 31 | [4, addDetectorTypes], 32 | [5, switchBoundsDisabling] 33 | ]); 34 | 35 | export async function applyDBMigrations() { 36 | let meta: DbMeta = await metaDB.findOne(DB_META_ID); 37 | if(meta === null) { 38 | meta = { revision: 0 }; 39 | await metaDB.insertOne({ _id: DB_META_ID, ...meta }); 40 | } 41 | 42 | await REVISIONS.forEach(async (migration, revision) => { 43 | if(meta.revision < revision) { 44 | console.log(`Applying migration ${revision}`); 45 | await migration(); 46 | 47 | meta.revision = revision; 48 | await metaDB.updateOne(DB_META_ID, meta); 49 | } 50 | }); 51 | } 52 | 53 | async function convertPanelUrlToPanelId() { 54 | const analyticUnits = await analyticUnitsDB.findMany({ panelUrl: { $exists: true } }); 55 | if(analyticUnits.length === 0) { 56 | return; 57 | } 58 | 59 | const PANEL_URL_REGEX = /^(.+)\/d\/([^\/]+)\/.+panelId=(\d+)/; 60 | const NEW_PANEL_URL_REGEX = /^(.+)\/dashboard\/(\w+).+panelId=(\d+)/; 61 | const updatedAnalyticUnits = analyticUnits 62 | .map(analyticUnit => { 63 | const 
parsedPanelUrl = analyticUnit.panelUrl.match(PANEL_URL_REGEX) || analyticUnit.panelUrl.match(NEW_PANEL_URL_REGEX); 64 | if(parsedPanelUrl === null) { 65 | return null; 66 | } 67 | const grafanaUrl = parsedPanelUrl[1]; 68 | const dashboardId = parsedPanelUrl[2]; 69 | const oldPanelId = parsedPanelUrl[3]; 70 | const panelId = `${dashboardId}/${oldPanelId}`; 71 | 72 | return { 73 | _id: analyticUnit._id, 74 | grafanaUrl, 75 | panelId 76 | }; 77 | }) 78 | .filter(analyticUnit => analyticUnit !== null); 79 | 80 | console.log(updatedAnalyticUnits); 81 | const promises = updatedAnalyticUnits.map(analyticUnit => 82 | analyticUnitsDB.updateOne(analyticUnit._id, { 83 | panelUrl: undefined, 84 | ...analyticUnit 85 | }) 86 | ); 87 | 88 | await Promise.all(promises); 89 | } 90 | 91 | async function convertUnderscoreToCamelCase() { 92 | const analyticUnitCaches = await analyticUnitCachesDB.findMany({}); 93 | 94 | const updatedAnalyticUnitCaches = analyticUnitCaches.map(analyticUnitCache => { 95 | let data = null; 96 | if(analyticUnitCache.data !== null) { 97 | data = _.mapKeys(analyticUnitCache.data, (value, key) => _.camelCase(key)); 98 | } 99 | 100 | return { data, _id: analyticUnitCache._id }; 101 | }); 102 | 103 | const promises = updatedAnalyticUnitCaches.map(analyticUnitCache => 104 | analyticUnitCachesDB.updateOne(analyticUnitCache._id, { data: analyticUnitCache.data }) 105 | ); 106 | 107 | await Promise.all(promises); 108 | } 109 | 110 | async function integrateThresholdsIntoAnalyticUnits() { 111 | const thresholds = await thresholdsDB.findMany({}); 112 | 113 | const promises = thresholds.map(threshold => 114 | analyticUnitsDB.updateOne(threshold._id, { 115 | value: threshold.value, 116 | condition: threshold.condition 117 | }) 118 | ); 119 | 120 | await Promise.all(promises); 121 | await thresholdsDB.removeMany({}); 122 | } 123 | 124 | async function addDetectorTypes() { 125 | const analyticUnits = await analyticUnitsDB.findMany({ detectorType: { $exists: false } }); 
126 | 127 | const promises = analyticUnits.map(analyticUnit => 128 | analyticUnitsDB.updateOne(analyticUnit._id, { detectorType: getDetectorByType(analyticUnit.type) }) 129 | ); 130 | 131 | await Promise.all(promises); 132 | } 133 | 134 | async function switchBoundsDisabling() { 135 | const analyticUnits = await analyticUnitsDB.findMany({ disableBound: { $exists: true } }); 136 | 137 | const promises = analyticUnits.map(analyticUnit => { 138 | let enableBounds; 139 | if(analyticUnit.disableBound === 'NONE') { 140 | enableBounds = 'ALL'; 141 | } 142 | if(analyticUnit.disableBound === 'UPPER') { 143 | enableBounds = 'LOWER'; 144 | } else { 145 | enableBounds = 'UPPER'; 146 | } 147 | analyticUnitsDB.updateOne(analyticUnit._id, { enableBounds }) 148 | }); 149 | 150 | await Promise.all(promises); 151 | } 152 | 153 | function getDetectorByType(analyticUnitType: string): string { 154 | const analyticUnitTypesMapping = { 155 | pattern: [ 'GENERAL', 'PEAK', 'TROUGH', 'JUMP', 'DROP' ], 156 | anomaly: [ 'ANOMALY' ], 157 | threshold: [ 'THRESHOLD' ] 158 | }; 159 | 160 | let detector; 161 | _.forOwn(analyticUnitTypesMapping, (types, detectorType) => { 162 | if(_.includes(types, analyticUnitType)) { 163 | detector = detectorType; 164 | } 165 | }); 166 | 167 | if(detector === undefined) { 168 | throw new Error(`Can't find detector for analytic unit of type "${analyticUnitType}"`); 169 | } 170 | return detector; 171 | } 172 | -------------------------------------------------------------------------------- /server/src/services/notification_service.ts: -------------------------------------------------------------------------------- 1 | import * as AnalyticUnit from '../models/analytic_units'; 2 | import * as config from '../config'; 3 | 4 | import { WebClient } from '@slack/web-api'; 5 | import axios from 'axios'; 6 | import * as _ from 'lodash'; 7 | 8 | 9 | export enum WebhookType { 10 | DETECT = 'DETECT', 11 | FAILURE = 'FAILURE', 12 | RECOVERY = 'RECOVERY', 13 | MESSAGE = 
'MESSAGE' 14 | } 15 | 16 | export type MetaInfo = { 17 | type: WebhookType, 18 | from: number, 19 | to: number, 20 | params?: any 21 | } 22 | 23 | export type AnalyticMeta = { 24 | type: WebhookType, 25 | analyticUnitType: string, 26 | analyticUnitName: string, 27 | analyticUnitId: AnalyticUnit.AnalyticUnitId, 28 | grafanaUrl: string, 29 | from: number, 30 | to: number 31 | message?: any 32 | } 33 | 34 | export declare type Notification = { 35 | text: string, 36 | meta: MetaInfo | AnalyticMeta, 37 | image?: Buffer 38 | } 39 | 40 | // TODO: split notifiers into 3 files 41 | export interface Notifier { 42 | sendNotification(notification: Notification): Promise; 43 | } 44 | 45 | // TODO: singleton 46 | export function getNotifier(): Notifier { 47 | if(config.HASTIC_ALERT_TYPE === config.AlertTypes.WEBHOOK) { 48 | return new WebhookNotifier(); 49 | } 50 | 51 | if(config.HASTIC_ALERT_TYPE === config.AlertTypes.ALERTMANAGER) { 52 | return new AlertManagerNotifier(); 53 | } 54 | 55 | if(config.HASTIC_ALERT_TYPE === config.AlertTypes.SLACK) { 56 | return new SlackNotifier(); 57 | } 58 | 59 | throw new Error(`${config.HASTIC_ALERT_TYPE} alert type not supported`); 60 | } 61 | 62 | class WebhookNotifier implements Notifier { 63 | async sendNotification(notification: Notification) { 64 | if(config.HASTIC_WEBHOOK_URL === null) { 65 | console.log(`HASTIC_WEBHOOK_URL is not set, skip sending notification: ${notification.text}`); 66 | return; 67 | } 68 | 69 | notification.text += `\nInstance: ${config.HASTIC_INSTANCE_NAME}`; 70 | const data = JSON.stringify({ 71 | ...notification, 72 | image: notification.image === undefined ? 
notification.image : notification.image.toString('base64') 73 | }); 74 | 75 | const options = { 76 | method: 'POST', 77 | url: config.HASTIC_WEBHOOK_URL, 78 | data, 79 | headers: { 'Content-Type': 'application/json' } 80 | }; 81 | 82 | await axios(options); 83 | } 84 | } 85 | 86 | type PostableAlertLabels = { 87 | alertname: string; 88 | [key: string]: string 89 | }; 90 | 91 | type PostableAlertAnnotations = { 92 | message?: string; 93 | summary?: string; 94 | }; 95 | 96 | type PostableAlert = { 97 | labels: PostableAlertLabels, 98 | annotations: PostableAlertAnnotations 99 | generatorURL?: string, 100 | endsAt?: string 101 | }; 102 | 103 | class AlertManagerNotifier implements Notifier { 104 | 105 | /** 106 | * @throws {Error} from axios if query fails 107 | */ 108 | async sendNotification(notification: Notification) { 109 | if(config.HASTIC_ALERTMANAGER_URL === null) { 110 | console.log(`HASTIC_ALERTMANAGER_URL is not set, skip sending notification: ${notification.text}`); 111 | return; 112 | } 113 | 114 | let generatorURL: string; 115 | let labels: PostableAlertLabels = { 116 | alertname: notification.meta.type, 117 | instance: config.HASTIC_INSTANCE_NAME 118 | }; 119 | let annotations: PostableAlertAnnotations = { 120 | message: notification.text 121 | }; 122 | 123 | if(_.has(notification.meta, 'grafanaUrl')) { 124 | generatorURL = (notification.meta as AnalyticMeta).grafanaUrl; 125 | labels.alertname = (notification.meta as AnalyticMeta).analyticUnitName; 126 | labels.analyticUnitId = (notification.meta as AnalyticMeta).analyticUnitId; 127 | labels.analyticUnitType = (notification.meta as AnalyticMeta).analyticUnitType; 128 | annotations.message = `${(notification.meta as AnalyticMeta).message}\nURL: ${generatorURL}`; 129 | } 130 | 131 | annotations.message += `\nInstance: ${config.HASTIC_INSTANCE_NAME}`; 132 | 133 | let alertData: PostableAlert = { 134 | labels, 135 | annotations, 136 | generatorURL 137 | }; 138 | 139 | let options = { 140 | method: 'POST', 
141 | url: `${config.HASTIC_ALERTMANAGER_URL}/api/v2/alerts`, 142 | data: JSON.stringify([alertData]), 143 | headers: { 'Content-Type': 'application/json' } 144 | }; 145 | 146 | // first part: send "start" request 147 | await axios(options); 148 | // TODO: resolve FAILURE alert only after RECOVERY event 149 | // second part: send "end" request 150 | options.data = JSON.stringify([alertData]); 151 | await axios(options); 152 | } 153 | } 154 | 155 | class SlackNotifier implements Notifier { 156 | async sendNotification(notification: Notification) { 157 | if(config.HASTIC_SLACK_API_TOKEN === null) { 158 | console.log(`HASTIC_SLACK_API_TOKEN is not set, skip sending notification: ${notification.text}`); 159 | return; 160 | } 161 | 162 | if(config.HASTIC_SLACK_NOTIFICATION_CHANNEL === null) { 163 | console.log(`HASTIC_SLACK_NOTIFICATION_CHANNEL is not set, skip sending notification: ${notification.text}`); 164 | return; 165 | } 166 | 167 | notification.text += `\nInstance: ${config.HASTIC_INSTANCE_NAME}`; 168 | 169 | const client = new WebClient(); 170 | 171 | let imageUrl = ''; 172 | if(notification.image !== undefined) { 173 | const uploadedFile = await client.files.upload({ 174 | channels: config.HASTIC_SLACK_NOTIFICATION_CHANNEL, 175 | file: notification.image, 176 | token: config.HASTIC_SLACK_API_TOKEN 177 | }); 178 | 179 | if(uploadedFile.file) { 180 | // @ts-ignore 181 | imageUrl = uploadedFile.file.url_private; 182 | } 183 | } 184 | 185 | await client.chat.postMessage({ 186 | text: notification.text, 187 | attachments: [{ image_url: imageUrl }], 188 | channel: config.HASTIC_SLACK_NOTIFICATION_CHANNEL, 189 | token: config.HASTIC_SLACK_API_TOKEN 190 | }); 191 | } 192 | } 193 | -------------------------------------------------------------------------------- /server/src/utils/segments.ts: -------------------------------------------------------------------------------- 1 | //TODO: move this code to span model 2 | 3 | import * as _ from 'lodash'; 4 | 5 | 6 | export 
declare type Segment = {
  readonly from: number,
  readonly to: number
}

/**
 * A closed segment [from, to] over the integers; either end may be +/-Infinity.
 * [x, x] is a single point (length 1); construction throws for lengths < 1.
 */
export class IntegerSegment {
  readonly from: number;
  readonly to: number;

  constructor(from: number, to: number) {
    if(!(Number.isInteger(from) || !Number.isFinite(from))) {
      throw new Error(`From should be an Integer or Infinity, but got ${from}`);
    }
    if(!(Number.isInteger(to) || !Number.isFinite(to))) {
      // BUGFIX: this message interpolated ${from} instead of the offending ${to}
      throw new Error(`To should be an Integer or Infinity, but got ${to}`);
    }

    let l = IntegerSegment.lengthBetweenPoints(from, to);
    if(l < 1) {
      throw new Error(
        `Length of segment is less than 1: [${from}, ${to}].
        It's not possible for IntegerSegment`
      );
    }
    this.from = from;
    this.to = to;
  }

  get length(): number {
    return IntegerSegment.lengthBetweenPoints(this.from, this.to);
  }

  // NOTE: name is misspelled ("insersect") but kept as-is — it is public API
  // exercised by spec/utils/segments.jest.ts
  insersect(segment: IntegerSegment): IntegerSegment | undefined {
    let from = Math.max(this.from, segment.from);
    let to = Math.min(this.to, segment.to);
    if(IntegerSegment.lengthBetweenPoints(from, to) >= 1) {
      return new IntegerSegment(from, to);
    }
    return undefined;
  }

  toString(): string {
    return `[${this.from}, ${this.to}]`;
  }

  static lengthBetweenPoints(from: number, to: number): number {
    let l = to - from + 1; // because [x, x] has length 1
    if(isNaN(l)) { // when [Infinity, Infinity] or [-Infinity, -Infinity]
      return 0;
    } else {
      return Math.max(l, 0); // because [x, x - 1] we consider as zero length
    }
  }
}

export class IntegerSegmentsSet {

  private _segments: IntegerSegment[];

  // `normalized: true` promises the input is already sorted and merged
  constructor(segments: IntegerSegment[], normalized: boolean = false) {
    this._segments = segments;
    if(normalized !== true) {
      this._normalize();
    }
  }

  private _normalize() {
    if(this._segments.length === 0) {
      return;
    }
    let
sortedSegments = _.sortBy(this._segments, s => s.from); 77 | let lastFrom = sortedSegments[0].from; 78 | let lastTo = sortedSegments[0].to; 79 | let mergedSegments: IntegerSegment[] = []; 80 | for(let i = 1; i < sortedSegments.length; i++) { 81 | let currentSegment = sortedSegments[i]; 82 | if(lastTo + 1 >= currentSegment.from) { // because [a, x], [x + 1, b] is [a, b] 83 | lastTo = Math.max(currentSegment.to, lastTo); // we can be inside previous 84 | continue; 85 | } 86 | mergedSegments.push(new IntegerSegment(lastFrom, lastTo)); 87 | lastFrom = currentSegment.from; 88 | lastTo = currentSegment.to; 89 | } 90 | mergedSegments.push(new IntegerSegment(lastFrom, lastTo)); 91 | this._segments = mergedSegments; 92 | } 93 | 94 | get segments(): IntegerSegment[] { 95 | return this._segments; 96 | } 97 | 98 | inversed(): IntegerSegmentsSet { 99 | var invertedSegments: IntegerSegment[] = []; 100 | if(this._segments.length === 0) { 101 | invertedSegments = [new IntegerSegment(-Infinity, Infinity)]; 102 | } else { 103 | let push = (f: number, t: number) => { 104 | if(IntegerSegment.lengthBetweenPoints(f, t) > 0) { 105 | invertedSegments.push(new IntegerSegment(f, t)); 106 | } 107 | } 108 | _.reduce(this._segments, (prev: IntegerSegment | null, s: IntegerSegment) => { 109 | if(prev === null) { 110 | push(-Infinity, s.from - 1); 111 | } else { 112 | push(prev.to + 1, s.from - 1); 113 | } 114 | return s; 115 | }, null); 116 | push(this._segments[this._segments.length - 1].to + 1, Infinity); 117 | } 118 | return new IntegerSegmentsSet(invertedSegments, true); 119 | } 120 | 121 | intersect(other: IntegerSegmentsSet): IntegerSegmentsSet { 122 | let result: IntegerSegment[] = []; 123 | 124 | if(this._segments.length === 0 || other.segments.length === 0) { 125 | return new IntegerSegmentsSet([], true); 126 | } 127 | 128 | let currentSegmentIndex = 0; 129 | let withSegmentIndex = 0; 130 | 131 | do { 132 | let currentSegemet = this.segments[currentSegmentIndex]; 133 | let withSegment 
= other.segments[withSegmentIndex]; 134 | if(currentSegemet.to < withSegment.from) { 135 | currentSegmentIndex++; 136 | continue; 137 | } 138 | if(withSegment.to < currentSegemet.from) { 139 | withSegmentIndex++; 140 | continue; 141 | } 142 | let segmentsIntersection = currentSegemet.insersect(withSegment); 143 | if(segmentsIntersection === undefined) { 144 | throw new Error( 145 | `Impossible condition, segments ${currentSegemet} and ${withSegment} don't interset` 146 | ) 147 | } 148 | result.push(segmentsIntersection); 149 | 150 | if(currentSegemet.to < withSegment.to) { 151 | currentSegmentIndex++; 152 | } else { 153 | withSegmentIndex++; 154 | } 155 | } while ( 156 | currentSegmentIndex < this._segments.length && 157 | withSegmentIndex < other.segments.length 158 | ) 159 | 160 | return new IntegerSegmentsSet(result, true); 161 | } 162 | 163 | sub(other: IntegerSegmentsSet): IntegerSegmentsSet { 164 | let inversed = other.inversed(); 165 | return this.intersect(inversed); 166 | } 167 | 168 | } 169 | 170 | // TODO: move from utils and use generator 171 | /** 172 | * 173 | * @param inputSegment a big segment which we will cut 174 | * @param cutSegments segments to cut the inputSegment. Segments can overlay. 
175 | * 176 | * @returns array of segments remain after cut 177 | */ 178 | export function cutSegmentWithSegments(inputSegment: Segment, cutSegments: Segment[]): Segment[] { 179 | let setA = new IntegerSegmentsSet([new IntegerSegment(inputSegment.from, inputSegment.to)]); 180 | let setB = new IntegerSegmentsSet(cutSegments.map( 181 | s => new IntegerSegment(s.from, s.to) 182 | )); 183 | let setResult = setA.sub(setB); 184 | return setResult.segments.map(s => ({ from: s.from, to: s.to })); 185 | } 186 | -------------------------------------------------------------------------------- /server/src/config.ts: -------------------------------------------------------------------------------- 1 | import { getJsonDataSync } from './services/json_service'; 2 | import { normalizeUrl } from './utils/url'; 3 | import { parseTimeZone } from './utils/time'; 4 | 5 | import * as _ from 'lodash'; 6 | import * as moment from 'moment'; 7 | import * as path from 'path'; 8 | import * as fs from 'fs'; 9 | import * as os from 'os'; 10 | import { exit } from 'process'; // it's very bad to use it in config, but life is full of pain 11 | import * as dotenv from 'dotenv'; 12 | import { URL } from 'url'; 13 | 14 | const EXIT_CODE_MISSING_FIELD = 3; 15 | const EXIT_CODE_BAD_VALUE_FIELD = 4; 16 | 17 | // GIT_BRANCH, GIT_COMMITHASH, GIT_VERSION variables are defined by webpack 18 | // TypeScript doesn't know that these variables exist 19 | declare const GIT_BRANCH: string; 20 | declare const GIT_COMMITHASH: string; 21 | declare const GIT_VERSION: string; 22 | 23 | dotenv.config(); 24 | 25 | let configFile = path.join(__dirname, '../../config.json'); 26 | let configExists = fs.existsSync(configFile); 27 | 28 | // TODO: move to data_layer 29 | export type DBConfig = { 30 | user: string, 31 | password: string, 32 | url: string, 33 | dbName: string 34 | } 35 | 36 | export const ANALYTICS_PATH = path.join(__dirname, '../../analytics'); 37 | 38 | export const HASTIC_DB_IN_MEMORY = 
getConfigFieldAndPrintOrExit('HASTIC_IN_MEMORY_PERSISTANCE', false); 39 | // TODO: enum for DB types 40 | export const HASTIC_DB_CONNECTION_TYPE = getConfigFieldAndPrintOrExit('HASTIC_DB_CONNECTION_TYPE', 'nedb', ['nedb', 'mongodb']); 41 | 42 | //connection string syntax: :@/ 43 | export const HASTIC_DB_CONNECTION_STRING = getConfigFieldAndPrintOrExit( 44 | 'HASTIC_DB_CONNECTION_STRING', 45 | 'hastic:password@mongodb:27017/hastic' 46 | ); 47 | 48 | export const HASTIC_DB_CONFIG = getDbConfig(HASTIC_DB_CONNECTION_STRING); 49 | 50 | export const DATA_PATH = path.join(__dirname, '../../data'); 51 | export const ANALYTIC_UNITS_DATABASE_PATH = path.join(DATA_PATH, 'analytic_units.db'); 52 | export const ANALYTIC_UNIT_CACHES_DATABASE_PATH = path.join(DATA_PATH, 'analytic_unit_caches.db'); 53 | export const SEGMENTS_DATABASE_PATH = path.join(DATA_PATH, 'segments.db'); 54 | export const THRESHOLD_DATABASE_PATH = path.join(DATA_PATH, 'treshold.db'); 55 | export const DETECTION_SPANS_DATABASE_PATH = path.join(DATA_PATH, 'detection_spans.db'); 56 | export const DB_META_PATH = path.join(DATA_PATH, 'db_meta.db'); 57 | 58 | export const HASTIC_PORT = getConfigFieldAndPrintOrExit('HASTIC_PORT', '8000'); 59 | export const HASTIC_API_KEY = getConfigFieldAndPrintOrExit('HASTIC_API_KEY'); 60 | export const GRAFANA_URL = normalizeUrl(getConfigFieldAndPrintOrExit('GRAFANA_URL', null)); 61 | 62 | // TODO: save orgId in analytic_units.db 63 | export const ORG_ID = getConfigFieldAndPrintOrExit('ORG_ID', 1); 64 | 65 | export enum AlertTypes { 66 | WEBHOOK = 'webhook', 67 | ALERTMANAGER = 'alertmanager', 68 | SLACK = 'slack' 69 | }; 70 | export const HASTIC_ALERT_TYPE = getConfigFieldAndPrintOrExit('HASTIC_ALERT_TYPE', AlertTypes.WEBHOOK, _.values(AlertTypes)); 71 | export const HASTIC_ALERT_IMAGE = getConfigFieldAndPrintOrExit('HASTIC_ALERT_IMAGE', false); 72 | 73 | export const HASTIC_WEBHOOK_URL = getConfigFieldAndPrintOrExit('HASTIC_WEBHOOK_URL', null); 74 | export const 
HASTIC_TIMEZONE_OFFSET = getTimeZoneOffset(); 75 | 76 | export const HASTIC_ALERTMANAGER_URL = getConfigFieldAndPrintOrExit('HASTIC_ALERTMANAGER_URL', null); 77 | 78 | export const HASTIC_SLACK_API_TOKEN = getConfigFieldAndPrintOrExit('HASTIC_SLACK_API_TOKEN', null); 79 | export const HASTIC_SLACK_NOTIFICATION_CHANNEL = getConfigFieldAndPrintOrExit('HASTIC_SLACK_NOTIFICATION_CHANNEL', null); 80 | 81 | export const ANALYTICS_PING_INTERVAL = 500; // ms 82 | export const PACKAGE_VERSION = getPackageVersion(); 83 | export const GIT_INFO = { 84 | branch: GIT_BRANCH, 85 | commitHash: GIT_COMMITHASH, 86 | version: GIT_VERSION 87 | }; 88 | export const INSIDE_DOCKER = process.env.INSIDE_DOCKER !== undefined; 89 | export const PRODUCTION_MODE = process.env.NODE_ENV !== 'development'; 90 | 91 | // TODO: maybe rename it to "HASTIC_SERVER_ANALYTICS_URL" 92 | export const HASTIC_SERVER_URL = getHasticServerUrl(); 93 | export const HASTIC_INSTANCE_NAME = getConfigFieldAndPrintOrExit('HASTIC_INSTANCE_NAME', os.hostname()); 94 | 95 | 96 | /** 97 | * You get a value or exit from the main process 98 | */ 99 | function getConfigFieldAndPrintOrExit(field: string, defaultVal?: any, allowedVals?: any[]) { 100 | let val; 101 | 102 | if(process.env[field] !== undefined) { 103 | val = process.env[field]; 104 | } else if(configExists) { 105 | let config: any = getJsonDataSync(configFile); 106 | 107 | if(config[field] !== undefined) { 108 | val = config[field]; 109 | } 110 | } 111 | 112 | if(val === undefined || val == '') { 113 | if(defaultVal === undefined) { 114 | console.log(`Please configure ${field}`); 115 | exit(EXIT_CODE_MISSING_FIELD); 116 | } 117 | 118 | val = defaultVal; 119 | } 120 | 121 | if(allowedVals !== undefined && !_.includes(allowedVals, val)) { 122 | console.log(`${field} value must be one of: ${allowedVals}, got ${val}`); 123 | exit(EXIT_CODE_BAD_VALUE_FIELD); 124 | } 125 | 126 | console.log(`${field}: ${val}`); 127 | return val; 128 | } 129 | 130 | function 
getPackageVersion() { 131 | if(process.env.npm_package_version !== undefined) { 132 | return process.env.npm_package_version; 133 | } else { 134 | let packageFile = path.join(__dirname, '../package.json'); 135 | if(fs.existsSync(packageFile)) { 136 | let packageJson: any = getJsonDataSync(packageFile); 137 | return packageJson.version; 138 | } else { 139 | console.log(`Can't find package file ${packageFile}`); 140 | return null; 141 | } 142 | } 143 | } 144 | 145 | // TODO: move to data_layer 146 | function getDbConfig(connectionStr: string): DBConfig { 147 | const [user, password] = connectionStr.split('@')[0].split(':'); 148 | const [dbName, ...urlParts] = connectionStr.split('@')[1].split('/').reverse(); 149 | const url = urlParts.reverse().join('/'); 150 | 151 | const config = { 152 | user, 153 | password, 154 | url, 155 | dbName 156 | }; 157 | return config; 158 | } 159 | 160 | function getTimeZoneOffset(): number { 161 | let configTimeZone = getConfigFieldAndPrintOrExit('HASTIC_TIMEZONE_OFFSET', null); 162 | if(configTimeZone !== null) { 163 | return parseTimeZone(configTimeZone); 164 | } else { 165 | const serverUtcOffset = moment().utcOffset(); 166 | return serverUtcOffset; 167 | } 168 | } 169 | 170 | function getHasticServerUrl() { 171 | const urlString = getConfigFieldAndPrintOrExit('HASTIC_SERVER_URL', 'ws://localhost:8002'); 172 | 173 | try { 174 | const url = new URL(urlString); 175 | if(url.protocol !== 'ws:') { 176 | throw new Error(`Invalid protocol ${url.protocol}`); 177 | } 178 | 179 | return url; 180 | } catch(e) { 181 | console.log(`Invalid HASTIC_SERVER_URL, value must be url, got: ${urlString}`); 182 | exit(EXIT_CODE_BAD_VALUE_FIELD); 183 | } 184 | } 185 | -------------------------------------------------------------------------------- /server/src/services/data_puller.ts: -------------------------------------------------------------------------------- 1 | import { AnalyticsTask, AnalyticsTaskType } from '../models/analytics_task_model'; 2 | 
import * as AnalyticUnit from '../models/analytic_units'; 3 | import * as AnalyticUnitCache from '../models/analytic_unit_cache_model'; 4 | import { AnalyticsService } from './analytics_service'; 5 | import { HASTIC_API_KEY } from '../config'; 6 | import { availableReporter } from '../utils/reporter'; 7 | import { AlertService } from './alert_service'; 8 | import { getGrafanaUrl } from '../utils/grafana'; 9 | 10 | import { queryByMetric, GrafanaUnavailable, DatasourceUnavailable } from '@corpglory/tsdb-kit'; 11 | 12 | import * as _ from 'lodash'; 13 | 14 | 15 | type MetricDataChunk = { values: [number, number][], columns: string[] }; 16 | 17 | const PULL_PERIOD_MS = 5000; 18 | 19 | 20 | export class DataPuller { 21 | 22 | private _analyticReadyConsoleReporter = availableReporter( 23 | 'data puller: analytic ready, start pushing', 24 | 'data puller: analytic service not ready, return empty result' 25 | ); 26 | 27 | private _grafanaAvailableConsoleReporter = availableReporter( 28 | 'data puller: connected to Grafana', 29 | `data puller: can't connect to Grafana. 
Check GRAFANA_URL` 30 | ); 31 | 32 | private _unitTimes: { [analyticUnitId: string]: number } = {}; 33 | 34 | constructor(private analyticsService: AnalyticsService, private alertService: AlertService) {}; 35 | 36 | public addUnit(analyticUnit: AnalyticUnit.AnalyticUnit) { 37 | console.log(`start pulling analytic unit ${analyticUnit.id}`); 38 | this._runAnalyticUnitPuller(analyticUnit); 39 | } 40 | 41 | public deleteUnit(analyticUnitId: AnalyticUnit.AnalyticUnitId) { 42 | if(_.has(this._unitTimes, analyticUnitId)) { 43 | delete this._unitTimes[analyticUnitId]; 44 | console.log(`analytic unit ${analyticUnitId} deleted from data puller`); 45 | } 46 | } 47 | 48 | private async pullData(unit: AnalyticUnit.AnalyticUnit, from: number, to: number): Promise { 49 | if(unit === undefined) { 50 | throw Error(`data puller: can't pull undefined unit`); 51 | } 52 | const grafanaUrl = getGrafanaUrl(unit.grafanaUrl); 53 | let data = queryByMetric(unit.metric, grafanaUrl, from, to, HASTIC_API_KEY); 54 | return data; 55 | } 56 | 57 | private pushData(unit: AnalyticUnit.AnalyticUnit, data: any) { 58 | if(unit === undefined || data === undefined) { 59 | throw Error(`data puller can't push unit: ${unit} data: ${data}`); 60 | } 61 | let task = new AnalyticsTask(unit.id, AnalyticsTaskType.PUSH, data); 62 | if(_.isEmpty(data.cache)) { 63 | console.log('push empty cache to analytics') 64 | } 65 | try { 66 | this.analyticsService.sendTask(task); 67 | let fromTime = new Date(data.from).toLocaleTimeString(); 68 | let toTime = new Date(data.to).toLocaleTimeString(); 69 | console.log(`pushed ${data.data.length} points to unit: ${unit.id} ${fromTime}-${toTime}`); 70 | } catch(e) { 71 | console.log(`data puller got error while push data ${e.message}`); 72 | } 73 | } 74 | 75 | // TODO: group analyticUnits by panelID and send same dataset for group 76 | public async runPuller() { 77 | const analyticUnits = await AnalyticUnit.findMany({ alert: true }); 78 | 79 | console.log(`starting data puller 
with ${JSON.stringify(analyticUnits.map(u => u.id))} analytic units`); 80 | 81 | _.each(analyticUnits, analyticUnit => { 82 | this._runAnalyticUnitPuller(analyticUnit); 83 | }); 84 | 85 | console.log('data puller started'); 86 | } 87 | 88 | public stopPuller() { 89 | this._unitTimes = {}; 90 | console.log('data puller stopped'); 91 | } 92 | 93 | private async _runAnalyticUnitPuller(analyticUnit: AnalyticUnit.AnalyticUnit) { 94 | console.log(`run data puller for analytic unit ${analyticUnit.id}`); 95 | const time = Date.now(); 96 | this._unitTimes[analyticUnit.id] = time; 97 | 98 | const dataGenerator = this.getDataGenerator( 99 | analyticUnit, PULL_PERIOD_MS 100 | ); 101 | 102 | for await (const data of dataGenerator) { 103 | if(!_.has(this._unitTimes, analyticUnit.id)) { 104 | console.log(`data puller: ${analyticUnit.id} not in _unitTimes, break`); 105 | break; 106 | } 107 | 108 | if(data.values.length === 0) { 109 | continue; 110 | } 111 | 112 | const now = Date.now(); 113 | let payloadValues = data.values; 114 | let cache = await AnalyticUnitCache.findById(analyticUnit.id); 115 | if(cache !== null) { 116 | cache = cache.data 117 | } 118 | const detector = analyticUnit.detectorType; 119 | let payload = { 120 | data: payloadValues, 121 | from: this._unitTimes[analyticUnit.id], 122 | to: now, 123 | analyticUnitType: analyticUnit.type, 124 | detector, 125 | cache 126 | }; 127 | this.pushData(analyticUnit, payload); 128 | this._unitTimes[analyticUnit.id] = now; 129 | } 130 | } 131 | 132 | async * getDataGenerator(analyticUnit: AnalyticUnit.AnalyticUnit, duration: number): 133 | AsyncIterableIterator { 134 | 135 | const getData = async () => { 136 | this._analyticReadyConsoleReporter(this.analyticsService.ready); 137 | if(!this.analyticsService.ready) { 138 | return { 139 | columns: [], 140 | values: [] 141 | }; 142 | } 143 | 144 | try { 145 | const time = this._unitTimes[analyticUnit.id]; 146 | if(time === undefined) { 147 | throw new Error(`Analytic unit 
${analyticUnit.id} is deleted from puller`); 148 | } 149 | const now = Date.now(); 150 | 151 | if(time >= now) { 152 | // TODO: probably we should have ability to set PULL_PERIOD_MS or get it from metric as time step between points 153 | return { 154 | columns: [], 155 | values: [] 156 | }; 157 | } 158 | 159 | const res = await this.pullData(analyticUnit, time, now); 160 | this._grafanaAvailableConsoleReporter(true); 161 | this.alertService.sendGrafanaAvailableWebhook(); 162 | this.alertService.sendDatasourceAvailableWebhook(analyticUnit.metric.datasource.url); 163 | return res; 164 | } catch(err) { 165 | let errorResolved = false; 166 | if(err instanceof GrafanaUnavailable) { 167 | errorResolved = true; 168 | this.alertService.sendGrafanaUnavailableWebhook(); 169 | } else { 170 | this.alertService.sendGrafanaAvailableWebhook(); 171 | } 172 | 173 | if(err instanceof DatasourceUnavailable) { 174 | errorResolved = true; 175 | this.alertService.sendDatasourceUnavailableWebhook(analyticUnit.metric.datasource.url); 176 | } 177 | 178 | if(!errorResolved) { 179 | console.error(`error while pulling data: ${err.message}`); 180 | } 181 | 182 | return { 183 | columns: [], 184 | values: [] 185 | }; 186 | } 187 | } 188 | 189 | const timeout = async () => new Promise( 190 | resolve => setTimeout(resolve, duration) 191 | ); 192 | 193 | while(true) { 194 | yield await getData(); 195 | await timeout(); 196 | } 197 | } 198 | 199 | } 200 | -------------------------------------------------------------------------------- /server/src/models/segment_model.ts: -------------------------------------------------------------------------------- 1 | import { AnalyticUnitId } from './analytic_units'; 2 | import * as AnalyticUnit from '../models/analytic_units'; 3 | import * as AnalyticUnitCache from '../models/analytic_unit_cache_model'; 4 | import { Collection } from '../services/data_service/collection'; 5 | import { DataService } from '../services/data_service'; 6 | 7 | import * as _ from 
'lodash';


const db = DataService.getInstance().makeDBQ(Collection.SEGMENTS);

export type SegmentId = string;

/**
 * A labeled/detected time span [from, to] belonging to one analytic unit.
 */
export class Segment {
  constructor(
    public analyticUnitId: AnalyticUnitId,
    public from: number,
    public to: number,
    public labeled: boolean = false,
    public deleted: boolean = false,
    public id?: SegmentId,
    public message?: string
  ) {
    if(analyticUnitId === undefined) {
      throw new Error('AnalyticUnitId is undefined');
    }
    if(from === undefined) {
      throw new Error('from is undefined');
    }
    if(isNaN(from)) {
      throw new Error('from is NaN');
    }
    if(to === undefined) {
      throw new Error('to is undefined');
    }
    if(isNaN(to)) {
      throw new Error('to is NaN');
    }
  }

  public toObject() {
    return {
      _id: this.id,
      analyticUnitId: this.analyticUnitId,
      from: this.from,
      to: this.to,
      labeled: this.labeled,
      deleted: this.deleted,
      message: this.message
    };
  }

  // like toObject() but with identifying fields blanked, for export templates
  public toTemplate(): any {
    return {
      ...this.toObject(),
      _id: undefined,
      analyticUnitId: undefined
    };
  }

  static fromObject(obj: any): Segment {
    if(obj === undefined) {
      throw new Error('obj is undefined');
    }
    return new Segment(
      obj.analyticUnitId,
      +obj.from, +obj.to,
      obj.labeled, obj.deleted,
      obj._id, obj.message
    );
  }

  public equals(obj: Segment) : boolean {
    // BUGFIX: `labeled` and `deleted` were compared against `this` itself
    // (this.labeled === this.labeled), making those checks always true
    return this.analyticUnitId === obj.analyticUnitId &&
      this.from === obj.from &&
      this.to === obj.to &&
      this.labeled === obj.labeled &&
      this.deleted === obj.deleted;
  }
}

export type FindManyQuery = {
  $or?: any,
  from?: { $gte?: number, $lte?: number },
  to?: { $gte?: number, $lte?: number },
  labeled?: boolean,
  deleted?: boolean
}

export async function findOne(segmentId: SegmentId): Promise {
91 | return db.findOne({ _id: segmentId }); 92 | } 93 | 94 | export async function findMany(id: AnalyticUnitId, query: FindManyQuery): Promise { 95 | var dbQuery: any = { analyticUnitId: id }; 96 | if(query.labeled !== undefined) { 97 | dbQuery.labeled = query.labeled; 98 | } 99 | if(query.deleted !== undefined) { 100 | dbQuery.deleted = query.deleted; 101 | } 102 | if(query.from !== undefined) { 103 | dbQuery.from = query.from; 104 | } 105 | if(query.to !== undefined) { 106 | dbQuery.to = query.to; 107 | } 108 | if(query.$or !== undefined) { 109 | dbQuery.$or = query.$or; 110 | } 111 | let segs = await db.findMany(dbQuery); 112 | if(segs === null) { 113 | return []; 114 | } 115 | return segs.map(Segment.fromObject); 116 | } 117 | 118 | export async function findByAnalyticUnitIds(analyticUnitIds: AnalyticUnitId[]): Promise { 119 | const segments = await db.findMany({ analyticUnitId: { $in: analyticUnitIds } }); 120 | 121 | if(segments === null) { 122 | return []; 123 | } 124 | return segments.map(Segment.fromObject); 125 | } 126 | 127 | /** 128 | * If `from` and `to` are defined: @returns segments intersected with `[from; to]` 129 | * If `to` is `undefined`: @returns segments intersected with `[-inf; from]` 130 | * If `from` is `undefined`: @returns segments intersected with `[to: +inf]` 131 | * If `from` and `to` are undefined: @returns all segments 132 | */ 133 | export async function findIntersectedSegments( 134 | analyticUnitId: AnalyticUnit.AnalyticUnitId, 135 | from?: number, 136 | to?: number 137 | ): Promise { 138 | let query: FindManyQuery = {}; 139 | if(from !== undefined) { 140 | query.to = { $gte: from }; 141 | } 142 | if(to !== undefined) { 143 | query.from = { $lte: to }; 144 | } 145 | return findMany(analyticUnitId, query); 146 | } 147 | 148 | 149 | // TODO: rewrite all this horrible function 150 | // TODO: use utils.segments.IntegerSegmentsSet 151 | /** 152 | * Merges an array of segments with ones existing in the DB 153 | * Inserts resulting 
segments into DB 154 | * @param segments segments to be inserted 155 | * @returns IDs of added and removed segments 156 | */ 157 | export async function mergeAndInsertSegments(segments: Segment[]): Promise<{ 158 | addedIds: SegmentId[], 159 | removedIds: SegmentId[], 160 | anyNewSegments: boolean 161 | }> { 162 | if(_.isEmpty(segments)) { 163 | return { addedIds: [], removedIds: [], anyNewSegments: false }; 164 | } 165 | const analyticUnitId: AnalyticUnitId = segments[0].analyticUnitId; 166 | const unit = await AnalyticUnit.findById(analyticUnitId); 167 | if(unit === null) { 168 | throw new Error('Can`t find analytic unit ' + analyticUnitId); 169 | } 170 | const cache = await AnalyticUnitCache.findById(analyticUnitId); 171 | 172 | const detector = unit.detectorType; 173 | 174 | let segmentIdsToRemove: SegmentId[] = []; 175 | let segmentsToInsert: Segment[] = []; 176 | 177 | let anyNewSegments = false; 178 | for(let segment of segments) { 179 | if(await isIntersectedWithExistingLabeled(segment)) { 180 | continue; 181 | } 182 | 183 | if(!segment.deleted && !segment.labeled) { 184 | if(await isIntersectedWithExistingDeleted(segment)) { 185 | continue; 186 | } 187 | } 188 | 189 | let intersectedSegments: Segment[] = []; 190 | if(detector === AnalyticUnit.DetectorType.PATTERN) { 191 | intersectedSegments = await findMany(analyticUnitId, { 192 | to: { $gte: segment.from }, 193 | from: { $lte: segment.to }, 194 | labeled: segment.labeled, 195 | deleted: segment.deleted 196 | }); 197 | } else { 198 | let intersectionRangeExtension = 0; 199 | if(cache !== null) { 200 | const timeStep = cache.timeStep; 201 | if(timeStep !== undefined) { 202 | intersectionRangeExtension = timeStep; 203 | } 204 | } 205 | intersectedSegments = await findMany(analyticUnitId, { 206 | to: { $gte: segment.from - intersectionRangeExtension }, 207 | from: { $lte: segment.to + intersectionRangeExtension }, 208 | labeled: segment.labeled, 209 | deleted: segment.deleted 210 | }); 211 | } 212 | 213 | 
if(intersectedSegments.length > 0) { 214 | let intersectedIds = intersectedSegments.map(s => s.id); 215 | let minFromSegment = _.minBy(intersectedSegments.concat(segment), s => s.from); 216 | let maxToSegment = _.maxBy(intersectedSegments.concat(segment), s => s.to); 217 | 218 | if(minFromSegment === undefined) { 219 | throw new Error('minFromSegment is undefined'); 220 | } 221 | 222 | if(maxToSegment === undefined) { 223 | throw new Error('maxToSegment is undefined'); 224 | } 225 | 226 | let from = minFromSegment.from; 227 | let to = maxToSegment.to; 228 | let newSegment = Segment.fromObject(segment.toObject()); 229 | newSegment.from = from; 230 | newSegment.to = to; 231 | segmentIdsToRemove = segmentIdsToRemove.concat(_.compact(intersectedIds)); 232 | segmentsToInsert.push(newSegment); 233 | } else { 234 | anyNewSegments = true; 235 | segmentsToInsert.push(segment); 236 | } 237 | } 238 | 239 | await db.removeMany(segmentIdsToRemove); 240 | const addedIds = await db.insertMany(segmentsToInsert.map(s => s.toObject())); 241 | return { 242 | addedIds, 243 | removedIds: segmentIdsToRemove, 244 | anyNewSegments 245 | }; 246 | } 247 | 248 | export async function insertMany(segments: any[]): Promise { 249 | return db.insertMany(segments); 250 | } 251 | 252 | export async function setSegmentsDeleted(ids: SegmentId[]) { 253 | return db.updateMany(ids, { deleted: true, labeled: false }); 254 | } 255 | 256 | export function removeSegments(idsToRemove: SegmentId[]) { 257 | return db.removeMany(idsToRemove); 258 | } 259 | 260 | async function isIntersectedWithExistingLabeled(segment: Segment): Promise { 261 | const intersected = await findMany(segment.analyticUnitId, { 262 | labeled: true, 263 | deleted: false, 264 | from: { $lte: segment.to }, 265 | to: { $gte: segment.from } 266 | }); 267 | 268 | return intersected.length > 0; 269 | } 270 | 271 | async function isIntersectedWithExistingDeleted(segment: Segment): Promise { 272 | const intersected = await 
findMany(segment.analyticUnitId, { 273 | labeled: false, 274 | deleted: true, 275 | from: { $lte: segment.to }, 276 | to: { $gte: segment.from } 277 | }); 278 | 279 | return intersected.length > 0; 280 | } 281 | -------------------------------------------------------------------------------- /server/spec/utils/segments.jest.ts: -------------------------------------------------------------------------------- 1 | import { cutSegmentWithSegments, IntegerSegment, IntegerSegmentsSet } from '../../src/utils/segments'; 2 | 3 | import 'jest'; 4 | 5 | 6 | function IS(from: number, to: number) { 7 | return new IntegerSegment(from, to); 8 | } 9 | 10 | function ISS(xs: [number, number][]) { 11 | return new IntegerSegmentsSet(xs.map(x => IS(x[0], x[1]))); 12 | } 13 | 14 | function cutSpan(from: number, to: number, cuts: [number, number][]): [number, number][] { 15 | return cutSegmentWithSegments( 16 | new IntegerSegment(from, to), 17 | cuts.map(([from, to]) => new IntegerSegment(from, to)) 18 | ).map(({ from, to }) => [from, to] as [number, number]); 19 | } 20 | 21 | describe('IntegerSegment', function() { 22 | it('should throw an error on float from or to', function() { 23 | expect(() => IS(0.1, 0)).toThrow(); 24 | expect(() => IS(1, 5.04)).toThrow(); 25 | expect(() => IS(1, 5)).not.toThrow(); 26 | }); 27 | }); 28 | 29 | describe('IntegerSegment.intersect', function() { 30 | it('return undefined if segments don`t intersect', function() { 31 | expect(IS(4, 5).insersect(IS(6, 10))).toEqual(undefined); 32 | expect(IS(7, 10).insersect(IS(1, 3))).toEqual(undefined); 33 | }); 34 | 35 | it('return a point when borders intersect', function() { 36 | expect(IS(4, 5).insersect(IS(5, 6))).toEqual(IS(5, 5)); 37 | expect(IS(4, 5).insersect(IS(4, 4))).toEqual(IS(4, 4)); 38 | }); 39 | }); 40 | 41 | describe('IntegerSegmentSet constructor', function() { 42 | it('can construct from empty segments list', function() { 43 | expect(() => ISS([])).not.toThrow(); 44 | }); 45 | 46 | it('should sort 
segments', function() { 47 | expect(ISS([[10, 15], [5, 8]]).segments).toEqual([IS(5, 8), IS(10, 15)]); 48 | expect(ISS([[10, 15], [-Infinity, 8]]).segments).toEqual([IS(-Infinity, 8), IS(10, 15)]); 49 | expect(ISS([[10, Infinity], [-Infinity, 8]]).segments).toEqual([IS(-Infinity, 8), IS(10, Infinity)]); 50 | }); 51 | 52 | it('should merge segments', function() { 53 | expect(ISS([[5, 10], [7, 20]]).segments).toEqual([IS(5, 20)]); // it's because 7 <= 10 54 | expect(ISS([[5, 10], [10, 20]]).segments).toEqual([IS(5, 20)]); 55 | expect(ISS([[5, 10], [11, 20]]).segments).toEqual([IS(5, 20)]); // it's because [..., 10], [11, ...], 56 | // there is nothing between 10 and 11 57 | expect(ISS([[3, 11], [4, 10]]).segments).toEqual([IS(3, 11)]); 58 | }); 59 | }); 60 | 61 | describe('IntegerSegmentSet.inversed', function() { 62 | it('should return Infinite segment whes set is empty', function() { 63 | let setA = ISS([]); 64 | expect(setA.inversed()).toEqual(ISS([[-Infinity, Infinity]])); 65 | }); 66 | 67 | it('should return empty segment whes set is infinite', function() { 68 | let setA = ISS([[-Infinity, Infinity]]); 69 | expect(setA.inversed()).toEqual(ISS([])); 70 | }); 71 | 72 | it('should inverse a point', function() { 73 | expect(ISS([[4, 4]]).inversed()).toEqual(ISS([[-Infinity, 3], [5, Infinity]])); 74 | }); 75 | 76 | it('should inverse basic cases', function() { 77 | expect(ISS([[3, 10]]).inversed()).toEqual(ISS([[-Infinity, 2], [11, Infinity]])); 78 | expect(ISS([[3, 10], [15, 20]]).inversed()).toEqual(ISS([[-Infinity, 2], [11, 14] , [21, Infinity]])); 79 | }); 80 | 81 | it('should inverse infinites', function() { 82 | expect(ISS([[3, Infinity]]).inversed()).toEqual(ISS([[-Infinity, 2]])); 83 | expect(ISS([[-Infinity, 3]]).inversed()).toEqual(ISS([[4, Infinity]])); 84 | }); 85 | 86 | }); 87 | 88 | describe('IntegerSegmentSet.intersected', function() { 89 | it('should return empty set if one of intersection is empty', function() { 90 | let setA = ISS([]); 91 | let setB 
= ISS([[1, 5]]); 92 | expect(setA.intersect(setB).segments).toEqual([]); 93 | expect(setB.intersect(setA).segments).toEqual([]); 94 | }); 95 | 96 | it('should intersect two segments', function() { 97 | let setA = ISS([[2, 5]]); 98 | let setB = ISS([[1, 4]]); 99 | expect(setA.intersect(setB)).toEqual(ISS([[2, 4]])); 100 | }); 101 | 102 | it('should intersect basic cases', function() { 103 | let setA = ISS([[2, 5], [6, 10]]); 104 | let setB = ISS([[1, 9]]); 105 | let setC = ISS([[2, 5], [6, 10]]); 106 | let setD = ISS([[4, 4], [10, 10]]); 107 | let setE = ISS([[4, 4], [10, 10], [12, 15]]); 108 | expect(setA.intersect(setB)).toEqual(ISS([[2, 5], [6, 9]])); 109 | expect(setA.intersect(setC)).toEqual(ISS([[2, 5], [6, 10]])); 110 | expect(setA.intersect(setD)).toEqual(ISS([[4, 4], [10, 10]])); 111 | expect(setA.intersect(setE)).toEqual(ISS([[4, 4], [10, 10]])); 112 | expect(setE.intersect(setA)).toEqual(ISS([[4, 4], [10, 10]])); 113 | }); 114 | 115 | }); 116 | 117 | describe('cutSpanWithSpans', function() { 118 | 119 | it('should handle empty input spans list case', function() { 120 | expect(cutSpan(4, 10, [])).toEqual([[4, 10]]); 121 | }); 122 | 123 | it('should handle works fine one point results', function() { 124 | expect(cutSpan(1, 10, [[2, 10]])).toEqual([[1, 1]]); 125 | expect(cutSpan(1, 10, [[2, 11]])).toEqual([[1, 1]]); 126 | expect(cutSpan(1, 10, [[1, 9]])).toEqual([[10, 10]]); 127 | expect(cutSpan(1, 10, [[0, 9]])).toEqual([[10, 10]]); 128 | expect(cutSpan(1, 10, [[1, 4], [6, 10]])).toEqual([[5, 5]]); 129 | expect(cutSpan(1, 10, [[2, 9]])).toEqual([[1, 1], [10, 10]]); 130 | }); 131 | 132 | it('should throw error is cut contains float border', function() { 133 | expect(() => cutSpan(0, 10, [[0.1, 5]])).toThrow() 134 | expect(() => cutSpan(1, 10, [[0.9, 0.0]])).toThrow(); 135 | expect(() => cutSpan(0.5, 10, [[1, 5]])).toThrow(); 136 | }); 137 | 138 | it('should handle one-point cuts', function() { 139 | expect(cutSpan(1, 10, [[5, 5]])).toEqual([[1, 4], [6, 
10]]); 140 | expect(cutSpan(1, 10, [[1, 1]])).toEqual([[2, 10]]); 141 | expect(cutSpan(1, 10, [[10, 10]])).toEqual([[1, 9]]); 142 | expect(cutSpan(1, 10, [[11, 11]])).toEqual([[1, 10]]); 143 | expect(cutSpan(1, 15, [[11, 11], [12, 12]])).toEqual([[1, 10], [13, 15]]); 144 | }); 145 | 146 | it('should handle basic cases', function() { 147 | let cutSpans = [[3, 4], [6, 8], [11, 20]] as [number, number][]; 148 | 149 | expect(cutSpan(0, 11, cutSpans)).toEqual([[0, 2], [5, 5], [9, 10]]); 150 | expect(cutSpan(5, 11, cutSpans)).toEqual([[5, 5], [9, 10]]); 151 | expect(cutSpan(4, 10, cutSpans)).toEqual([[5, 5], [9, 10]]); 152 | expect(cutSpan(5, 10, cutSpans)).toEqual([[5, 5], [9, 10]]); 153 | expect(cutSpan(4, 20, cutSpans)).toEqual([[5, 5], [9, 10]]); 154 | expect(cutSpan(4, 21, cutSpans)).toEqual([[5, 5], [9, 10], [21, 21]]); 155 | expect(cutSpan(2, 20, cutSpans)).toEqual([[2, 2], [5, 5], [9, 10]]); 156 | expect(cutSpan(2, 21, cutSpans)).toEqual([[2, 2], [5, 5], [9, 10], [21, 21]]); 157 | expect(cutSpan(3, 11, cutSpans)).toEqual([[5, 5], [9, 10]]); 158 | expect(cutSpan(3, 20, cutSpans)).toEqual([[5, 5], [9, 10]]); 159 | expect(cutSpan(4, 7, [[3, 5], [6, 8]])).toEqual([]); 160 | }); 161 | 162 | it('should handle infitie span and infinite cuts', function() { 163 | expect(cutSpan(0, Infinity, [[5, 10]])).toEqual([[0, 4], [11, Infinity]]); 164 | expect(cutSpan(0, 6, [[0, Infinity]])).toEqual([]); 165 | expect(cutSpan(0, 6, [[2, Infinity]])).toEqual([[0, 1]]); 166 | expect(cutSpan(-Infinity, Infinity, [[-Infinity, Infinity]])).toEqual([]); 167 | }); 168 | 169 | it('should handle case when from and to are inside of one big span', function() { 170 | expect(cutSpan(4, 10, [[1, 20]])).toEqual([]); 171 | expect(cutSpan(4, 10, [[1, 10]])).toEqual([]); 172 | expect(cutSpan(4, 10, [[4, 20]])).toEqual([]); 173 | expect(cutSpan(4, 10, [[4, 10]])).toEqual([]); 174 | }); 175 | 176 | it('should be ready to get not-sorted cuts', function() { 177 | expect(cutSpan(0, 20, [[3, 5], [1, 
2]])).toEqual([[0, 0], [6, 20]]); 178 | expect(cutSpan(0, 20, [[3, 5], [1, 2], [0, 0]])).toEqual([[6, 20]]); 179 | }); 180 | 181 | it('should be ready to get overlayed cuts', function() { 182 | expect(cutSpan(0, 20, [[3, 5], [4, 10]])).toEqual([[0, 2], [11, 20]]); 183 | expect(cutSpan(0, 20, [[3, 9], [4, 9]])).toEqual([[0, 2], [10, 20]]); 184 | expect(cutSpan(0, 20, [[3, 11], [4, 10]])).toEqual([[0, 2], [12, 20]]); 185 | expect(cutSpan(0, 20, [[3, 11], [3, 12]])).toEqual([[0, 2], [13, 20]]); 186 | expect(cutSpan(0, 20, [[3, 11], [3, 12], [3, 10], [3, 15], [3, 14]])).toEqual([[0, 2], [16, 20]]); 187 | expect(cutSpan(0, 20, [[2, 11], [3, 12]])).toEqual([[0, 1], [13, 20]]); 188 | expect(cutSpan(0, 20, [[2, 15], [3, 12]])).toEqual([[0, 1], [16, 20]]); 189 | expect(cutSpan(0, 20, [[2, 15], [3, 12], [1, 18]])).toEqual([[0, 0], [19, 20]]); 190 | expect(cutSpan(0, 20, [[2, 15], [3, Infinity], [1, 18]])).toEqual([[0, 0]]); 191 | expect(cutSpan(0, 20, [[3, 3], [3, Infinity]])).toEqual([[0, 2]]); 192 | expect(cutSpan(0, 20, [[3, 3], [3, Infinity], [3, 3]])).toEqual([[0, 2]]); 193 | expect(cutSpan(0, 20, [[3, 3], [3, Infinity], [3, 3], [4, 4]])).toEqual([[0, 2]]); 194 | expect(cutSpan(0, 20, [[3, 3], [3, Infinity], [3, 3], [4, 4], [3, 5]])).toEqual([[0, 2]]); 195 | expect(cutSpan(-Infinity, Infinity, [[3, 3], [3, Infinity], [3, 3], [4, 4], [3, 5]])).toEqual([[-Infinity, 2]]); 196 | }); 197 | 198 | it('should handle cuts from point span', function() { 199 | expect(cutSpan(1, 1, [[1, 1]])).toEqual([]); 200 | expect(cutSpan(1, 1, [[0, 2]])).toEqual([]); 201 | expect(cutSpan(1, 1, [[0, 1]])).toEqual([]); 202 | expect(cutSpan(1, 1, [[1, 2]])).toEqual([]); 203 | }); 204 | 205 | }); 206 | -------------------------------------------------------------------------------- /server/src/services/analytics_service.ts: -------------------------------------------------------------------------------- 1 | import { AnalyticsTask, AnalyticsTaskType } from '../models/analytics_task_model'; 2 | 
import { AnalyticsMessageMethod, AnalyticsMessage } from '../models/analytics_message_model'; 3 | import { WebhookType } from '../services/notification_service'; 4 | import * as config from '../config'; 5 | import { AlertService } from './alert_service'; 6 | 7 | import * as WebSocket from 'ws'; 8 | 9 | import * as childProcess from 'child_process' 10 | import * as fs from 'fs'; 11 | import * as path from 'path'; 12 | import * as _ from 'lodash'; 13 | import { HASTIC_SERVER_URL } from '../config'; 14 | 15 | 16 | export class AnalyticsService { 17 | 18 | private _alertService = new AlertService(); 19 | private _socket_server: WebSocket.Server; 20 | private _socket_connection: WebSocket = null; 21 | private _ready: boolean = false; 22 | private _lastAlive: Date = null; 23 | private _pingResponded = false; 24 | private _analyticsPinger: NodeJS.Timeout = null; 25 | private _isClosed = false; 26 | private _productionMode = false; 27 | private _inDocker = false; 28 | private _queue: AnalyticsTask[] = []; 29 | 30 | constructor(private _onMessage: (message: AnalyticsMessage) => void) { 31 | this._productionMode = config.PRODUCTION_MODE; 32 | this._inDocker = config.INSIDE_DOCKER; 33 | this._init(); 34 | } 35 | 36 | public async sendTask(task: AnalyticsTask, fromQueue = false): Promise { 37 | if(!this._ready) { 38 | console.log('Analytics is not ready'); 39 | if(!fromQueue) { 40 | // TODO: add to db? 41 | this._queue.push(task); 42 | console.log('Adding task to queue'); 43 | } 44 | return; 45 | } 46 | let method = task.type === AnalyticsTaskType.PUSH ? 
47 | AnalyticsMessageMethod.DATA : AnalyticsMessageMethod.TASK 48 | let message = new AnalyticsMessage( 49 | method, 50 | task.toObject() 51 | ); 52 | return this.sendMessage(message); 53 | } 54 | 55 | public async sendMessage(message: AnalyticsMessage): Promise { 56 | let strMessage = JSON.stringify(message); 57 | return this.sendText(strMessage); 58 | } 59 | 60 | public async sendText(text: string): Promise { 61 | return new Promise((resolve, reject) => { 62 | if(this._socket_connection === null) { 63 | reject('Can`t send because analytics is not connected'); 64 | } 65 | this._socket_connection.send(text, undefined, (err: any) => { 66 | if(err) { 67 | console.trace(`got error while sending ${err}`); 68 | reject(err); 69 | } else { 70 | resolve(); 71 | } 72 | }); 73 | }); 74 | } 75 | 76 | public get ready(): boolean { return this._ready; } 77 | public get lastAlive(): Date { return this._lastAlive; } 78 | 79 | private async _init() { 80 | this._socket_server = new WebSocket.Server({ host: HASTIC_SERVER_URL.hostname, port: +HASTIC_SERVER_URL.port }); 81 | 82 | // TODO: move this to config OR use existing http server 83 | console.log("Creating websocket server ... %s", HASTIC_SERVER_URL.origin); 84 | 85 | this._socket_server.on("connection", this._onNewConnection.bind(this)); 86 | // TODO: handle connection drop 87 | 88 | if(this._productionMode && !this._inDocker) { 89 | console.log('Creating analytics process...'); 90 | try { 91 | var cp = await AnalyticsService._runAnalyticsProcess(); 92 | } catch(error) { 93 | console.error('Can`t run analytics process: %s', error); 94 | return; 95 | } 96 | console.log('Analytics creating successful, pid: %s', cp.pid); 97 | } 98 | 99 | } 100 | 101 | /** 102 | * Spawns analytics process. Reads process stderr and fails if it isn`t empty. 103 | * No need to stop the process later. 
104 | * 105 | * @returns Creaded child process 106 | * @throws Process start error or first exception during python start 107 | */ 108 | private static async _runAnalyticsProcess(): Promise { 109 | let cp: childProcess.ChildProcess; 110 | let cpOptions = { 111 | cwd: config.ANALYTICS_PATH, 112 | env: { 113 | ...process.env, 114 | HASTIC_SERVER_URL: config.HASTIC_SERVER_URL.origin 115 | } 116 | }; 117 | 118 | if(fs.existsSync(path.join(config.ANALYTICS_PATH, 'dist/server/server'))) { 119 | console.log('dist/server/server'); 120 | cp = childProcess.spawn('dist/server/server', [], cpOptions); 121 | } else { 122 | const ANALYTICS_SERVER_PATH = path.join('bin', 'server'); 123 | console.log('python3 ' + ANALYTICS_SERVER_PATH); 124 | // If compiled analytics script doesn't exist - fallback to regular python 125 | console.log(config.ANALYTICS_PATH); 126 | // maybe starting it via bash better that put python3 127 | cp = childProcess.spawn('python3', [ANALYTICS_SERVER_PATH], cpOptions); 128 | } 129 | 130 | if(cp.pid === undefined) { 131 | return new Promise((resolve, reject) => { 132 | cp.on('error', reject); 133 | }); 134 | } 135 | 136 | return new Promise((resolve, reject) => { 137 | var resolved = false; 138 | 139 | cp.stdout.on('data', (data) => { 140 | console.log(data.toString()); 141 | if(resolved) { 142 | return; 143 | } 144 | resolved = true; 145 | resolve(cp); 146 | }); 147 | 148 | cp.stderr.on('data', (data) => { 149 | console.error(data.toString()); 150 | if(resolved) { 151 | return; 152 | } 153 | resolved = true; 154 | reject(data); 155 | }); 156 | }); 157 | 158 | } 159 | 160 | private _onAnalyticsUp() { 161 | const msg = 'Analytics is up'; 162 | for(let i in _.range(this._queue.length)) { 163 | // TODO: check if task is done before removing it from the queue 164 | this.sendTask(this._queue.shift(), true); 165 | } 166 | console.log(msg); 167 | this._alertService.sendMessage(msg, WebhookType.RECOVERY); 168 | } 169 | 170 | private _onAnalyticsMessage(data: any) { 
171 | let text = data.toString(); 172 | if(text === 'PONG') { 173 | this._pingResponded = true; 174 | this._lastAlive = new Date(Date.now()); 175 | if(!this._ready) { 176 | this._ready = true; 177 | this._onAnalyticsUp(); 178 | } 179 | return; 180 | } 181 | 182 | let response; 183 | try { 184 | response = JSON.parse(text); 185 | } catch (e) { 186 | console.error('Can`t parse response from analytics as json:'); 187 | console.error(text); 188 | throw new Error('Can`t parse response from analytics as json, see log'); 189 | } 190 | this._onMessage(AnalyticsMessage.fromObject(response)); 191 | } 192 | 193 | // cb(this: WebSocket, socket: WebSocket, request: http.IncomingMessage) 194 | private async _onNewConnection(connection: WebSocket) { 195 | if(this._socket_connection !== null) { 196 | // TODO: use buildin websocket validator 197 | console.error('There is already an analytics connection. Only one connection is supported.'); 198 | // we send error and then close connection 199 | connection.send('EALREADYEXISTING', () => { connection.close(); }); 200 | return; 201 | } 202 | // TODO: log connection id 203 | console.log('Got new analytic connection'); 204 | this._socket_connection = connection; 205 | this._socket_connection.on("message", this._onAnalyticsMessage.bind(this)); 206 | // TODO: implement closing 207 | this._socket_connection.on("close", this._onAnalyticsDown.bind(this)); 208 | await this.sendText('hey'); 209 | 210 | console.log('Start analytics pinger...'); 211 | // TODO: use websockets buildin pinger 212 | this._runAlalyticsPinger(); 213 | console.log('Analytics pinger started'); 214 | } 215 | 216 | private async _onAnalyticsDown() { 217 | if(!this._ready) { 218 | // it's possible that ping is too slow and connection is closed 219 | return; 220 | } 221 | this._stopAlalyticsPinger(); 222 | if(this._socket_connection !== null) { 223 | this._socket_connection.close(); 224 | this._socket_connection = null; 225 | } 226 | this._ready = false; 227 | const msg = 
'Analytics is down'; 228 | console.log(msg); 229 | this._alertService.sendMessage(msg, WebhookType.FAILURE); 230 | if(this._productionMode && !this._inDocker) { 231 | await AnalyticsService._runAnalyticsProcess(); 232 | } 233 | } 234 | 235 | private _runAlalyticsPinger() { 236 | this._analyticsPinger = setInterval(() => { 237 | if(this._isClosed) { 238 | return; 239 | } 240 | if(!this._pingResponded && this._ready) { 241 | this._onAnalyticsDown(); 242 | } 243 | this._pingResponded = false; 244 | // TODO: set life limit for this ping 245 | this.sendText('PING'); 246 | }, config.ANALYTICS_PING_INTERVAL); 247 | } 248 | 249 | private _stopAlalyticsPinger() { 250 | if(this._analyticsPinger !== null) { 251 | clearInterval(this._analyticsPinger); 252 | } 253 | this._analyticsPinger = null; 254 | } 255 | 256 | public get queueLength() { 257 | return this._queue.length; 258 | } 259 | 260 | public close() { 261 | this._isClosed = true; 262 | console.log('Terminating analytics service...'); 263 | this._stopAlalyticsPinger(); 264 | if(this._socket_connection !== null) { 265 | this._socket_connection.close(); 266 | } 267 | console.log('Termination successful'); 268 | } 269 | 270 | } 271 | -------------------------------------------------------------------------------- /server/src/services/alert_service.ts: -------------------------------------------------------------------------------- 1 | import { getNotifier, AnalyticMeta, WebhookType, Notification, MetaInfo } from './notification_service'; 2 | import * as AnalyticUnit from '../models/analytic_units'; 3 | import { Segment } from '../models/segment_model'; 4 | import { availableReporter } from '../utils/reporter'; 5 | import { toTimeZone } from '../utils/time'; 6 | import { getGrafanaUrl } from '../utils/grafana'; 7 | import { ORG_ID, HASTIC_API_KEY, HASTIC_ALERT_IMAGE } from '../config'; 8 | 9 | import axios from 'axios'; 10 | import * as _ from 'lodash'; 11 | 12 | 13 | const Notifier = getNotifier(); 14 | export class Alert 
{ 15 | public enabled = true; 16 | constructor(protected analyticUnit: AnalyticUnit.AnalyticUnit) {}; 17 | public receive(segment: Segment) { 18 | if(this.enabled) { 19 | this.send(segment); 20 | } 21 | }; 22 | 23 | protected async send(segment) { 24 | const notification = await this.generateNotification(segment); 25 | try { 26 | console.log('sending a notification...'); 27 | await Notifier.sendNotification(notification); 28 | console.log('notification is successfully sent'); 29 | } catch(error) { 30 | console.error(`can't send notification ${error}`); 31 | }; 32 | } 33 | 34 | protected async generateNotification(segment: Segment): Promise { 35 | const meta = this.makeMeta(segment); 36 | const text = this.makeMessage(meta); 37 | let result: Notification = { meta, text }; 38 | if(HASTIC_ALERT_IMAGE) { 39 | try { 40 | console.log('Trying to load image for notification'); 41 | const image = await this.loadImage(); 42 | result.image = image; 43 | } catch(err) { 44 | console.error(`Can't load alert image: ${err}. 
Check that API key has admin permissions`); 45 | } 46 | } 47 | 48 | return result; 49 | } 50 | 51 | protected async loadImage(): Promise { 52 | const headers = { Authorization: `Bearer ${HASTIC_API_KEY}` }; 53 | const dashdoardId = this.analyticUnit.panelId.split('/')[0]; 54 | const panelId = this.analyticUnit.panelId.split('/')[1]; 55 | const grafanaUrl = getGrafanaUrl(this.analyticUnit.grafanaUrl); 56 | const dashboardApiURL = `${grafanaUrl}/api/dashboards/uid/${dashdoardId}`; 57 | const dashboardInfo: any = await axios.get(dashboardApiURL, { headers }); 58 | const dashboardName = _.last(dashboardInfo.data.meta.url.split('/')); 59 | const renderUrl = `${grafanaUrl}/render/d-solo/${dashdoardId}/${dashboardName}`; 60 | const params = { 61 | panelId, 62 | ordId: ORG_ID, 63 | apiRendering: true, 64 | analyticUnitId: this.analyticUnit.id 65 | }; 66 | const response = await axios.get(renderUrl, { 67 | params, 68 | headers, 69 | responseType: 'arraybuffer' 70 | }); 71 | return new Buffer(response.data, 'binary'); 72 | } 73 | 74 | protected makeMeta(segment: Segment): AnalyticMeta { 75 | const dashdoardId = this.analyticUnit.panelId.split('/')[0]; 76 | const panelId = this.analyticUnit.panelId.split('/')[1]; 77 | const grafanaUrl = getGrafanaUrl(this.analyticUnit.grafanaUrl); 78 | const notificationUrl = `${grafanaUrl}/d/${dashdoardId}?panelId=${panelId}&edit=true&fullscreen=true?orgId=${ORG_ID}`; 79 | 80 | let alert: AnalyticMeta = { 81 | type: WebhookType.DETECT, 82 | analyticUnitType: this.analyticUnit.type, 83 | analyticUnitName: this.analyticUnit.name, 84 | analyticUnitId: this.analyticUnit.id, 85 | grafanaUrl: notificationUrl, 86 | from: segment.from, 87 | to: segment.to, 88 | message: segment.message 89 | }; 90 | 91 | return alert; 92 | } 93 | 94 | protected makeMessage(meta: AnalyticMeta): string { 95 | const localTimeFrom = toTimeZone(meta.from); 96 | const localTimeTo = toTimeZone(meta.to); 97 | return [ 98 | `[${meta.analyticUnitType.toUpperCase()} ALERTING] 
${meta.analyticUnitName}`, 99 | `URL: ${meta.grafanaUrl}`, 100 | ``, 101 | `From: ${localTimeFrom}`, 102 | `To: ${localTimeTo}`, 103 | `ID: ${meta.analyticUnitId}`, 104 | `Message: ${meta.message}` 105 | ].join('\n'); 106 | } 107 | } 108 | 109 | class PatternAlert extends Alert { 110 | 111 | private lastSentSegment: Segment; 112 | 113 | public receive(segment: Segment) { 114 | if(this.lastSentSegment === undefined || !segment.equals(this.lastSentSegment) ) { 115 | this.lastSentSegment = segment; 116 | if(this.enabled) { 117 | this.send(segment); 118 | } 119 | } 120 | } 121 | 122 | protected makeMessage(meta: AnalyticMeta): string { 123 | const localTimeFrom = toTimeZone(meta.from); 124 | const localTimeTo = toTimeZone(meta.to); 125 | return [ 126 | `[PATTERN DETECTED] ${meta.analyticUnitName}`, 127 | `URL: ${meta.grafanaUrl}`, 128 | ``, 129 | `From: ${localTimeFrom}`, 130 | `To: ${localTimeTo}`, 131 | `ID: ${meta.analyticUnitId}` 132 | ].join('\n'); 133 | } 134 | }; 135 | 136 | 137 | class ThresholdAlert extends Alert { 138 | // TODO: configure threshold timing in panel like Grafana's alerts (`evaluate` time, `for` time) 139 | // TODO: make events for start and end of threshold 140 | EXPIRE_PERIOD_MS = 60000; 141 | lastOccurence = 0; 142 | 143 | public receive(segment: Segment) { 144 | if(this.lastOccurence === 0) { 145 | this.lastOccurence = segment.from; 146 | if(this.enabled) { 147 | this.send(segment); 148 | } 149 | } else { 150 | 151 | if(segment.from - this.lastOccurence > this.EXPIRE_PERIOD_MS) { 152 | if(this.enabled) { 153 | console.log(`time between threshold occurences ${segment.from - this.lastOccurence}ms, send alert`); 154 | this.send(segment); 155 | } 156 | } 157 | 158 | this.lastOccurence = segment.from; 159 | } 160 | } 161 | 162 | protected makeMessage(meta: AnalyticMeta): string { 163 | const localTimeFrom = toTimeZone(meta.from); 164 | let message = [ 165 | `[THRESHOLD ALERTING] ${meta.analyticUnitName}`, 166 | `URL: ${meta.grafanaUrl}`, 167 | 
``, 168 | `Starts at: ${localTimeFrom}`, 169 | `ID: ${meta.analyticUnitId}` 170 | ].join('\n'); 171 | 172 | if(meta.message !== undefined) { 173 | message += meta.message; 174 | } 175 | return message; 176 | } 177 | } 178 | 179 | 180 | export class AlertService { 181 | 182 | // TODO: object -> Map 183 | private _alerts: { [id: string]: Alert }; 184 | private _alertingEnable: boolean; 185 | private _grafanaAvailableReporter: Function; 186 | private _datasourceAvailableReporters: Map; 187 | 188 | constructor() { 189 | this._alerts = {}; 190 | this._datasourceAvailableReporters = new Map(); 191 | 192 | this._grafanaAvailableReporter = availableReporter( 193 | ['[OK] Grafana available', WebhookType.RECOVERY], 194 | ['[FAILURE] Grafana unavailable for pulling data', WebhookType.FAILURE], 195 | this.sendMessage, 196 | this.sendMessage 197 | ); 198 | } 199 | 200 | public receiveAlert(analyticUnit: AnalyticUnit.AnalyticUnit, segment: Segment) { 201 | if(!this._alertingEnable) { 202 | return; 203 | } 204 | 205 | let id = analyticUnit.id; 206 | 207 | if(!_.has(this._alerts, id)) { 208 | this.addAnalyticUnit(analyticUnit); 209 | } 210 | 211 | this._alerts[id].receive(segment); 212 | }; 213 | 214 | public sendMessage(text: string, type: WebhookType, optionalInfo = {}) { 215 | const now = Date.now(); 216 | const infoAlert: MetaInfo = { 217 | params: optionalInfo, 218 | type, 219 | from: now, 220 | to: now 221 | } 222 | 223 | console.log('sending a notification...'); 224 | Notifier.sendNotification({ text, meta: infoAlert }).catch((err) => { 225 | console.error(`can't send message ${err.message}`); 226 | }); 227 | } 228 | 229 | public sendGrafanaAvailableWebhook() { 230 | this._grafanaAvailableReporter(true); 231 | } 232 | 233 | public sendGrafanaUnavailableWebhook() { 234 | this._grafanaAvailableReporter(false); 235 | } 236 | 237 | public sendDatasourceAvailableWebhook(url: string) { 238 | const reporter = this._getDatasourceAvailableReporter(url); 239 | reporter(true); 240 | } 
241 | 242 | public sendDatasourceUnavailableWebhook(url: string) { 243 | const reporter = this._getDatasourceAvailableReporter(url); 244 | reporter(false); 245 | } 246 | 247 | public addAnalyticUnit(analyticUnit: AnalyticUnit.AnalyticUnit) { 248 | let alertsType = {}; 249 | 250 | alertsType[AnalyticUnit.DetectorType.THRESHOLD] = ThresholdAlert; 251 | alertsType[AnalyticUnit.DetectorType.PATTERN] = PatternAlert; 252 | alertsType[AnalyticUnit.DetectorType.ANOMALY] = Alert; 253 | 254 | this._alerts[analyticUnit.id] = new alertsType[analyticUnit.detectorType](analyticUnit); 255 | } 256 | 257 | public removeAnalyticUnit(analyticUnitId: AnalyticUnit.AnalyticUnitId) { 258 | delete this._alerts[analyticUnitId]; 259 | } 260 | 261 | public stopAlerting() { 262 | this._alertingEnable = false; 263 | } 264 | 265 | public startAlerting() { 266 | this._alertingEnable = true; 267 | } 268 | 269 | private _getDatasourceAvailableReporter(url: string) { 270 | if(!_.has(this._datasourceAvailableReporters, url)) { 271 | this._datasourceAvailableReporters[url] = availableReporter( 272 | [`[OK] Datasource ${url} available`, WebhookType.RECOVERY], 273 | [`[FAILURE] Datasource ${url} unavailable`, WebhookType.FAILURE], 274 | this.sendMessage, 275 | this.sendMessage 276 | ); 277 | } 278 | return this._datasourceAvailableReporters[url]; 279 | } 280 | } 281 | -------------------------------------------------------------------------------- /tools/prometheus-hastic-exporter/Hastic Exporter dashboard.json: -------------------------------------------------------------------------------- 1 | { 2 | "__inputs": [ 3 | { 4 | "name": "DS_PROMETHEUS", 5 | "label": "Prometheus", 6 | "description": "", 7 | "type": "datasource", 8 | "pluginId": "prometheus", 9 | "pluginName": "Prometheus" 10 | } 11 | ], 12 | "__requires": [ 13 | { 14 | "type": "panel", 15 | "id": "corpglory-hastic-graph-panel", 16 | "name": "Hastic Graph", 17 | "version": "" 18 | }, 19 | { 20 | "type": "grafana", 21 | "id": "grafana", 22 | 
"name": "Grafana", 23 | "version": "6.2.0" 24 | }, 25 | { 26 | "type": "panel", 27 | "id": "graph", 28 | "name": "Graph", 29 | "version": "" 30 | }, 31 | { 32 | "type": "datasource", 33 | "id": "prometheus", 34 | "name": "Prometheus", 35 | "version": "1.0.0" 36 | }, 37 | { 38 | "type": "panel", 39 | "id": "table", 40 | "name": "Table", 41 | "version": "" 42 | } 43 | ], 44 | "annotations": { 45 | "list": [ 46 | { 47 | "builtIn": 1, 48 | "datasource": "-- Grafana --", 49 | "enable": true, 50 | "hide": true, 51 | "iconColor": "rgba(0, 211, 255, 1)", 52 | "name": "Annotations & Alerts", 53 | "type": "dashboard" 54 | } 55 | ] 56 | }, 57 | "editable": true, 58 | "gnetId": null, 59 | "graphTooltip": 1, 60 | "id": null, 61 | "iteration": 1559660286055, 62 | "links": [], 63 | "panels": [ 64 | { 65 | "aliasColors": {}, 66 | "bars": true, 67 | "dashLength": 10, 68 | "dashes": false, 69 | "datasource": "${DS_PROMETHEUS}", 70 | "fill": 1, 71 | "gridPos": { 72 | "h": 5, 73 | "w": 10, 74 | "x": 0, 75 | "y": 0 76 | }, 77 | "id": 25, 78 | "legend": { 79 | "avg": false, 80 | "current": false, 81 | "max": false, 82 | "min": false, 83 | "show": true, 84 | "total": false, 85 | "values": false 86 | }, 87 | "lines": false, 88 | "linewidth": 1, 89 | "links": [], 90 | "nullPointMode": "null", 91 | "options": {}, 92 | "percentage": false, 93 | "pointradius": 5, 94 | "points": false, 95 | "renderer": "flot", 96 | "seriesOverrides": [], 97 | "spaceLength": 10, 98 | "stack": true, 99 | "steppedLine": false, 100 | "targets": [ 101 | { 102 | "expr": "up{job=\"hastic-exporter\"}", 103 | "format": "time_series", 104 | "intervalFactor": 1, 105 | "legendFormat": "{{ instance }}", 106 | "refId": "A" 107 | } 108 | ], 109 | "thresholds": [], 110 | "timeFrom": null, 111 | "timeRegions": [], 112 | "timeShift": null, 113 | "title": "Hastic Exporter Alive", 114 | "tooltip": { 115 | "shared": true, 116 | "sort": 0, 117 | "value_type": "individual" 118 | }, 119 | "type": "graph", 120 | "xaxis": { 121 | 
"buckets": null, 122 | "mode": "time", 123 | "name": null, 124 | "show": true, 125 | "values": [] 126 | }, 127 | "yaxes": [ 128 | { 129 | "format": "short", 130 | "label": null, 131 | "logBase": 1, 132 | "max": null, 133 | "min": null, 134 | "show": true 135 | }, 136 | { 137 | "format": "short", 138 | "label": null, 139 | "logBase": 1, 140 | "max": null, 141 | "min": null, 142 | "show": true 143 | } 144 | ], 145 | "yaxis": { 146 | "align": false, 147 | "alignLevel": null 148 | } 149 | }, 150 | { 151 | "columns": [], 152 | "datasource": "${DS_PROMETHEUS}", 153 | "fontSize": "100%", 154 | "gridPos": { 155 | "h": 5, 156 | "w": 7, 157 | "x": 10, 158 | "y": 0 159 | }, 160 | "id": 33, 161 | "links": [], 162 | "options": {}, 163 | "pageSize": null, 164 | "scroll": true, 165 | "showHeader": true, 166 | "sort": { 167 | "col": 0, 168 | "desc": true 169 | }, 170 | "styles": [ 171 | { 172 | "alias": "Time", 173 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 174 | "pattern": "Time", 175 | "type": "date" 176 | }, 177 | { 178 | "alias": "", 179 | "colorMode": null, 180 | "colors": [ 181 | "rgba(245, 54, 54, 0.9)", 182 | "rgba(237, 129, 40, 0.89)", 183 | "rgba(50, 172, 45, 0.97)" 184 | ], 185 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 186 | "decimals": 2, 187 | "mappingType": 1, 188 | "pattern": "", 189 | "thresholds": [], 190 | "type": "number", 191 | "unit": "short" 192 | }, 193 | { 194 | "alias": "", 195 | "colorMode": null, 196 | "colors": [ 197 | "#7eb26d", 198 | "rgba(237, 129, 40, 0.89)", 199 | "rgba(50, 172, 45, 0.97)" 200 | ], 201 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 202 | "decimals": 2, 203 | "mappingType": 1, 204 | "pattern": "instance", 205 | "thresholds": [], 206 | "type": "number", 207 | "unit": "short" 208 | }, 209 | { 210 | "alias": "", 211 | "colorMode": null, 212 | "colors": [ 213 | "rgba(245, 54, 54, 0.9)", 214 | "rgba(237, 129, 40, 0.89)", 215 | "rgba(50, 172, 45, 0.97)" 216 | ], 217 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 218 | "decimals": 2, 219 | "mappingType": 1, 220 
| "pattern": "packageVersion", 221 | "thresholds": [], 222 | "type": "number", 223 | "unit": "short" 224 | }, 225 | { 226 | "alias": "", 227 | "colorMode": null, 228 | "colors": [ 229 | "rgba(245, 54, 54, 0.9)", 230 | "rgba(237, 129, 40, 0.89)", 231 | "rgba(50, 172, 45, 0.97)" 232 | ], 233 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 234 | "decimals": 2, 235 | "mappingType": 1, 236 | "pattern": "commitHash", 237 | "thresholds": [], 238 | "type": "number", 239 | "unit": "short" 240 | }, 241 | { 242 | "alias": "", 243 | "colorMode": null, 244 | "colors": [ 245 | "rgba(245, 54, 54, 0.9)", 246 | "rgba(237, 129, 40, 0.89)", 247 | "rgba(50, 172, 45, 0.97)" 248 | ], 249 | "decimals": 2, 250 | "pattern": "/.*/", 251 | "thresholds": [], 252 | "type": "hidden", 253 | "unit": "short" 254 | } 255 | ], 256 | "targets": [ 257 | { 258 | "expr": "lastAlive{instance=~\"$instance\"}", 259 | "format": "table", 260 | "instant": true, 261 | "intervalFactor": 1, 262 | "legendFormat": "", 263 | "refId": "A" 264 | } 265 | ], 266 | "title": "Last Alive", 267 | "transform": "table", 268 | "type": "table" 269 | }, 270 | { 271 | "columns": [], 272 | "datasource": "${DS_PROMETHEUS}", 273 | "fontSize": "100%", 274 | "gridPos": { 275 | "h": 5, 276 | "w": 7, 277 | "x": 17, 278 | "y": 0 279 | }, 280 | "id": 29, 281 | "links": [], 282 | "options": {}, 283 | "pageSize": null, 284 | "scroll": true, 285 | "showHeader": true, 286 | "sort": { 287 | "col": 0, 288 | "desc": true 289 | }, 290 | "styles": [ 291 | { 292 | "alias": "Time", 293 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 294 | "pattern": "Time", 295 | "type": "date" 296 | }, 297 | { 298 | "alias": "", 299 | "colorMode": null, 300 | "colors": [ 301 | "rgba(245, 54, 54, 0.9)", 302 | "rgba(237, 129, 40, 0.89)", 303 | "rgba(50, 172, 45, 0.97)" 304 | ], 305 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 306 | "decimals": 2, 307 | "mappingType": 1, 308 | "pattern": "instance", 309 | "thresholds": [], 310 | "type": "number", 311 | "unit": "short" 312 | }, 313 | { 314 | 
"alias": "", 315 | "colorMode": null, 316 | "colors": [ 317 | "rgba(245, 54, 54, 0.9)", 318 | "rgba(237, 129, 40, 0.89)", 319 | "rgba(50, 172, 45, 0.97)" 320 | ], 321 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 322 | "decimals": 2, 323 | "mappingType": 1, 324 | "pattern": "packageVersion", 325 | "thresholds": [], 326 | "type": "number", 327 | "unit": "short" 328 | }, 329 | { 330 | "alias": "", 331 | "colorMode": null, 332 | "colors": [ 333 | "rgba(245, 54, 54, 0.9)", 334 | "rgba(237, 129, 40, 0.89)", 335 | "rgba(50, 172, 45, 0.97)" 336 | ], 337 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 338 | "decimals": 2, 339 | "mappingType": 1, 340 | "pattern": "commitHash", 341 | "thresholds": [], 342 | "type": "number", 343 | "unit": "short" 344 | }, 345 | { 346 | "alias": "", 347 | "colorMode": null, 348 | "colors": [ 349 | "rgba(245, 54, 54, 0.9)", 350 | "rgba(237, 129, 40, 0.89)", 351 | "rgba(50, 172, 45, 0.97)" 352 | ], 353 | "decimals": 2, 354 | "pattern": "/.*/", 355 | "thresholds": [], 356 | "type": "hidden", 357 | "unit": "short" 358 | } 359 | ], 360 | "targets": [ 361 | { 362 | "expr": "timestamp{instance=~\"$instance\"}", 363 | "format": "table", 364 | "instant": true, 365 | "intervalFactor": 1, 366 | "legendFormat": "", 367 | "refId": "A" 368 | } 369 | ], 370 | "title": "Metric Last Pull", 371 | "transform": "table", 372 | "type": "table" 373 | }, 374 | { 375 | "aliasColors": {}, 376 | "bars": false, 377 | "dashLength": 10, 378 | "dashes": false, 379 | "datasource": "${DS_PROMETHEUS}", 380 | "fill": 1, 381 | "gridPos": { 382 | "h": 4, 383 | "w": 24, 384 | "x": 0, 385 | "y": 5 386 | }, 387 | "id": 8, 388 | "legend": { 389 | "avg": false, 390 | "current": false, 391 | "max": false, 392 | "min": false, 393 | "show": true, 394 | "total": false, 395 | "values": false 396 | }, 397 | "lines": true, 398 | "linewidth": 1, 399 | "links": [], 400 | "nullPointMode": "null", 401 | "options": {}, 402 | "percentage": false, 403 | "pointradius": 5, 404 | "points": false, 405 | "renderer": 
"flot", 406 | "seriesOverrides": [], 407 | "spaceLength": 10, 408 | "stack": false, 409 | "steppedLine": false, 410 | "targets": [ 411 | { 412 | "expr": "ready{instance=~\"$instance\"}", 413 | "format": "time_series", 414 | "intervalFactor": 1, 415 | "legendFormat": "{{instance}}-{{packageVersion}}-{{commitHash}}", 416 | "refId": "A" 417 | } 418 | ], 419 | "thresholds": [], 420 | "timeFrom": null, 421 | "timeRegions": [], 422 | "timeShift": null, 423 | "title": "Analytic Alive", 424 | "tooltip": { 425 | "shared": true, 426 | "sort": 0, 427 | "value_type": "individual" 428 | }, 429 | "type": "graph", 430 | "xaxis": { 431 | "buckets": null, 432 | "mode": "time", 433 | "name": null, 434 | "show": true, 435 | "values": [] 436 | }, 437 | "yaxes": [ 438 | { 439 | "format": "short", 440 | "label": null, 441 | "logBase": 1, 442 | "max": null, 443 | "min": null, 444 | "show": true 445 | }, 446 | { 447 | "format": "short", 448 | "label": null, 449 | "logBase": 1, 450 | "max": null, 451 | "min": null, 452 | "show": true 453 | } 454 | ], 455 | "yaxis": { 456 | "align": false, 457 | "alignLevel": null 458 | } 459 | }, 460 | { 461 | "aliasColors": {}, 462 | "bars": false, 463 | "dashLength": 10, 464 | "dashes": false, 465 | "datasource": "${DS_PROMETHEUS}", 466 | "fill": 1, 467 | "gridPos": { 468 | "h": 4, 469 | "w": 12, 470 | "x": 0, 471 | "y": 9 472 | }, 473 | "id": 19, 474 | "legend": { 475 | "avg": false, 476 | "current": false, 477 | "max": false, 478 | "min": false, 479 | "show": true, 480 | "total": false, 481 | "values": false 482 | }, 483 | "lines": true, 484 | "linewidth": 1, 485 | "links": [], 486 | "nullPointMode": "null", 487 | "options": {}, 488 | "percentage": false, 489 | "pointradius": 5, 490 | "points": false, 491 | "renderer": "flot", 492 | "seriesOverrides": [], 493 | "spaceLength": 10, 494 | "stack": false, 495 | "steppedLine": false, 496 | "targets": [ 497 | { 498 | "expr": "tasksQueueLength{instance=~\"$instance\"}", 499 | "format": "time_series", 500 | 
"intervalFactor": 1, 501 | "legendFormat": "{{instance}}-{{packageVersion}}-{{commitHash}}", 502 | "refId": "A" 503 | } 504 | ], 505 | "thresholds": [], 506 | "timeFrom": null, 507 | "timeRegions": [], 508 | "timeShift": null, 509 | "title": "Queue Length for server's waited tasks", 510 | "tooltip": { 511 | "shared": true, 512 | "sort": 0, 513 | "value_type": "individual" 514 | }, 515 | "type": "graph", 516 | "xaxis": { 517 | "buckets": null, 518 | "mode": "time", 519 | "name": null, 520 | "show": true, 521 | "values": [] 522 | }, 523 | "yaxes": [ 524 | { 525 | "format": "short", 526 | "label": null, 527 | "logBase": 1, 528 | "max": null, 529 | "min": null, 530 | "show": true 531 | }, 532 | { 533 | "format": "short", 534 | "label": null, 535 | "logBase": 1, 536 | "max": null, 537 | "min": null, 538 | "show": true 539 | } 540 | ], 541 | "yaxis": { 542 | "align": false, 543 | "alignLevel": null 544 | } 545 | }, 546 | { 547 | "aliasColors": {}, 548 | "bars": false, 549 | "dashLength": 10, 550 | "dashes": false, 551 | "datasource": "${DS_PROMETHEUS}", 552 | "fill": 1, 553 | "gridPos": { 554 | "h": 4, 555 | "w": 12, 556 | "x": 12, 557 | "y": 9 558 | }, 559 | "hasticDatasource": null, 560 | "id": 21, 561 | "legend": { 562 | "avg": false, 563 | "current": false, 564 | "max": false, 565 | "min": false, 566 | "show": true, 567 | "total": false, 568 | "values": false 569 | }, 570 | "lines": true, 571 | "linewidth": 1, 572 | "links": [], 573 | "nullPointMode": "null", 574 | "options": {}, 575 | "percentage": false, 576 | "pointradius": 5, 577 | "points": false, 578 | "renderer": "flot", 579 | "seriesOverrides": [], 580 | "spaceLength": 10, 581 | "stack": false, 582 | "steppedLine": false, 583 | "targets": [ 584 | { 585 | "expr": "awaitedTasksNumber{instance=~\"$instance\"}", 586 | "format": "time_series", 587 | "intervalFactor": 1, 588 | "legendFormat": "{{instance}}-{{ packageVersion }}-{{ commitHash }}", 589 | "refId": "A" 590 | } 591 | ], 592 | "thresholds": [], 593 | 
"timeFrom": null, 594 | "timeShift": null, 595 | "title": "Node: awaited tasks from analytics", 596 | "tooltip": { 597 | "shared": true, 598 | "sort": 0, 599 | "value_type": "individual" 600 | }, 601 | "type": "corpglory-hastic-graph-panel", 602 | "xaxis": { 603 | "buckets": null, 604 | "mode": "time", 605 | "name": null, 606 | "show": true, 607 | "values": [] 608 | }, 609 | "yaxes": [ 610 | { 611 | "format": "short", 612 | "label": null, 613 | "logBase": 1, 614 | "max": null, 615 | "min": null, 616 | "show": true 617 | }, 618 | { 619 | "format": "short", 620 | "label": null, 621 | "logBase": 1, 622 | "max": null, 623 | "min": null, 624 | "show": true 625 | } 626 | ] 627 | }, 628 | { 629 | "aliasColors": {}, 630 | "bars": false, 631 | "dashLength": 10, 632 | "dashes": false, 633 | "datasource": "${DS_PROMETHEUS}", 634 | "fill": 1, 635 | "gridPos": { 636 | "h": 10, 637 | "w": 12, 638 | "x": 0, 639 | "y": 13 640 | }, 641 | "id": 6, 642 | "legend": { 643 | "avg": false, 644 | "current": false, 645 | "max": false, 646 | "min": false, 647 | "show": true, 648 | "total": false, 649 | "values": false 650 | }, 651 | "lines": true, 652 | "linewidth": 1, 653 | "links": [], 654 | "nullPointMode": "null", 655 | "options": {}, 656 | "percentage": false, 657 | "pointradius": 5, 658 | "points": false, 659 | "renderer": "flot", 660 | "seriesOverrides": [], 661 | "spaceLength": 10, 662 | "stack": false, 663 | "steppedLine": false, 664 | "targets": [ 665 | { 666 | "expr": "activeWebhooks{instance=~\"$instance\"}", 667 | "format": "time_series", 668 | "intervalFactor": 1, 669 | "legendFormat": "{{instance}}-{{packageVersion}}-{{commitHash}}", 670 | "refId": "A" 671 | } 672 | ], 673 | "thresholds": [], 674 | "timeFrom": null, 675 | "timeRegions": [], 676 | "timeShift": null, 677 | "title": "Active Webhooks", 678 | "tooltip": { 679 | "shared": true, 680 | "sort": 0, 681 | "value_type": "individual" 682 | }, 683 | "type": "graph", 684 | "xaxis": { 685 | "buckets": null, 686 | "mode": 
"time", 687 | "name": null, 688 | "show": true, 689 | "values": [] 690 | }, 691 | "yaxes": [ 692 | { 693 | "format": "short", 694 | "label": null, 695 | "logBase": 1, 696 | "max": null, 697 | "min": null, 698 | "show": true 699 | }, 700 | { 701 | "format": "short", 702 | "label": null, 703 | "logBase": 1, 704 | "max": null, 705 | "min": null, 706 | "show": true 707 | } 708 | ], 709 | "yaxis": { 710 | "align": false, 711 | "alignLevel": null 712 | } 713 | }, 714 | { 715 | "aliasColors": {}, 716 | "bars": false, 717 | "dashLength": 10, 718 | "dashes": false, 719 | "datasource": "${DS_PROMETHEUS}", 720 | "fill": 1, 721 | "gridPos": { 722 | "h": 5, 723 | "w": 12, 724 | "x": 12, 725 | "y": 13 726 | }, 727 | "id": 10, 728 | "legend": { 729 | "avg": false, 730 | "current": false, 731 | "max": false, 732 | "min": false, 733 | "show": true, 734 | "total": false, 735 | "values": false 736 | }, 737 | "lines": true, 738 | "linewidth": 1, 739 | "links": [], 740 | "nullPointMode": "null", 741 | "options": {}, 742 | "percentage": false, 743 | "pointradius": 5, 744 | "points": false, 745 | "renderer": "flot", 746 | "seriesOverrides": [], 747 | "spaceLength": 10, 748 | "stack": false, 749 | "steppedLine": false, 750 | "targets": [ 751 | { 752 | "expr": "detectionsCount{instance=~\"$instance\"}", 753 | "format": "time_series", 754 | "intervalFactor": 1, 755 | "legendFormat": "{{instance}}-{{packageVersion}}-{{commitHash}}", 756 | "refId": "A" 757 | } 758 | ], 759 | "thresholds": [], 760 | "timeFrom": null, 761 | "timeRegions": [], 762 | "timeShift": null, 763 | "title": "Detections Count since restart", 764 | "tooltip": { 765 | "shared": true, 766 | "sort": 0, 767 | "value_type": "individual" 768 | }, 769 | "type": "graph", 770 | "xaxis": { 771 | "buckets": null, 772 | "mode": "time", 773 | "name": null, 774 | "show": true, 775 | "values": [] 776 | }, 777 | "yaxes": [ 778 | { 779 | "format": "short", 780 | "label": null, 781 | "logBase": 1, 782 | "max": null, 783 | "min": null, 784 
| "show": true 785 | }, 786 | { 787 | "format": "short", 788 | "label": null, 789 | "logBase": 1, 790 | "max": null, 791 | "min": null, 792 | "show": true 793 | } 794 | ], 795 | "yaxis": { 796 | "align": false, 797 | "alignLevel": null 798 | } 799 | }, 800 | { 801 | "aliasColors": {}, 802 | "bars": false, 803 | "dashLength": 10, 804 | "dashes": false, 805 | "datasource": "${DS_PROMETHEUS}", 806 | "fill": 1, 807 | "gridPos": { 808 | "h": 5, 809 | "w": 12, 810 | "x": 12, 811 | "y": 18 812 | }, 813 | "id": 23, 814 | "legend": { 815 | "avg": false, 816 | "current": false, 817 | "max": false, 818 | "min": false, 819 | "show": true, 820 | "total": false, 821 | "values": false 822 | }, 823 | "lines": true, 824 | "linewidth": 1, 825 | "links": [], 826 | "nullPointMode": "null", 827 | "options": {}, 828 | "percentage": false, 829 | "pointradius": 5, 830 | "points": false, 831 | "renderer": "flot", 832 | "seriesOverrides": [], 833 | "spaceLength": 10, 834 | "stack": false, 835 | "steppedLine": false, 836 | "targets": [ 837 | { 838 | "expr": "irate(detectionsCount{instance=~\"$instance\"}[1m])", 839 | "format": "time_series", 840 | "interval": "", 841 | "intervalFactor": 1, 842 | "legendFormat": "{{instance}}-{{packageVersion}}-{{commitHash}}", 843 | "refId": "A" 844 | } 845 | ], 846 | "thresholds": [], 847 | "timeFrom": null, 848 | "timeRegions": [], 849 | "timeShift": null, 850 | "title": "Detections per second", 851 | "tooltip": { 852 | "shared": true, 853 | "sort": 0, 854 | "value_type": "individual" 855 | }, 856 | "type": "graph", 857 | "xaxis": { 858 | "buckets": null, 859 | "mode": "time", 860 | "name": null, 861 | "show": true, 862 | "values": [] 863 | }, 864 | "yaxes": [ 865 | { 866 | "format": "short", 867 | "label": null, 868 | "logBase": 1, 869 | "max": null, 870 | "min": null, 871 | "show": true 872 | }, 873 | { 874 | "format": "short", 875 | "label": null, 876 | "logBase": 1, 877 | "max": null, 878 | "min": null, 879 | "show": true 880 | } 881 | ], 882 | 
"yaxis": { 883 | "align": false, 884 | "alignLevel": null 885 | } 886 | } 887 | ], 888 | "refresh": false, 889 | "schemaVersion": 18, 890 | "style": "dark", 891 | "tags": [], 892 | "templating": { 893 | "list": [ 894 | { 895 | "allValue": ".*", 896 | "current": {}, 897 | "datasource": "${DS_PROMETHEUS}", 898 | "definition": "label_values(up{job=\"hastic-exporter\"}, instance)", 899 | "hide": 0, 900 | "includeAll": true, 901 | "label": null, 902 | "multi": true, 903 | "name": "instance", 904 | "options": [], 905 | "query": "label_values(up{job=\"hastic-exporter\"}, instance)", 906 | "refresh": 1, 907 | "regex": "", 908 | "skipUrlSync": false, 909 | "sort": 0, 910 | "tagValuesQuery": "", 911 | "tags": [], 912 | "tagsQuery": "", 913 | "type": "query", 914 | "useTags": false 915 | } 916 | ] 917 | }, 918 | "time": { 919 | "from": "now-1h", 920 | "to": "now" 921 | }, 922 | "timepicker": { 923 | "refresh_intervals": [ 924 | "1s", 925 | "5s", 926 | "10s", 927 | "30s", 928 | "1m", 929 | "5m", 930 | "15m", 931 | "30m", 932 | "1h", 933 | "2h", 934 | "1d" 935 | ], 936 | "time_options": [ 937 | "5m", 938 | "15m", 939 | "1h", 940 | "6h", 941 | "12h", 942 | "24h", 943 | "2d", 944 | "7d", 945 | "30d" 946 | ] 947 | }, 948 | "timezone": "", 949 | "title": "Hastic Exporter", 950 | "version": 3 951 | } --------------------------------------------------------------------------------