├── Dockerfile ├── .gitignore ├── .prettierrc.js ├── tsconfig.json ├── jest.config.js ├── provisioning └── datasources │ └── splunk-datasource.yml ├── src ├── module.ts ├── types.ts ├── ConfigEditor.tsx ├── QueryEditor.tsx ├── plugin.json ├── img │ └── logo.svg └── datasource.ts ├── Makefile ├── docker-compose.yml ├── CHANGELOG.md ├── package.json ├── LICENSE ├── .rc ├── .github └── workflows │ └── ci.yml ├── .releaserc └── README.md /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:17-alpine3.15 2 | 3 | RUN apk add --no-cache git 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .cache/ 2 | .npm/ 3 | coverage/ 4 | dist/ 5 | node_modules/ 6 | *~ 7 | \#* 8 | .\#* 9 | yarn-error.log -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | ...require('@grafana/toolkit/src/config/prettier.plugin.config.json'), 3 | }; 4 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "rootDir": "./src", 4 | "baseUrl": "./src", 5 | "jsx": "react" 6 | }, 7 | "extends": "@grafana/toolkit/src/config/tsconfig.plugin.json", 8 | "include": ["src", "types"] 9 | } 10 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | // This file is needed because it is used by vscode and other tools that 2 | // call `jest` directly. 
However, unless you are doing anything special 3 | // do not edit this file 4 | 5 | const standard = require('@grafana/toolkit/src/config/jest.plugin.config'); 6 | 7 | // This process will use the same config that `yarn test` is using 8 | module.exports = standard.jestConfig(); 9 | -------------------------------------------------------------------------------- /provisioning/datasources/splunk-datasource.yml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | datasources: 4 | - name: Splunk Datasource 5 | type: efcasado-splunk-datasource 6 | orgId: 1 7 | isDefault: true 8 | access: proxy 9 | url: https://splunk:8089 10 | basicAuth: true 11 | basicAuthUser: admin 12 | jsonData: 13 | tlsSkipVerify: true 14 | secureJsonData: 15 | basicAuthPassword: thisisasecret 16 | -------------------------------------------------------------------------------- /src/module.ts: -------------------------------------------------------------------------------- 1 | import { DataSourcePlugin } from '@grafana/data'; 2 | import { DataSource } from './datasource'; 3 | import { ConfigEditor } from './ConfigEditor'; 4 | import { QueryEditor } from './QueryEditor'; 5 | import { SplunkQuery, SplunkDataSourceOptions } from './types'; 6 | 7 | export const plugin = new DataSourcePlugin(DataSource) 8 | .setConfigEditor(ConfigEditor) 9 | .setQueryEditor(QueryEditor); 10 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all build build-docker build-splunk-datasource up down release 2 | 3 | SHELL = BASH_ENV=.rc /bin/bash --noprofile 4 | 5 | all: build up 6 | 7 | build: | build-docker build-splunk-datasource 8 | 9 | build-docker: 10 | docker build . 
-t node 11 | 12 | build-splunk-datasource: 13 | yarn install 14 | yarn build 15 | 16 | up: 17 | docker-compose up -d 18 | 19 | down: 20 | -docker-compose down 21 | 22 | release: 23 | npm install @semantic-release/exec @semantic-release/github @semantic-release/git @semantic-release/changelog semantic-release@19.0.2 24 | npx semantic-release $(RELEASE_OPTS) 25 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import { DataQuery, DataSourceJsonData } from '@grafana/data'; 2 | 3 | export interface SplunkQuery extends DataQuery { 4 | queryText: string; 5 | } 6 | 7 | export const defaultQuery: Partial = { 8 | queryText: '', 9 | }; 10 | 11 | /** 12 | * These are options configured for each DataSource instance 13 | */ 14 | export interface SplunkDataSourceOptions extends DataSourceJsonData { 15 | endpoint?: string; 16 | } 17 | 18 | /** 19 | * Value that is used in the backend, but never sent over HTTP to the frontend 20 | */ 21 | export interface SplunkSecureJsonData { 22 | basicAuthToken?: string; 23 | } 24 | -------------------------------------------------------------------------------- /src/ConfigEditor.tsx: -------------------------------------------------------------------------------- 1 | import React, { PureComponent } from 'react'; 2 | import { DataSourceHttpSettings } from '@grafana/ui'; 3 | import { DataSourcePluginOptionsEditorProps } from '@grafana/data'; 4 | import { SplunkDataSourceOptions } from './types'; 5 | 6 | interface Props extends DataSourcePluginOptionsEditorProps {} 7 | 8 | interface State {} 9 | 10 | export class ConfigEditor extends PureComponent { 11 | constructor(props: Props) { 12 | super(props); 13 | } 14 | 15 | render() { 16 | const { options, onOptionsChange } = this.props; 17 | 18 | return ( 19 | 20 | ); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- 
/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | grafana: 4 | image: grafana/grafana:7.5.16 5 | environment: 6 | - GF_AUTH_BASIC_ENABLED=false 7 | - GF_AUTH_ANONYMOUS_ENABLED=true 8 | - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin 9 | - GF_SECURITY_ADMIN_PASSWORD=admin 10 | - GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS=efcasado-splunk-datasource 11 | ports: 12 | - 3000:3000 13 | volumes: 14 | - ./:/var/lib/grafana/plugins/efcasado-splunk-datasource 15 | - ./provisioning/datasources/splunk-datasource.yml:/etc/grafana/provisioning/datasources/splunk-datasource.yml 16 | 17 | splunk: 18 | image: splunk/splunk:8.2.6 19 | environment: 20 | - SPLUNK_START_ARGS=--accept-license 21 | - SPLUNK_PASSWORD=thisisasecret 22 | ports: 23 | - 8000:8000 24 | - 8089:8089 25 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [0.3.3](https://github.com/efcasado/grafana-plugin-splunk-datasource/compare/v0.3.2...v0.3.3) (2022-05-29) 2 | 3 | 4 | ### Bug Fixes 5 | 6 | * **config:** Use official HttpProxySettings to configure Splunk Datasource ([#30](https://github.com/efcasado/grafana-plugin-splunk-datasource/issues/30)) ([39b7418](https://github.com/efcasado/grafana-plugin-splunk-datasource/commit/39b7418669e8a95be696786fcc8d07201035d9a1)) 7 | 8 | ## [0.3.2](https://github.com/efcasado/grafana-plugin-splunk-datasource/compare/v0.3.1...v0.3.2) (2022-05-29) 9 | 10 | 11 | ### Bug Fixes 12 | 13 | * **config:** Simplify configuration ([12fc0b2](https://github.com/efcasado/grafana-plugin-splunk-datasource/commit/12fc0b2427d1ce6fae6b9fa3133ab1e0830c98b9)) 14 | 15 | ## [0.3.1](https://github.com/efcasado/grafana-plugin-splunk-datasource/compare/v0.3.0...v0.3.1) (2022-05-29) 16 | 17 | # Changelog 18 | 19 | ## 0.1.0 (Unreleased) 20 | 21 | Initial release. 
22 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "efcasado-splunk-datasource", 3 | "version": "0.1.1", 4 | "description": "", 5 | "scripts": { 6 | "build": "grafana-toolkit plugin:build", 7 | "test": "grafana-toolkit plugin:test", 8 | "dev": "grafana-toolkit plugin:dev", 9 | "watch": "grafana-toolkit plugin:dev --watch", 10 | "sign": "grafana-toolkit plugin:sign", 11 | "start": "yarn watch" 12 | }, 13 | "author": "efcasado", 14 | "license": "Apache-2.0", 15 | "devDependencies": { 16 | "@grafana/data": "latest", 17 | "@grafana/runtime": "latest", 18 | "@grafana/toolkit": "latest", 19 | "@grafana/ui": "latest", 20 | "@testing-library/jest-dom": "5.4.0", 21 | "@testing-library/react": "^10.0.2", 22 | "@types/lodash": "latest" 23 | }, 24 | "engines": { 25 | "node": ">=14" 26 | }, 27 | "dependencies": { 28 | "@semantic-release/git": "^10.0.1", 29 | "install": "^0.13.0", 30 | "semantic-release": "^19.0.2" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/QueryEditor.tsx: -------------------------------------------------------------------------------- 1 | import defaults from 'lodash/defaults'; 2 | 3 | import React, { PureComponent } from 'react'; 4 | import { QueryField } from '@grafana/ui'; 5 | import { QueryEditorProps } from '@grafana/data'; 6 | import { DataSource } from './datasource'; 7 | import { defaultQuery, SplunkDataSourceOptions, SplunkQuery } from './types'; 8 | 9 | type Props = QueryEditorProps; 10 | 11 | export class QueryEditor extends PureComponent { 12 | onQueryTextChange = (value: string) => { 13 | const { onChange, query } = this.props; 14 | onChange({ ...query, queryText: value }); 15 | }; 16 | 17 | render() { 18 | const query = defaults(this.props.query, defaultQuery); 19 | const { queryText } = query; 20 | 21 | return ( 22 |
23 | 24 |
25 | ); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://raw.githubusercontent.com/grafana/grafana/master/docs/sources/developers/plugins/plugin.schema.json", 3 | "type": "datasource", 4 | "name": "splunk-datasource", 5 | "id": "efcasado-splunk-datasource", 6 | "metrics": true, 7 | "info": { 8 | "description": "", 9 | "author": { 10 | "name": "efcasado", 11 | "url": "" 12 | }, 13 | "keywords": [], 14 | "logos": { 15 | "small": "img/logo.svg", 16 | "large": "img/logo.svg" 17 | }, 18 | "links": [ 19 | { 20 | "name": "Website", 21 | "url": "https://github.com/efcasado/grafana-plugin-splunk-datasource" 22 | }, 23 | { 24 | "name": "License", 25 | "url": "https://github.com/efcasado/grafana-plugin-splunk-datasource/blob/main/LICENSE" 26 | } 27 | ], 28 | "screenshots": [], 29 | "version": "%VERSION%", 30 | "updated": "%TODAY%" 31 | }, 32 | "dependencies": { 33 | "grafanaDependency": ">=7.0.0", 34 | "plugins": [] 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022, Enrique Fernandez 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /.rc: -------------------------------------------------------------------------------- 1 | function npm() { 2 | DOCKER_IMAGE=node 3 | DOCKER_RUN_OPTS="--rm -v ${PWD}:${PWD} -w ${PWD} -e npm_config_cache=${PWD}/.npm -e npm_config_prefix=${PWD} -e YARN_CACHE_FOLDER=${PWD}/.cache/yarn -e GITHUB_TOKEN" 4 | if [ -n "$ZSH_VERSION" ]; then 5 | docker run ${=DOCKER_RUN_OPTS} --entrypoint=npm ${DOCKER_IMAGE} "$@" 6 | else 7 | docker run ${DOCKER_RUN_OPTS} --entrypoint=npm ${DOCKER_IMAGE} "$@" 8 | fi 9 | } 10 | 11 | function npx() { 12 | DOCKER_IMAGE=node 13 | DOCKER_RUN_OPTS="--rm -v ${PWD}:${PWD} -w ${PWD} -e npm_config_cache=${PWD}/.npm -e npm_config_prefix=${PWD} -e YARN_CACHE_FOLDER=${PWD}/.cache/yarn -e GITHUB_TOKEN" 14 | 15 | if [ -n "$ZSH_VERSION" ]; then 16 | docker run ${=DOCKER_RUN_OPTS} --entrypoint=npx ${DOCKER_IMAGE} "$@" 17 | else 18 | docker run ${DOCKER_RUN_OPTS} --entrypoint=npx ${DOCKER_IMAGE} "$@" 19 | fi 20 | } 21 | 22 | function yarn() { 23 | DOCKER_IMAGE=node 24 | DOCKER_RUN_OPTS="--rm -v ${PWD}:${PWD} -w ${PWD} -e NODE_OPTIONS=--openssl-legacy-provider -e npm_config_cache=${PWD}/.npm -e npm_config_prefix=${PWD}/.npm -e YARN_CACHE_FOLDER=${PWD}/.cache/yarn -e GITHUB_TOKEN" 25 | if [ -n "$ZSH_VERSION" ]; then 26 | docker run ${=DOCKER_RUN_OPTS} --entrypoint=yarn ${DOCKER_IMAGE} "$@" 27 | else 28 | docker run ${DOCKER_RUN_OPTS} --entrypoint=yarn ${DOCKER_IMAGE} "$@" 29 | fi 30 | } 
-------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | concurrency: 4 | group: ${{ github.ref }} 5 | cancel-in-progress: true 6 | 7 | on: 8 | push: 9 | branches: 10 | - main 11 | pull_request: 12 | branches: 13 | - main 14 | jobs: 15 | build: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v2 19 | 20 | - name: Cache yarn cache 21 | uses: actions/cache@v2 22 | id: cache-yarn-cache 23 | with: 24 | path: .cache/yarn 25 | key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} 26 | restore-keys: | 27 | ${{ runner.os }}-yarn- 28 | 29 | - name: Cache node_modules 30 | id: cache-node-modules 31 | uses: actions/cache@v2 32 | with: 33 | path: node_modules 34 | key: ${{ runner.os }}-${{ matrix.node-version }}-nodemodules-${{ hashFiles('**/yarn.lock') }} 35 | restore-keys: | 36 | ${{ runner.os }}-${{ matrix.node-version }}-nodemodules- 37 | 38 | - name: Build plugin 39 | run: make build 40 | 41 | - name: Release (dry run) 42 | if: github.ref != 'refs/heads/main' 43 | env: 44 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 45 | run: RELEASE_OPTS=--dry-run make release 46 | 47 | - name: Release 48 | if: github.ref == 'refs/heads/main' 49 | env: 50 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 51 | run: RELEASE_OPTS=--no-ci make release 52 | -------------------------------------------------------------------------------- /src/img/logo.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.releaserc: -------------------------------------------------------------------------------- 1 | { 2 | "branch": "main", 3 | "branches": [ "main" ], 4 | "plugins": [ 5 | ["@semantic-release/commit-analyzer", 6 | { 7 | "releaseRules": [ 8 | { "type": "major", "release": "major" }, 9 | { "type": "breaking", 
"release": "major" }, 10 | { "type": "minor", "release": "minor" }, 11 | { "type": "feat", "release": "minor" }, 12 | { "type": "patch", "release": "patch" }, 13 | { "type": "fix", "release": "patch" }, 14 | { "type": "chore", "release": "patch" }, 15 | { "type": "no-release", "release": false } 16 | ], 17 | "parserOpts": { 18 | "noteKeywords": ["BREAKING CHANGE", "BREAKING CHANGES"] 19 | } 20 | } 21 | ], 22 | "@semantic-release/release-notes-generator", 23 | ["@semantic-release/changelog", 24 | { 25 | "changelogFile": "CHANGELOG.md" 26 | } 27 | ], 28 | ["@semantic-release/exec", { 29 | "prepareCmd": "mv dist efcasado-splunk-datasource; tar -czvf efcasado-splunk-datasource-${nextRelease.version}.tar.gz efcasado-splunk-datasource" 30 | }], 31 | ["@semantic-release/git", 32 | { 33 | "assets": ["CHANGELOG.md"] 34 | } 35 | ], 36 | ["@semantic-release/github", 37 | { 38 | "assets": ["efcasado-splunk-datasource-*.tar.gz"] 39 | } 40 | ] 41 | ] 42 | } 43 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Splunk Data Source Plugin for Grafana 2 | 3 | ![Splunk Data Source for Grafana](https://github.com/efcasado/grafana-plugin-splunk-datasource/actions/workflows/ci.yml/badge.svg?branch=main) 4 | 5 | > **DISCLAIMER!** 6 | > This plugin is a proof-of-concept and breaking changes are very likely to be introduced. 7 | > Also, it has only been used in toy environments. Thus, if you are considering using it 8 | > in a production environment, do it at your own risk! 9 | 10 | 11 | ## What is Splunk Data Source Plugin for Grafana? 12 | 13 | Splunk Data Source Plugin for Grafana is a Grafana (data source) plugin that 14 | allows you to pull Splunk data into your Grafana dashboards. Or, in other words, 15 | it is a Grafana plugin that allows you to query Splunk directly from Grafana. 
16 | 17 | splunk-dashboard-in-grafana 18 | 19 | 20 | ### Installation 21 | 22 | 1. Download the latest version of the plugin 23 | 24 | ```bash 25 | wget https://github.com/efcasado/grafana-plugin-splunk-datasource/releases/download/vX.Y.Z/efcasado-splunk-datasource-X.Y.Z.tar.gz 26 | ``` 27 | 28 | 2. Unzip it in your Grafana's installation plugin directory (eg. `/var/lib/grafana/plugins`) 29 | 30 | ```bash 31 | tar -zxf efcasado-splunk-datasource-X.Y.Z.tar.gz -C YOUR_PLUGIN_DIR 32 | ``` 33 | 3. As of Grafana v8+ you must explicitly define any unsigned plugins that you wish to allow / load (eg. edit `/etc/grafana/grafana.ini`) 34 | 35 | ```allow_loading_unsigned_plugins = efcasado-splunk-datasource ``` 36 | 37 | ### Configuration 38 | 39 | The preferred way to configure Splunk Data Source Plugin for Grafana is using 40 | a [provisioning file](https://grafana.com/docs/grafana/latest/administration/provisioning/). 41 | You can use the provisioning file [included in this repository](https://github.com/efcasado/grafana-plugin-splunk-datasource/blob/main/provisioning/datasources/splunk-datasource.yml) 42 | as a source of inspiration. However, the plugin can also be manually configured 43 | by an administrator from Grafana's UI `Configuration --> Datasources --> Add data source`. 44 | 45 | NB: By default, Splunk's REST API is only available via HTTPS (even if you allow HTTP access on a different port), i.e. it is usually at `https://<your-splunk-host>:8089` 46 | 47 | Example configuration via the Grafana web-GUI (in Grafana v9.3.4): 48 | 49 | ![image](https://user-images.githubusercontent.com/60830628/221431256-ed3b9a8a-fdb0-4e0c-8ec7-9aa72477ac65.png) 50 | 51 | 52 | ### Testing in Grafana: 53 | Using a standard Splunk Query as a Grafana Query (and showing splunk results): 54 | ![image](https://user-images.githubusercontent.com/60830628/221431676-ca1f1982-1377-4753-aecb-5e447f34ce7c.png) 55 | 56 | 57 | 58 | 59 | ## Getting Started with Docker (Build/Run in Docker) 60 | 61 | 1.
Build the project 62 | 63 | ```bash 64 | make build 65 | ``` 66 | 67 | 2. Spin up the test environment 68 | 69 | ```bash 70 | make up 71 | ``` 72 | 73 | 3. Point your browser to [localhost:3000](http://localhost:3000) 74 | 75 | 76 | 77 | ## License 78 | 79 | > The MIT License (MIT) 80 | > 81 | > Copyright (c) 2022, Enrique Fernandez 82 | > 83 | > Permission is hereby granted, free of charge, to any person obtaining a copy 84 | > of this software and associated documentation files (the "Software"), to deal 85 | > in the Software without restriction, including without limitation the rights 86 | > to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 87 | > copies of the Software, and to permit persons to whom the Software is 88 | > furnished to do so, subject to the following conditions: 89 | > 90 | > The above copyright notice and this permission notice shall be included in 91 | > all copies or substantial portions of the Software. 92 | > 93 | > THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 94 | > IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 95 | > FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 96 | > AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 97 | > LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 98 | > OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 99 | > THE SOFTWARE. 
100 | -------------------------------------------------------------------------------- /src/datasource.ts: -------------------------------------------------------------------------------- 1 | import { getBackendSrv } from '@grafana/runtime'; 2 | 3 | import { 4 | DataQueryRequest, 5 | DataQueryResponse, 6 | DataSourceApi, 7 | DataSourceInstanceSettings, 8 | MutableDataFrame, 9 | } from '@grafana/data'; 10 | 11 | import { SplunkQuery, SplunkDataSourceOptions } from './types'; 12 | 13 | export class DataSource extends DataSourceApi { 14 | url?: string; 15 | 16 | constructor(instanceSettings: DataSourceInstanceSettings) { 17 | super(instanceSettings); 18 | 19 | this.url = instanceSettings.url; 20 | } 21 | 22 | async query(options: DataQueryRequest): Promise { 23 | const moment = require('moment'); 24 | const promises = options.targets.map((query) => 25 | this.doRequest(query, options).then((response) => { 26 | const frame = new MutableDataFrame({ 27 | refId: query.refId, 28 | fields: [], 29 | }); 30 | 31 | // console.log(`DEBUG: nFields=${response.fields.length}`); 32 | // console.log(`DEBUG: nResults=${response.results.length}`); 33 | 34 | //let fields = response.data.fields.map((field: any) => field['name']); 35 | response.fields.forEach((field: any) => { 36 | // console.log(`DEBUG: field=${field}`); 37 | frame.addField({ name: field }); 38 | }); 39 | 40 | response.results.forEach((result: any) => { 41 | // console.log(`DEBUG: result=${JSON.stringify(result)}`); 42 | let row: any[] = []; 43 | 44 | response.fields.forEach((field: any) => { 45 | if (field === 'Time') { 46 | let time = moment(result['_time']).format('YYYY-MM-DDTHH:mm:ssZ'); 47 | row.push(time); 48 | } else { 49 | row.push(result[field]); 50 | } 51 | }); 52 | frame.appendRow(row); 53 | }); 54 | 55 | return frame; 56 | }) 57 | ); 58 | 59 | return Promise.all(promises).then((data) => ({ data })); 60 | } 61 | 62 | async testDatasource() { 63 | const data = new URLSearchParams({ 64 | search: `search 
index=_internal * | stats count`, 65 | output_mode: 'json', 66 | exec_mode: 'oneshot', 67 | }).toString(); 68 | 69 | return getBackendSrv() 70 | .datasourceRequest({ 71 | method: 'POST', 72 | url: this.url + '/services/search/jobs', 73 | headers: { 74 | 'Content-Type': 'application/x-www-form-urlencoded', 75 | }, 76 | data: data, 77 | }) 78 | .then( 79 | (response: any) => { 80 | return { 81 | status: 'success', 82 | message: 'Data source is working', 83 | title: 'Success', 84 | }; 85 | }, 86 | (err: any) => { 87 | return { 88 | status: 'error', 89 | message: err.statusText, 90 | title: 'Error', 91 | }; 92 | } 93 | ); 94 | } 95 | 96 | async doSearchStatusRequest(sid: string) { 97 | const result: boolean = await getBackendSrv() 98 | .datasourceRequest({ 99 | method: 'GET', 100 | url: this.url + '/services/search/jobs/' + sid, 101 | params: { 102 | output_mode: 'json', 103 | }, 104 | }) 105 | .then((response) => { 106 | let status = response.data.entry[0].content.dispatchState; 107 | // console.log(`DEBUG: dispatchState=${status}`); 108 | return status === 'DONE' || status === 'PAUSED' || status === 'FAILED'; 109 | }); 110 | 111 | return result; 112 | } 113 | 114 | async doSearchRequest(query: SplunkQuery, options: DataQueryRequest) { 115 | const { range } = options; 116 | const from = Math.floor(range!.from.valueOf() / 1000); 117 | const to = Math.floor(range!.to.valueOf() / 1000); 118 | 119 | const data = new URLSearchParams({ 120 | search: `search ${query.queryText}`, 121 | output_mode: 'json', 122 | earliest_time: from.toString(), 123 | latest_time: to.toString(), 124 | }).toString(); 125 | 126 | const sid: string = await getBackendSrv() 127 | .datasourceRequest({ 128 | method: 'POST', 129 | url: this.url + '/services/search/jobs', 130 | headers: { 131 | 'Content-Type': 'application/x-www-form-urlencoded', 132 | }, 133 | data: data, 134 | }) 135 | .then((response) => { 136 | return response.data.sid; 137 | }); 138 | 139 | return sid; 140 | } 141 | 142 | async 
doGetAllResultsRequest(sid: string) { 143 | const count = 50000; 144 | let offset = 0; 145 | let isFirst = true; 146 | let isFinished = false; 147 | let fields: any[] = []; 148 | let results: any[] = []; 149 | 150 | while (!isFinished) { 151 | await getBackendSrv() 152 | .datasourceRequest({ 153 | method: 'GET', 154 | url: this.url + '/services/search/jobs/' + sid + '/results', 155 | params: { 156 | output_mode: 'json', 157 | offset: offset, 158 | count: count, 159 | }, 160 | }) 161 | .then((response) => { 162 | // console.log(`DEBUG: count=${count} offset=${offset} ${JSON.stringify(response.data)}`); 163 | if (response.data.post_process_count === 0 && response.data.results.length === 0) { 164 | isFinished = true; 165 | } else { 166 | if (isFirst) { 167 | isFirst = false; 168 | fields = response.data.fields.map((field: any) => field['name']); 169 | } 170 | offset = offset + count; 171 | results = results.concat(response.data.results); 172 | } 173 | }); 174 | 175 | offset = offset + count; 176 | } 177 | 178 | if (fields.includes('_time')) { 179 | fields.push('Time'); 180 | } 181 | 182 | const index = fields.indexOf('_raw', 0); 183 | if (index > -1) { 184 | fields.splice(index, 1); 185 | fields = fields.reverse(); 186 | fields.push('_raw'); 187 | fields = fields.reverse(); 188 | } 189 | 190 | return { fields: fields, results: results }; 191 | } 192 | 193 | async doRequest(query: SplunkQuery, options: DataQueryRequest) { 194 | const sid: string = await this.doSearchRequest(query, options); 195 | // console.log(`DEBUG: sid=${sid}`); 196 | 197 | while (!(await this.doSearchStatusRequest(sid))) {} 198 | 199 | const result = await this.doGetAllResultsRequest(sid); 200 | return result; 201 | } 202 | } 203 | --------------------------------------------------------------------------------