├── .bra.toml ├── .config ├── .cprc.json ├── .eslintrc ├── .prettierrc.js ├── Dockerfile ├── README.md ├── entrypoint.sh ├── jest-setup.js ├── jest.config.js ├── jest │ ├── mocks │ │ └── react-inlinesvg.tsx │ └── utils.js ├── supervisord │ └── supervisord.conf ├── tsconfig.json ├── types │ └── custom.d.ts └── webpack │ ├── constants.ts │ ├── utils.ts │ └── webpack.config.ts ├── .cprc.json ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ └── feature_request.md ├── combine_prs.yml ├── dependabot.yml ├── issue_commands.json ├── pr-commands.json ├── pull_request_template.md ├── release.yml ├── workflows │ ├── combine_prs.yml │ ├── detect-breaking-changes.yml │ ├── issue_commands.yml │ ├── pr-commands.yml │ ├── publish.yml │ ├── push.yml │ └── stale.yml └── zizmor.yml ├── .gitignore ├── .nvmrc ├── .prettierrc.js ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── Magefile.go ├── README.md ├── appveyor.yml ├── cspell.config.json ├── docker-compose.yaml ├── docs └── img │ ├── opensearch-details.png │ ├── pipeline-aggregation-editor.png │ ├── query-editor.png │ ├── sigv4.png │ └── templating-query.png ├── eslint.config.mjs ├── go.mod ├── go.sum ├── jest-setup.js ├── jest.config.js ├── lefthook.yml ├── package.json ├── pkg ├── main.go ├── null │ └── float.go ├── opensearch │ ├── client │ │ ├── client.go │ │ ├── client_test.go │ │ ├── index_pattern.go │ │ ├── index_pattern_test.go │ │ ├── models.go │ │ ├── models_test.go │ │ ├── ppl_request.go │ │ ├── ppl_request_test.go │ │ ├── search_request.go │ │ └── search_request_test.go │ ├── lucene_handler.go │ ├── models.go │ ├── opensearch.go │ ├── opensearch_test.go │ ├── ppl_handler.go │ ├── ppl_response_parser.go │ ├── ppl_response_parser_test.go │ ├── query_request.go │ ├── query_request_test.go │ ├── response_parser.go │ ├── response_parser_test.go │ └── snapshot_tests │ │ ├── README.md │ │ ├── helpers_test.go │ │ ├── lucene_logs_test.go │ │ ├── lucene_metric_test.go │ │ ├── lucene_raw_data_test.go │ │ ├── lucene_raw_document_test.go │ │ ├── lucene_service_map_test.go │ │ ├── lucene_trace_list_test.go │ │ ├── lucene_trace_spans_test.go │ │ ├── ppl_logs_test.go │ │ ├── ppl_table_test.go │ │ └── testdata │ │ ├── lucene_logs.expected_result_generated_snapshot.golden.jsonc │ │ ├── lucene_logs.query_input.json │ │ ├── lucene_logs.response_from_opensearch.json │ │ ├── lucene_metric_average_derivative_group_by_date_histogram.expected_result_generated_snapshot.golden.jsonc │ │ ├── lucene_metric_average_derivative_group_by_date_histogram.query_input.json │ │ ├── lucene_metric_average_derivative_group_by_date_histogram.response_from_opensearch.json │ │ ├── lucene_metric_max_group_by_terms.expected_result_generated_snapshot.golden.jsonc │ │ ├── lucene_metric_max_group_by_terms.query_input.json │ │ ├── lucene_metric_max_group_by_terms.response_from_opensearch.json │ │ ├── lucene_metric_percentiles_group_by_terms_orderby_percentiles.query_input.json │ │ ├── lucene_metric_sum_group_by_date_histogram.expected_result_generated_snapshot.golden.jsonc │ │ ├── lucene_metric_sum_group_by_date_histogram.query_input.json │ │ ├── lucene_metric_sum_group_by_date_histogram.response_from_opensearch.json │ │ ├── lucene_metric_sum_group_by_histogram.query_input.json │ │ ├── lucene_metric_sum_group_by_histogram_decimal_interval.query_input.json │ │ ├── lucene_metric_sum_group_by_histogram_invalid_interval.query_input.json │ │ ├── lucene_raw_data.expected_result_generated_snapshot.golden.jsonc │ │ ├── lucene_raw_data.query_input.json │ │ ├── 
lucene_raw_data.response_from_opensearch.json │ │ ├── lucene_raw_document.expected_result_generated_snapshot.golden.jsonc │ │ ├── lucene_raw_document.query_input.json │ │ ├── lucene_raw_document.response_from_opensearch.json │ │ ├── lucene_service_map_input.json │ │ ├── lucene_service_map_input_trace_list.json │ │ ├── lucene_service_map_input_with_trace_id.json │ │ ├── lucene_service_map_prefetch.response_from_opensearch.json │ │ ├── lucene_trace_list.expected_result_generated_snapshot.golden.jsonc │ │ ├── lucene_trace_list.query_input.json │ │ ├── lucene_trace_list.query_input_multiple.json │ │ ├── lucene_trace_list.response_from_opensearch.json │ │ ├── lucene_trace_list.response_from_opensearch_multiple.json │ │ ├── lucene_trace_list_and_spans.query_input.json │ │ ├── lucene_trace_spans.query_input.json │ │ ├── lucene_trace_spans.query_input_multiple.json │ │ ├── lucene_trace_spans.response_from_opensearch.json │ │ ├── ppl_logs.expected_result_generated_snapshot.golden.jsonc │ │ ├── ppl_logs.query_input.json │ │ ├── ppl_logs.response_from_opensearch.json │ │ ├── ppl_table.expected_result_generated_snapshot.golden.jsonc │ │ ├── ppl_table.query_input.json │ │ └── ppl_table.response_from_opensearch.json ├── tsdb │ └── interval.go └── utils │ ├── utils.go │ └── utils_test.go ├── playwright.config.ts ├── provisioning ├── dashboards │ ├── aws-opensearch.yaml │ └── aws-opensearch │ │ ├── ecommerce-example.json │ │ ├── traces-example.json │ │ └── web-traffic-example.json └── datasources │ └── aws-opensearch.yaml ├── src ├── QueryBuilder.ts ├── __mocks__ │ ├── DefaultConfigOptions.ts │ ├── OpenSearchDatasource.ts │ └── openSearchTraceMock.ts ├── components │ ├── AddRemove.test.tsx │ ├── AddRemove.tsx │ ├── IconButton.tsx │ ├── MetricPicker.tsx │ ├── QueryEditor │ │ ├── AnnotationQueryEditor.tsx │ │ ├── BucketAggregationsEditor │ │ │ ├── BucketAggregationEditor.tsx │ │ │ ├── SettingsEditor │ │ │ │ ├── FiltersSettingsEditor │ │ │ │ │ ├── index.tsx │ │ │ │ │ ├── state │ │ │ │ │ │ ├── actions.ts │ │ │ │ │ │ ├── reducer.test.ts │ │ │ │ │ │ ├── reducer.ts │ │ │ │ │ │ └── types.ts │ │ │ │ │ └── utils.ts │ │ │ │ ├── index.tsx │ │ │ │ └── useDescription.ts │ │ │ ├── aggregations.ts │ │ │ ├── index.tsx │ │ │ ├── state │ │ │ │ ├── actions.ts │ │ │ │ ├── reducer.test.ts │ │ │ │ ├── reducer.ts │ │ │ │ └── types.ts │ │ │ └── utils.ts │ │ ├── LuceneQueryEditor │ │ │ ├── LuceneQueryEditor.test.tsx │ │ │ ├── LuceneQueryEditor.tsx │ │ │ ├── LuceneQueryTypeSelector.tsx │ │ │ ├── state.test.ts │ │ │ └── state.ts │ │ ├── MetricAggregationsEditor │ │ │ ├── MetricEditor.test.tsx │ │ │ ├── MetricEditor.tsx │ │ │ ├── SettingsEditor │ │ │ │ ├── BucketScriptSettingsEditor │ │ │ │ │ ├── index.tsx │ │ │ │ │ ├── state │ │ │ │ │ │ ├── actions.ts │ │ │ │ │ │ ├── reducer.test.ts │ │ │ │ │ │ ├── reducer.ts │ │ │ │ │ │ └── types.ts │ │ │ │ │ └── utils.ts │ │ │ │ ├── MovingAverageSettingsEditor.tsx │ │ │ │ ├── SettingField.tsx │ │ │ │ ├── index.test.tsx │ │ │ │ ├── index.tsx │ │ │ │ └── useDescription.ts │ │ │ ├── SpecialMetricAggregationsRow.tsx │ │ │ ├── aggregations.ts │ │ │ ├── index.tsx │ │ │ ├── state │ │ │ │ ├── actions.ts │ │ │ │ ├── reducer.test.ts │ │ │ │ ├── reducer.ts │ │ │ │ └── types.ts │ │ │ ├── styles.ts │ │ │ └── utils.ts │ │ ├── OpenSearchQueryContext.test.tsx │ │ ├── OpenSearchQueryContext.tsx │ │ ├── PPLFormatEditor │ │ │ ├── HelpMessage.tsx │ │ │ ├── OpenCloseButton.test.tsx │ │ │ ├── OpenCloseButton.tsx │ │ │ ├── SettingsEditor.test.tsx │ │ │ ├── SettingsEditor.tsx │ │ │ ├── formats.ts │ │ │ ├── index.test.tsx 
│ │ │ ├── index.tsx │ │ │ ├── state.test.ts │ │ │ ├── state.ts │ │ │ └── utils.ts │ │ ├── QueryEditorRow.tsx │ │ ├── QueryTypeEditor │ │ │ ├── index.test.tsx │ │ │ ├── index.tsx │ │ │ ├── state.test.ts │ │ │ ├── state.ts │ │ │ ├── utils.test.ts │ │ │ └── utils.ts │ │ ├── SettingsEditorContainer.tsx │ │ ├── index.test.tsx │ │ ├── index.tsx │ │ ├── state.test.ts │ │ ├── state.ts │ │ └── styles.ts │ └── types.ts ├── configuration │ ├── ConfigEditor.test.tsx │ ├── ConfigEditor.tsx │ ├── DataLink.tsx │ ├── DataLinks.test.tsx │ ├── DataLinks.tsx │ ├── LogsConfig.test.tsx │ ├── LogsConfig.tsx │ ├── OpenSearchDetails.test.tsx │ ├── OpenSearchDetails.tsx │ ├── utils.test.ts │ └── utils.ts ├── dependencies │ ├── DataSourcePicker.tsx │ ├── PluginSignatureBadge.tsx │ ├── flatten.ts │ ├── matchers │ │ ├── index.ts │ │ ├── toEmitValuesWith.ts │ │ ├── types.ts │ │ └── utils.ts │ ├── mocks.ts │ └── table_model.ts ├── grammar.ts ├── hooks │ ├── useNextId.test.tsx │ ├── useNextId.ts │ ├── useStatelessReducer.test.tsx │ └── useStatelessReducer.ts ├── img │ └── logo.svg ├── index_pattern.ts ├── modifyQuery.test.ts ├── modifyQuery.ts ├── module.ts ├── opensearchDatasource.test.ts ├── opensearchDatasource.ts ├── plugin.json ├── query_def.ts ├── query_help.md ├── reducerTester.ts ├── specs │ └── index_pattern.test.ts ├── tracking.ts ├── types.ts ├── typings │ └── index.d.ts ├── utils.test.ts └── utils.ts ├── tests ├── annotationsEditor.spec.ts ├── configEditor.spec.ts └── queryEditor.spec.ts ├── tsconfig.json └── yarn.lock /.bra.toml: -------------------------------------------------------------------------------- 1 | 2 | [run] 3 | init_cmds = [ 4 | ["mage", "-v", "build:debug"], 5 | ["mage", "-v", "reloadPlugin"] 6 | ] 7 | watch_all = true 8 | follow_symlinks = true 9 | watch_dirs = [ 10 | "$WORKDIR/pkg", 11 | ] 12 | watch_exts = [".go"] 13 | build_delay = 1500 14 | cmds = [ 15 | ["mage", "-v", "build:debug"], 16 | ["mage", "-v", "reloadPlugin"] 17 | ] -------------------------------------------------------------------------------- /.config/.cprc.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "5.5.3" 3 | } 4 | -------------------------------------------------------------------------------- /.config/.eslintrc: -------------------------------------------------------------------------------- 1 | /* 2 | * ⚠️⚠️⚠️ THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY. ⚠️⚠️⚠️ 3 | * 4 | * In order to extend the configuration follow the steps in 5 | * https://grafana.com/developers/plugin-tools/get-started/set-up-development-environment#extend-the-eslint-config 6 | */ 7 | { 8 | "extends": ["@grafana/eslint-config"], 9 | "root": true, 10 | "rules": { 11 | "react/prop-types": "off" 12 | }, 13 | "overrides": [ 14 | { 15 | "plugins": ["deprecation"], 16 | "files": ["src/**/*.{ts,tsx}"], 17 | "rules": { 18 | "deprecation/deprecation": "warn" 19 | }, 20 | "parserOptions": { 21 | "project": "./tsconfig.json" 22 | } 23 | } 24 | ] 25 | } 26 | -------------------------------------------------------------------------------- /.config/.prettierrc.js: -------------------------------------------------------------------------------- 1 | /* 2 | * ⚠️⚠️⚠️ THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY. 
⚠️⚠️⚠️ 3 | * 4 | * In order to extend the configuration follow the steps in .config/README.md 5 | */ 6 | 7 | module.exports = { 8 | endOfLine: 'auto', 9 | printWidth: 120, 10 | trailingComma: 'es5', 11 | semi: true, 12 | jsxSingleQuote: false, 13 | singleQuote: true, 14 | useTabs: false, 15 | tabWidth: 2, 16 | }; 17 | -------------------------------------------------------------------------------- /.config/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG grafana_version=latest 2 | ARG grafana_image=grafana-enterprise 3 | 4 | FROM grafana/${grafana_image}:${grafana_version} 5 | 6 | ARG development=false 7 | ARG TARGETARCH 8 | 9 | ARG GO_VERSION=1.21.6 10 | ARG GO_ARCH=${TARGETARCH:-amd64} 11 | 12 | ENV DEV "${development}" 13 | 14 | # Make it as simple as possible to access the grafana instance for development purposes 15 | # Do NOT enable these settings in a public facing / production grafana instance 16 | ENV GF_AUTH_ANONYMOUS_ORG_ROLE "Admin" 17 | ENV GF_AUTH_ANONYMOUS_ENABLED "true" 18 | ENV GF_AUTH_BASIC_ENABLED "false" 19 | # Set development mode so plugins can be loaded without the need to sign 20 | ENV GF_DEFAULT_APP_MODE "development" 21 | 22 | 23 | LABEL maintainer="Grafana Labs " 24 | 25 | ENV GF_PATHS_HOME="/usr/share/grafana" 26 | WORKDIR $GF_PATHS_HOME 27 | 28 | USER root 29 | 30 | # Installing supervisor and inotify-tools 31 | RUN if [ "${development}" = "true" ]; then \ 32 | if grep -i -q alpine /etc/issue; then \ 33 | apk add supervisor inotify-tools git; \ 34 | elif grep -i -q ubuntu /etc/issue; then \ 35 | DEBIAN_FRONTEND=noninteractive && \ 36 | apt-get update && \ 37 | apt-get install -y supervisor inotify-tools git && \ 38 | rm -rf /var/lib/apt/lists/*; \ 39 | else \ 40 | echo 'ERROR: Unsupported base image' && /bin/false; \ 41 | fi \ 42 | fi 43 | 44 | COPY supervisord/supervisord.conf /etc/supervisor.d/supervisord.ini 45 | COPY supervisord/supervisord.conf /etc/supervisor/conf.d/supervisord.conf 46 | 47 | 48 | # Installing Go 49 | RUN if [ "${development}" = "true" ]; then \ 50 | curl -O -L https://golang.org/dl/go${GO_VERSION}.linux-${GO_ARCH}.tar.gz && \ 51 | rm -rf /usr/local/go && \ 52 | tar -C /usr/local -xzf go${GO_VERSION}.linux-${GO_ARCH}.tar.gz && \ 53 | echo "export PATH=$PATH:/usr/local/go/bin:~/go/bin" >> ~/.bashrc && \ 54 | rm -f go${GO_VERSION}.linux-${GO_ARCH}.tar.gz; \ 55 | fi 56 | 57 | # Installing delve for debugging 58 | RUN if [ "${development}" = "true" ]; then \ 59 | /usr/local/go/bin/go install github.com/go-delve/delve/cmd/dlv@latest; \ 60 | fi 61 | 62 | # Installing mage for plugin (re)building 63 | RUN if [ "${development}" = "true" ]; then \ 64 | git clone https://github.com/magefile/mage; \ 65 | cd mage; \ 66 | export PATH=$PATH:/usr/local/go/bin; \ 67 | go run bootstrap.go; \ 68 | fi 69 | 70 | # Inject livereload script into grafana index.html 71 | RUN sed -i 's|||g' /usr/share/grafana/public/views/index.html 72 | 73 | 74 | COPY entrypoint.sh /entrypoint.sh 75 | RUN chmod +x /entrypoint.sh 76 | ENTRYPOINT ["/entrypoint.sh"] 77 | -------------------------------------------------------------------------------- /.config/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if [ "${DEV}" = "false" ]; then 4 | echo "Starting test mode" 5 | exec /run.sh 6 | fi 7 | 8 | echo "Starting development mode" 9 | 10 | if grep -i -q alpine /etc/issue; then 11 | exec /usr/bin/supervisord -c /etc/supervisord.conf 12 | elif grep -i -q ubuntu 
/etc/issue; then 13 | exec /usr/bin/supervisord -c /etc/supervisor/supervisord.conf 14 | else 15 | echo 'ERROR: Unsupported base image' 16 | exit 1 17 | fi 18 | 19 | -------------------------------------------------------------------------------- /.config/jest-setup.js: -------------------------------------------------------------------------------- 1 | /* 2 | * ⚠️⚠️⚠️ THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY. ⚠️⚠️⚠️ 3 | * 4 | * In order to extend the configuration follow the steps in 5 | * https://grafana.com/developers/plugin-tools/get-started/set-up-development-environment#extend-the-jest-config 6 | */ 7 | 8 | import '@testing-library/jest-dom'; 9 | import { TextEncoder, TextDecoder } from 'util'; 10 | 11 | Object.assign(global, { TextDecoder, TextEncoder }); 12 | 13 | // https://jestjs.io/docs/manual-mocks#mocking-methods-which-are-not-implemented-in-jsdom 14 | Object.defineProperty(global, 'matchMedia', { 15 | writable: true, 16 | value: (query) => ({ 17 | matches: false, 18 | media: query, 19 | onchange: null, 20 | addListener: jest.fn(), // deprecated 21 | removeListener: jest.fn(), // deprecated 22 | addEventListener: jest.fn(), 23 | removeEventListener: jest.fn(), 24 | dispatchEvent: jest.fn(), 25 | }), 26 | }); 27 | 28 | HTMLCanvasElement.prototype.getContext = () => {}; 29 | -------------------------------------------------------------------------------- /.config/jest.config.js: -------------------------------------------------------------------------------- 1 | /* 2 | * ⚠️⚠️⚠️ THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY. ⚠️⚠️⚠️ 3 | * 4 | * In order to extend the configuration follow the steps in 5 | * https://grafana.com/developers/plugin-tools/get-started/set-up-development-environment#extend-the-jest-config 6 | */ 7 | 8 | const path = require('path'); 9 | const { grafanaESModules, nodeModulesToTransform } = require('./jest/utils'); 10 | 11 | module.exports = { 12 | moduleNameMapper: { 13 | '\\.(css|scss|sass)$': 'identity-obj-proxy', 14 | 'react-inlinesvg': path.resolve(__dirname, 'jest', 'mocks', 'react-inlinesvg.tsx'), 15 | }, 16 | modulePaths: ['/src'], 17 | setupFilesAfterEnv: ['/jest-setup.js'], 18 | testEnvironment: 'jest-environment-jsdom', 19 | testMatch: [ 20 | '/src/**/__tests__/**/*.{js,jsx,ts,tsx}', 21 | '/src/**/*.{spec,test,jest}.{js,jsx,ts,tsx}', 22 | '/src/**/*.{spec,test,jest}.{js,jsx,ts,tsx}', 23 | ], 24 | transform: { 25 | '^.+\\.(t|j)sx?$': [ 26 | '@swc/jest', 27 | { 28 | sourceMaps: 'inline', 29 | jsc: { 30 | parser: { 31 | syntax: 'typescript', 32 | tsx: true, 33 | decorators: false, 34 | dynamicImport: true, 35 | }, 36 | }, 37 | }, 38 | ], 39 | }, 40 | // Jest will throw `Cannot use import statement outside module` if it tries to load an 41 | // ES module without it being transformed first. ./config/README.md#esm-errors-with-jest 42 | transformIgnorePatterns: [nodeModulesToTransform(grafanaESModules)], 43 | }; 44 | -------------------------------------------------------------------------------- /.config/jest/mocks/react-inlinesvg.tsx: -------------------------------------------------------------------------------- 1 | // Due to the grafana/ui Icon component making fetch requests to 2 | // `/public/img/icon/.svg` we need to mock react-inlinesvg to prevent 3 | // the failed fetch requests from displaying errors in console. 
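// In a test, an icon can then be asserted on by its file name, e.g. (illustrative):
//   expect(screen.getByTestId('angle-double-down')).toBeInTheDocument();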
4 | 5 | import React from 'react'; 6 | 7 | type Callback = (...args: any[]) => void; 8 | 9 | export interface StorageItem { 10 | content: string; 11 | queue: Callback[]; 12 | status: string; 13 | } 14 | 15 | export const cacheStore: { [key: string]: StorageItem } = Object.create(null); 16 | 17 | const SVG_FILE_NAME_REGEX = /(.+)\/(.+)\.svg$/; 18 | 19 | const InlineSVG = ({ src }: { src: string }) => { 20 | // testId will be the file name without extension (e.g. `public/img/icons/angle-double-down.svg` -> `angle-double-down`) 21 | const testId = src.replace(SVG_FILE_NAME_REGEX, '$2'); 22 | return ; 23 | }; 24 | 25 | export default InlineSVG; 26 | -------------------------------------------------------------------------------- /.config/jest/utils.js: -------------------------------------------------------------------------------- 1 | /* 2 | * ⚠️⚠️⚠️ THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY. ⚠️⚠️⚠️ 3 | * 4 | * In order to extend the configuration follow the steps in .config/README.md 5 | */ 6 | 7 | /* 8 | * This utility function is useful in combination with jest `transformIgnorePatterns` config 9 | * to transform specific packages (e.g.ES modules) in a projects node_modules folder. 10 | */ 11 | const nodeModulesToTransform = (moduleNames) => `node_modules\/(?!.*(${moduleNames.join('|')})\/.*)`; 12 | 13 | // Array of known nested grafana package dependencies that only bundle an ESM version 14 | const grafanaESModules = [ 15 | '.pnpm', // Support using pnpm symlinked packages 16 | '@grafana/schema', 17 | 'd3', 18 | 'd3-color', 19 | 'd3-force', 20 | 'd3-interpolate', 21 | 'd3-scale-chromatic', 22 | 'ol', 23 | 'react-colorful', 24 | 'rxjs', 25 | 'uuid', 26 | ]; 27 | 28 | module.exports = { 29 | nodeModulesToTransform, 30 | grafanaESModules, 31 | }; 32 | -------------------------------------------------------------------------------- /.config/supervisord/supervisord.conf: -------------------------------------------------------------------------------- 1 | [supervisord] 2 | nodaemon=true 3 | user=root 4 | 5 | [program:grafana] 6 | user=root 7 | directory=/var/lib/grafana 8 | command=bash -c 'while [ ! 
-f /root/grafana-opensearch-datasource/dist/gpx_opensearch-datasource* ]; do sleep 1; done; /run.sh' 9 | stdout_logfile=/dev/fd/1 10 | stdout_logfile_maxbytes=0 11 | redirect_stderr=true 12 | killasgroup=true 13 | stopasgroup=true 14 | autostart=true 15 | 16 | [program:delve] 17 | user=root 18 | command=/bin/bash -c 'pid=""; while [ -z "$pid" ]; do pid=$(pgrep -f gpx_opensearch-datasource); done; /root/go/bin/dlv attach --api-version=2 --headless --continue --accept-multiclient --listen=:2345 $pid' 19 | stdout_logfile=/dev/fd/1 20 | stdout_logfile_maxbytes=0 21 | redirect_stderr=true 22 | killasgroup=false 23 | stopasgroup=false 24 | autostart=true 25 | autorestart=true 26 | 27 | [program:build-watcher] 28 | user=root 29 | command=/bin/bash -c 'while inotifywait -e modify,create,delete -r /var/lib/grafana/plugins/grafana-opensearch-datasource; do echo "Change detected, restarting delve...";supervisorctl restart delve; done' 30 | stdout_logfile=/dev/fd/1 31 | stdout_logfile_maxbytes=0 32 | redirect_stderr=true 33 | killasgroup=true 34 | stopasgroup=true 35 | autostart=true 36 | 37 | [program:mage-watcher] 38 | user=root 39 | environment=PATH="/usr/local/go/bin:/root/go/bin:%(ENV_PATH)s" 40 | directory=/root/grafana-opensearch-datasource 41 | command=/bin/bash -c 'git config --global --add safe.directory /root/grafana-opensearch-datasource && mage -v watch' 42 | stdout_logfile=/dev/fd/1 43 | stdout_logfile_maxbytes=0 44 | redirect_stderr=true 45 | killasgroup=true 46 | stopasgroup=true 47 | autostart=true 48 | -------------------------------------------------------------------------------- /.config/tsconfig.json: -------------------------------------------------------------------------------- 1 | /* 2 | * ⚠️⚠️⚠️ THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY. 
⚠️⚠️⚠️ 3 | * 4 | * In order to extend the configuration follow the steps in 5 | * https://grafana.com/developers/plugin-tools/get-started/set-up-development-environment#extend-the-typescript-config 6 | */ 7 | { 8 | "compilerOptions": { 9 | "alwaysStrict": true, 10 | "declaration": false, 11 | "rootDir": "../src", 12 | "baseUrl": "../src", 13 | "typeRoots": ["../node_modules/@types"], 14 | "resolveJsonModule": true 15 | }, 16 | "ts-node": { 17 | "compilerOptions": { 18 | "module": "commonjs", 19 | "target": "es5", 20 | "esModuleInterop": true 21 | }, 22 | "transpileOnly": true 23 | }, 24 | "include": ["../src", "./types"], 25 | "extends": "@grafana/tsconfig" 26 | } 27 | -------------------------------------------------------------------------------- /.config/types/custom.d.ts: -------------------------------------------------------------------------------- 1 | // Image declarations 2 | declare module '*.gif' { 3 | const src: string; 4 | export default src; 5 | } 6 | 7 | declare module '*.jpg' { 8 | const src: string; 9 | export default src; 10 | } 11 | 12 | declare module '*.jpeg' { 13 | const src: string; 14 | export default src; 15 | } 16 | 17 | declare module '*.png' { 18 | const src: string; 19 | export default src; 20 | } 21 | 22 | declare module '*.webp' { 23 | const src: string; 24 | export default src; 25 | } 26 | 27 | declare module '*.svg' { 28 | const content: string; 29 | export default content; 30 | } 31 | 32 | // Font declarations 33 | declare module '*.woff'; 34 | declare module '*.woff2'; 35 | declare module '*.eot'; 36 | declare module '*.ttf'; 37 | declare module '*.otf'; 38 | -------------------------------------------------------------------------------- /.config/webpack/constants.ts: -------------------------------------------------------------------------------- 1 | export const SOURCE_DIR = 'src'; 2 | export const DIST_DIR = 'dist'; 3 | -------------------------------------------------------------------------------- /.config/webpack/utils.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import process from 'process'; 3 | import os from 'os'; 4 | import path from 'path'; 5 | import { glob } from 'glob'; 6 | import { SOURCE_DIR } from './constants'; 7 | 8 | export function isWSL() { 9 | if (process.platform !== 'linux') { 10 | return false; 11 | } 12 | 13 | if (os.release().toLowerCase().includes('microsoft')) { 14 | return true; 15 | } 16 | 17 | try { 18 | return fs.readFileSync('/proc/version', 'utf8').toLowerCase().includes('microsoft'); 19 | } catch { 20 | return false; 21 | } 22 | } 23 | 24 | export function getPackageJson() { 25 | return require(path.resolve(process.cwd(), 'package.json')); 26 | } 27 | 28 | export function getPluginJson() { 29 | return require(path.resolve(process.cwd(), `${SOURCE_DIR}/plugin.json`)); 30 | } 31 | 32 | export function getCPConfigVersion() { 33 | const cprcJson = path.resolve(__dirname, '../', '.cprc.json'); 34 | return fs.existsSync(cprcJson) ? require(cprcJson).version : { version: 'unknown' }; 35 | } 36 | 37 | export function hasReadme() { 38 | return fs.existsSync(path.resolve(process.cwd(), SOURCE_DIR, 'README.md')); 39 | } 40 | 41 | // Support bundling nested plugins by finding all plugin.json files in src directory 42 | // then checking for a sibling module.[jt]sx? file. 
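// For example (illustrative paths): a nested plugin at src/nested/plugin.json with a sibling
// src/nested/module.ts yields an entry keyed 'nested/module', while the root src/module.ts
// maps to the plain 'module' entry.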
43 | export async function getEntries(): Promise> { 44 | const pluginsJson = await glob('**/src/**/plugin.json', { absolute: true }); 45 | 46 | const plugins = await Promise.all( 47 | pluginsJson.map((pluginJson) => { 48 | const folder = path.dirname(pluginJson); 49 | return glob(`${folder}/module.{ts,tsx,js,jsx}`, { absolute: true }); 50 | }) 51 | ); 52 | 53 | return plugins.reduce((result, modules) => { 54 | return modules.reduce((result, module) => { 55 | const pluginPath = path.dirname(module); 56 | const pluginName = path.relative(process.cwd(), pluginPath).replace(/src\/?/i, ''); 57 | const entryName = pluginName === '' ? 'module' : `${pluginName}/module`; 58 | 59 | result[entryName] = module; 60 | return result; 61 | }, result); 62 | }, {}); 63 | } 64 | -------------------------------------------------------------------------------- /.cprc.json: -------------------------------------------------------------------------------- 1 | { 2 | "features": { 3 | "bundleGrafanaUI": false, 4 | "useReactRouterV6": false 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Lines starting with '#' are comments. 2 | # Each line is a file pattern followed by one or more owners. 3 | 4 | # More details are here: https://help.github.com/articles/about-codeowners/ 5 | 6 | # The '*' pattern is global owners. 7 | 8 | # Order is important. The last matching pattern has the most precedence. 9 | # The folders are ordered as follows: 10 | 11 | # In each subsection folders are ordered first by depth, then alphabetically. 12 | # This should make it easy to add new rules without breaking existing ones. 13 | 14 | * @grafana/aws-datasources 15 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Report a bug you found when using this plugin 4 | labels: ["datasource/OpenSearch", "type/bug"] 5 | --- 6 | 7 | 13 | 14 | **What happened**: 15 | 16 | **What you expected to happen**: 17 | 18 | **How to reproduce it (as minimally and precisely as possible)**: 19 | 20 | **Anything else we need to know?**: 21 | 22 | **Environment**: 23 | - Grafana version: 24 | - OpenSearch version: 25 | - Plugin version: 26 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Questions & Help 4 | url: https://community.grafana.com 5 | about: Please ask and answer questions here 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | labels: ['datasource/OpenSearch', 'type/feature-request'] 5 | --- 6 | 7 | **Is your feature request related to a problem? Please describe.** 8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 9 | 10 | **Describe the solution you'd like** 11 | A clear and concise description of what you want to happen. 
12 | 13 | **Describe alternatives you've considered** 14 | A clear and concise description of any alternative solutions or features you've considered. 15 | 16 | **Additional context** 17 | Add any other context or screenshots about the feature request here. 18 | -------------------------------------------------------------------------------- /.github/combine_prs.yml: -------------------------------------------------------------------------------- 1 | name: Combine PRs 2 | 3 | on: 4 | workflow_dispatch: # manual activation, for now 5 | 6 | permissions: 7 | contents: write 8 | pull-requests: write 9 | checks: read 10 | 11 | jobs: 12 | combine-prs: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: combine-prs 17 | id: combine-prs 18 | uses: github/combine-prs@v5.1.0 19 | with: 20 | labels: combined-pr 21 | pr_title: "Dependabot updates" 22 | branch_prefix: dependabot # the default, just for clarity 23 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: 'gomod' 4 | directory: '/' 5 | schedule: 6 | interval: 'weekly' 7 | groups: 8 | all-go-dependencies: 9 | patterns: 10 | - '*' 11 | - package-ecosystem: 'github-actions' 12 | directory: '/' 13 | schedule: 14 | interval: 'weekly' 15 | groups: 16 | all-github-action-dependencies: 17 | patterns: 18 | - '*' 19 | - package-ecosystem: 'npm' 20 | directory: '/' 21 | schedule: 22 | interval: 'weekly' 23 | ignore: 24 | - dependency-name: '@reduxjs/toolkit' 25 | update-types: ['version-update:semver-major'] 26 | - dependency-name: 'react' 27 | update-types: ['version-update:semver-major'] 28 | - dependency-name: 'react-dom' 29 | update-types: ['version-update:semver-major'] 30 | groups: 31 | all-node-dependencies: 32 | patterns: 33 | - '*' 34 | -------------------------------------------------------------------------------- /.github/issue_commands.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "label", 4 | "name": "datasource/OpenSearch", 5 | "action": "addToProject", 6 | "addToProject": { 7 | "url": "https://github.com/orgs/grafana/projects/97" 8 | } 9 | }, 10 | { 11 | "type": "label", 12 | "name": "datasource/OpenSearch", 13 | "action": "removeFromProject", 14 | "removeFromProject": { 15 | "url": "https://github.com/orgs/grafana/projects/97" 16 | } 17 | }, 18 | { 19 | "type": "label", 20 | "name": "type/docs", 21 | "action": "addToProject", 22 | "addToProject": { 23 | "url": "https://github.com/orgs/grafana/projects/69" 24 | } 25 | }, 26 | { 27 | "type": "label", 28 | "name": "type/docs", 29 | "action": "removeFromProject", 30 | "removeFromProject": { 31 | "url": "https://github.com/orgs/grafana/projects/69" 32 | } 33 | } 34 | ] 35 | -------------------------------------------------------------------------------- /.github/pr-commands.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "author", 4 | "name": "pr/external", 5 | "notMemberOf": { 6 | "org": "grafana" 7 | }, 8 | "ignoreList": ["renovate[bot]", "dependabot[bot]", "grafana-delivery-bot[bot]", "grafanabot"], 9 | "action": "updateLabel", 10 | "addLabel": "pr/external" 11 | }, 12 | { 13 | "type": "label", 14 | "name": "pr/external", 15 | "action": "addToProject", 16 | "addToProject": { 17 | "url": "https://github.com/orgs/grafana/projects/97", 18 | "column": "Incoming" 19 | } 20 | } 
21 | ] 22 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 12 | 13 | **What this PR does / why we need it**: 14 | 15 | **Which issue(s) this PR fixes**: 16 | 17 | Fixes # 18 | 19 | **Special notes for your reviewer**: 20 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | changelog: 2 | categories: 3 | - title: Copy the following lines for the CHANGELOG 4 | labels: 5 | - changelog 6 | - title: Hidden 7 | exclude: 8 | labels: 9 | - '*' 10 | -------------------------------------------------------------------------------- /.github/workflows/combine_prs.yml: -------------------------------------------------------------------------------- 1 | name: Combine PRs 2 | 3 | on: 4 | workflow_dispatch: # manual activation, for now 5 | 6 | permissions: 7 | contents: write 8 | pull-requests: write 9 | checks: read 10 | 11 | jobs: 12 | combine-prs: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: combine-prs 17 | id: combine-prs 18 | uses: github/combine-prs@v5.2.0 19 | with: 20 | labels: combined-pr 21 | pr_title: "Dependabot updates" 22 | branch_prefix: dependabot # the default, just for clarity 23 | -------------------------------------------------------------------------------- /.github/workflows/detect-breaking-changes.yml: -------------------------------------------------------------------------------- 1 | name: Compatibility check 2 | on: [push, pull_request] 3 | permissions: 4 | contents: read 5 | jobs: 6 | compatibilitycheck: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | with: 11 | persist-credentials: false 12 | - uses: actions/setup-node@v4 13 | with: 14 | node-version-file: '.nvmrc' 15 | - name: Install dependencies 16 | run: yarn install 17 | - name: Build plugin 18 | run: yarn build 19 | - name: Compatibility check 20 | uses: grafana/plugin-actions/is-compatible@v1 21 | with: 22 | module: './src/module.ts' 23 | comment-pr: 'no' 24 | fail-if-incompatible: 'yes' 25 | targets: '@grafana/data,@grafana/ui,@grafana/runtime,@grafana/e2e-selectors' 26 | -------------------------------------------------------------------------------- /.github/workflows/issue_commands.yml: -------------------------------------------------------------------------------- 1 | name: Run commands when issues are labeled 2 | on: 3 | issues: 4 | types: [labeled, unlabeled] 5 | permissions: 6 | contents: read 7 | issues: write 8 | jobs: 9 | main: 10 | runs-on: ubuntu-latest 11 | permissions: 12 | contents: read 13 | # The "id-token: write" permission is required by "get-vault-secrets" action. We request it here 14 | # instead of the whole workflow, in order to reduce the scope of the permission. 
15 | id-token: write 16 | steps: 17 | - name: Checkout Actions 18 | uses: actions/checkout@v4 19 | with: 20 | repository: 'grafana/grafana-github-actions' 21 | path: ./actions 22 | ref: main 23 | persist-credentials: false 24 | - name: Install Actions 25 | run: npm install --production --prefix ./actions 26 | - name: Get secrets from vault 27 | id: get-secrets 28 | uses: grafana/shared-workflows/actions/get-vault-secrets@main 29 | with: 30 | repo_secrets: | 31 | AWS_DS_TOKEN_CREATOR_ID=aws-ds-token-creator:app_id 32 | AWS_DS_TOKEN_CREATOR_PEM=aws-ds-token-creator:pem 33 | - name: 'Generate token' 34 | id: generate_token 35 | uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a 36 | with: 37 | app_id: ${{ env.AWS_DS_TOKEN_CREATOR_ID }} 38 | private_key: ${{ env.AWS_DS_TOKEN_CREATOR_PEM }} 39 | - name: Run Commands 40 | uses: ./actions/commands 41 | with: 42 | token: ${{ steps.generate_token.outputs.token }} 43 | configPath: issue_commands 44 | -------------------------------------------------------------------------------- /.github/workflows/pr-commands.yml: -------------------------------------------------------------------------------- 1 | name: PR automation 2 | on: 3 | pull_request_target: # zizmor: ignore[dangerous-triggers] 4 | types: 5 | - labeled 6 | - opened 7 | permissions: {} 8 | concurrency: 9 | group: pr-commands-${{ github.event.number }} 10 | jobs: 11 | main: 12 | permissions: 13 | contents: read 14 | # The "id-token: write" permission is required by "get-vault-secrets" action. We request it here 15 | # instead of the whole workflow, in order to reduce the scope of the permission. 16 | id-token: write 17 | pull-requests: write 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Checkout Actions 21 | uses: actions/checkout@v4 22 | with: 23 | repository: 'grafana/grafana-github-actions' 24 | path: ./actions 25 | ref: main 26 | persist-credentials: false 27 | - name: Install Actions 28 | run: npm install --production --prefix ./actions 29 | - name: Get secrets from vault 30 | id: get-secrets 31 | uses: grafana/shared-workflows/actions/get-vault-secrets@main 32 | with: 33 | repo_secrets: | 34 | AWS_DS_TOKEN_CREATOR_ID=aws-ds-token-creator:app_id 35 | AWS_DS_TOKEN_CREATOR_PEM=aws-ds-token-creator:pem 36 | - name: 'Generate token' 37 | id: generate_token 38 | uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a 39 | with: 40 | app_id: ${{ env.AWS_DS_TOKEN_CREATOR_ID }} 41 | private_key: ${{ env.AWS_DS_TOKEN_CREATOR_PEM }} 42 | - name: Run Commands 43 | uses: ./actions/commands 44 | with: 45 | token: ${{ steps.generate_token.outputs.token }} 46 | configPath: pr-commands 47 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Plugins - CD 2 | run-name: Deploy ${{ inputs.branch }} to ${{ inputs.environment }} by @${{ github.actor }} 3 | 4 | on: 5 | workflow_dispatch: 6 | inputs: 7 | branch: 8 | description: Branch to publish from. 
Can be used to deploy PRs to dev 9 | default: main 10 | environment: 11 | description: Environment to publish to 12 | required: true 13 | type: choice 14 | options: 15 | - 'dev' 16 | - 'ops' 17 | - 'prod' 18 | docs-only: 19 | description: Only publish docs, do not publish the plugin 20 | default: false 21 | type: boolean 22 | 23 | permissions: {} 24 | 25 | jobs: 26 | cd: 27 | name: CD 28 | uses: grafana/plugin-ci-workflows/.github/workflows/cd.yml@main # zizmor: ignore[unpinned-uses] 29 | permissions: 30 | contents: write 31 | id-token: write 32 | attestations: write 33 | with: 34 | branch: ${{ github.event.inputs.branch }} 35 | environment: ${{ github.event.inputs.environment }} 36 | docs-only: ${{ fromJSON(github.event.inputs.docs-only) }} 37 | golangci-lint-version: 2.1.6 38 | 39 | # Scope for the plugin published to the catalog. Setting this to "grafana_cloud" will make it visible only in Grafana Cloud 40 | # (and hide it for on-prem). This is required for some provisioned plugins. 41 | # scopes: grafana_cloud 42 | 43 | # Also deploy the plugin to Grafana Cloud via Argo. You also have to follow the Argo Workflows setup guide for this to work. 44 | # grafana-cloud-deployment-type: provisioned 45 | # argo-workflow-slack-channel: "#grafana-plugins-platform-ci" 46 | -------------------------------------------------------------------------------- /.github/workflows/push.yml: -------------------------------------------------------------------------------- 1 | name: Plugins - CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | permissions: {} 10 | 11 | jobs: 12 | ci: 13 | name: CI 14 | uses: grafana/plugin-ci-workflows/.github/workflows/ci.yml@main # zizmor: ignore[unpinned-uses] 15 | permissions: 16 | contents: read 17 | id-token: write 18 | with: 19 | plugin-version-suffix: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || '' }} 20 | golangci-lint-version: 2.1.6 21 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: 'Close stale issues' 2 | on: 3 | schedule: 4 | # run at 1:30 every day 5 | - cron: '30 1 * * *' 6 | 7 | permissions: 8 | issues: write 9 | 10 | jobs: 11 | stale: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/stale@v9 15 | with: 16 | repo-token: ${{ secrets.GITHUB_TOKEN }} 17 | # start from the oldest issues when performing stale operations 18 | ascending: true 19 | days-before-issue-stale: 365 20 | days-before-issue-close: 30 21 | stale-issue-label: stale 22 | exempt-issue-labels: no stalebot,type/epic 23 | stale-issue-message: > 24 | This issue has been automatically marked as stale because it has not had 25 | activity in the last year. It will be closed in 30 days if no further activity occurs. Please 26 | feel free to leave a comment if you believe the issue is still relevant. 27 | Thank you for your contributions! 28 | close-issue-message: > 29 | This issue has been automatically closed because it has not had any further 30 | activity in the last 30 days. Thank you for your contributions! 
31 | -------------------------------------------------------------------------------- /.github/zizmor.yml: -------------------------------------------------------------------------------- 1 | rules: 2 | unpinned-uses: 3 | config: 4 | policies: 5 | actions/*: any 6 | github/*: any 7 | grafana/*: any -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | node_modules/ 9 | 10 | # Runtime data 11 | pids 12 | *.pid 13 | *.seed 14 | *.pid.lock 15 | 16 | # Directory for instrumented libs generated by jscoverage/JSCover 17 | lib-cov 18 | 19 | # Coverage directory used by tools like istanbul 20 | coverage 21 | 22 | # Compiled binary addons (https://nodejs.org/api/addons.html) 23 | dist/ 24 | artifacts/ 25 | work/ 26 | ci/ 27 | e2e-results/ 28 | 29 | # Editor 30 | .idea 31 | .vscode/launch.json 32 | 33 | .DS_Store 34 | __debug_bin* 35 | .eslintcache 36 | 37 | # End to end tests 38 | /test-results/ 39 | /playwright-report/ 40 | /blob-report/ 41 | /playwright/.cache/ 42 | /playwright/.auth/ -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 20 2 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | // Prettier configuration provided by Grafana scaffolding 3 | ...require('./.config/.prettierrc.js'), 4 | }; 5 | -------------------------------------------------------------------------------- /Magefile.go: -------------------------------------------------------------------------------- 1 | //+build mage 2 | 3 | package main 4 | 5 | import ( 6 | "fmt" 7 | // mage:import 8 | build "github.com/grafana/grafana-plugin-sdk-go/build" 9 | ) 10 | 11 | // Hello prints a message (shows that you can define custom Mage targets). 12 | func Hello() { 13 | fmt.Println("hello plugin developer!") 14 | } 15 | 16 | // Default configures the default target. 17 | var Default = build.BuildAll 18 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | # Test against the latest version of this Node.js version 2 | environment: 3 | nodejs_version: "10" 4 | 5 | # Local NPM Modules 6 | cache: 7 | - node_modules 8 | 9 | # Install scripts. (runs after repo cloning) 10 | install: 11 | # Get the latest stable version of Node.js or io.js 12 | - ps: Install-Product node $env:nodejs_version 13 | # install modules 14 | - npm install -g yarn --quiet 15 | - yarn install --pure-lockfile 16 | 17 | # Post-install test scripts. 18 | test_script: 19 | # Output useful info for debugging. 20 | - node --version 21 | - npm --version 22 | 23 | # Run the build 24 | build_script: 25 | - yarn dev # This will also run prettier! 
26 | - yarn build # make sure both scripts work 27 | -------------------------------------------------------------------------------- /cspell.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "ignorePaths": [ 3 | "node_modules/**", 4 | "dist/**", 5 | "**/mage_output_file.go", 6 | "**/testdata/*.json", 7 | "package.json", 8 | "go.sum", 9 | "go.mod", 10 | ".gitignore", 11 | ".config/**", 12 | "pkg/opensearch/snapshot_tests/testdata/**", 13 | "provisioning/dashboards/aws-opensearch/**" 14 | ], 15 | "words": [ 16 | "aggs", 17 | "awsds", 18 | "bodyclose", 19 | "botness", 20 | "bottomk", 21 | "clientip", 22 | "commitish", 23 | "ctxhttp", 24 | "dataframe", 25 | "dataframes", 26 | "datapoints", 27 | "datarow", 28 | "datarows", 29 | "datasource", 30 | "datasources", 31 | "datemath", 32 | "datetime", 33 | "docvalue", 34 | "dompurify", 35 | "dropfirst", 36 | "dsel", 37 | "ecommerce", 38 | "ewma", 39 | "extendedstats", 40 | "fielddata", 41 | "fieldname", 42 | "filebeat", 43 | "firstname", 44 | "fridgepoet", 45 | "geohash", 46 | "gille", 47 | "goconst", 48 | "goconvey", 49 | "gocyclo", 50 | "grafana", 51 | "GROUPBY", 52 | "healthcheck", 53 | "hostnames", 54 | "httpclient", 55 | "idelta", 56 | "indecies", 57 | "instancemgmt", 58 | "kevinwcyu", 59 | "lastname", 60 | "loglevel", 61 | "lucene", 62 | "lvta0909", 63 | "magefile", 64 | "memlock", 65 | "middlewares", 66 | "millis", 67 | "mmap", 68 | "moredata", 69 | "msearch", 70 | "multisearch", 71 | "Mwdhmsy", 72 | "nanos", 73 | "njvrzm", 74 | "nofile", 75 | "nolint", 76 | "ntime", 77 | "nvmrc", 78 | "Onechild", 79 | "opendistro", 80 | "opensearch", 81 | "opensearchproject", 82 | "pplbuilder", 83 | "pplresponse", 84 | "prismjs", 85 | "quantile", 86 | "querystring", 87 | "reduxjs", 88 | "sigv", 89 | "simplejson", 90 | "smhdwy", 91 | "stdpm", 92 | "stdvar", 93 | "stretchr", 94 | "templating", 95 | "testdata", 96 | "testdb", 97 | "timberio", 98 | "timefield", 99 | "timerange", 100 | "timeseries", 101 | "topk", 102 | "tsdb", 103 | "twochild", 104 | "typecheck", 105 | "typemap", 106 | "ulimits", 107 | "unmarshaling", 108 | "varname", 109 | "visualisation", 110 | "wojtekmaj", 111 | "aoss", 112 | "jaegertracing", 113 | "XGET", 114 | "Prepper", 115 | "idastambuk", 116 | "katebrenner", 117 | "iwysiu", 118 | "sarahzinger", 119 | "jaegertracing", 120 | "Prepper", 121 | "opentelemetry", 122 | "httptrace", 123 | "otelhttptrace", 124 | "testid", 125 | "Menges", 126 | "sympatheticmoose", 127 | "loru", 128 | "nosql", 129 | "Equalf", 130 | "unmarshaled", 131 | "Throughputs", 132 | "mainstat", 133 | "secondarystat", 134 | "x-ndjson", 135 | "Xtorm", 136 | "syslogd", 137 | "gofmt", 138 | "errorsource", 139 | "exphttpclient", 140 | "gomod", 141 | "grafanabot", 142 | "tibdex", 143 | "compatibilitycheck", 144 | "zizmor", 145 | "ranyhb", 146 | "golangci" 147 | ] 148 | } 149 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3.0' 2 | 3 | services: 4 | grafana: 5 | container_name: 'grafana-opensearch-datasource' 6 | platform: 'linux/amd64' 7 | build: 8 | context: ./.config 9 | args: 10 | grafana_image: ${GRAFANA_IMAGE:-grafana-enterprise} 11 | grafana_version: ${GRAFANA_VERSION:-latest} 12 | healthcheck: 13 | test: curl -f http://localhost:3000 || exit 1 14 | start_period: 30s 15 | start_interval: 500ms 16 | ports: 17 | - 3000:3000/tcp 18 | volumes: 19 | - 
./dist:/var/lib/grafana/plugins/grafana-opensearch-datasource 20 | - ./provisioning:/etc/grafana/provisioning 21 | networks: 22 | - opensearch-net 23 | opensearch-node1: 24 | image: opensearchproject/opensearch:latest 25 | container_name: opensearch-node1 26 | environment: 27 | - cluster.name=opensearch-cluster 28 | - node.name=opensearch-node1 29 | - discovery.seed_hosts=opensearch-node1,opensearch-node2 30 | - cluster.initial_master_nodes=opensearch-node1,opensearch-node2 31 | - bootstrap.memory_lock=true # along with the memlock settings below, disables swapping 32 | - 'OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m' # minimum and maximum Java heap size, recommend setting both to 50% of system RAM 33 | - 'OPENSEARCH_INITIAL_ADMIN_PASSWORD=my_%New%_passW0rd!@#' 34 | ulimits: 35 | memlock: 36 | soft: -1 37 | hard: -1 38 | nofile: 39 | soft: 65536 # maximum number of open files for the OpenSearch user, set to at least 65536 on modern systems 40 | hard: 65536 41 | volumes: 42 | - opensearch-data1:/usr/share/opensearch/data 43 | ports: 44 | - 9200:9200 45 | - 9600:9600 # required for Performance Analyzer 46 | expose: 47 | - '9200' 48 | networks: 49 | - opensearch-net 50 | opensearch-node2: 51 | image: opensearchproject/opensearch:latest 52 | container_name: opensearch-node2 53 | environment: 54 | - cluster.name=opensearch-cluster 55 | - node.name=opensearch-node2 56 | - discovery.seed_hosts=opensearch-node1,opensearch-node2 57 | - cluster.initial_master_nodes=opensearch-node1,opensearch-node2 58 | - bootstrap.memory_lock=true 59 | - 'OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m' 60 | - 'OPENSEARCH_INITIAL_ADMIN_PASSWORD=my_%New%_passW0rd!@#' 61 | ulimits: 62 | memlock: 63 | soft: -1 64 | hard: -1 65 | nofile: 66 | soft: 65536 67 | hard: 65536 68 | volumes: 69 | - opensearch-data2:/usr/share/opensearch/data 70 | networks: 71 | - opensearch-net 72 | opensearch-dashboards: 73 | image: opensearchproject/opensearch-dashboards:latest 74 | container_name: opensearch-dashboards 75 | ports: 76 | - 5601:5601 77 | expose: 78 | - '5601' 79 | environment: 80 | OPENSEARCH_HOSTS: '["https://opensearch-node1:9200","https://opensearch-node2:9200"]' 81 | networks: 82 | - opensearch-net 83 | 84 | volumes: 85 | opensearch-data1: 86 | opensearch-data2: 87 | 88 | networks: 89 | opensearch-net: 90 | -------------------------------------------------------------------------------- /docs/img/opensearch-details.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/grafana/opensearch-datasource/6a720a322d4cc7f3b34aaf5ea9d9e290d160ddb5/docs/img/opensearch-details.png -------------------------------------------------------------------------------- /docs/img/pipeline-aggregation-editor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/grafana/opensearch-datasource/6a720a322d4cc7f3b34aaf5ea9d9e290d160ddb5/docs/img/pipeline-aggregation-editor.png -------------------------------------------------------------------------------- /docs/img/query-editor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/grafana/opensearch-datasource/6a720a322d4cc7f3b34aaf5ea9d9e290d160ddb5/docs/img/query-editor.png -------------------------------------------------------------------------------- /docs/img/sigv4.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/grafana/opensearch-datasource/6a720a322d4cc7f3b34aaf5ea9d9e290d160ddb5/docs/img/sigv4.png -------------------------------------------------------------------------------- /docs/img/templating-query.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/grafana/opensearch-datasource/6a720a322d4cc7f3b34aaf5ea9d9e290d160ddb5/docs/img/templating-query.png -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import path from 'node:path'; 2 | import { fileURLToPath } from 'node:url'; 3 | import js from '@eslint/js'; 4 | import { FlatCompat } from '@eslint/eslintrc'; 5 | 6 | const __filename = fileURLToPath(import.meta.url); 7 | const __dirname = path.dirname(__filename); 8 | const compat = new FlatCompat({ 9 | baseDirectory: __dirname, 10 | recommendedConfig: js.configs.recommended, 11 | allConfig: js.configs.all, 12 | }); 13 | 14 | export default [ 15 | { 16 | ignores: ['**/node_modules', '**/build', '**/dist'], 17 | }, 18 | ...compat.extends('./.config/.eslintrc'), 19 | { 20 | rules: { 21 | 'deprecation/deprecation': 'off', 22 | }, 23 | }, 24 | { 25 | files: ['src/**/*.{ts,tsx,js,jsx}'], 26 | rules: { 27 | '@typescript-eslint/no-deprecated': 'warn', 28 | }, 29 | }, 30 | ]; 31 | -------------------------------------------------------------------------------- /jest-setup.js: -------------------------------------------------------------------------------- 1 | // Jest setup provided by Grafana scaffolding 2 | import './.config/jest-setup'; 3 | import * as crypto from 'crypto'; 4 | 5 | Object.defineProperty(global, 'crypto', { 6 | value: { 7 | getRandomValues: (arr) => crypto.randomBytes(arr.length), 8 | subtle: crypto.webcrypto.subtle, 9 | }, 10 | }); 11 | 12 | Object.defineProperty(global, 'matchMedia', { 13 | writable: true, 14 | value: jest.fn().mockImplementation((query) => ({ 15 | matches: false, 16 | media: query, 17 | onchange: null, 18 | addListener: jest.fn(), // deprecated 19 | removeListener: jest.fn(), // deprecated 20 | addEventListener: jest.fn(), 21 | removeEventListener: jest.fn(), 22 | dispatchEvent: jest.fn(), 23 | })), 24 | }); 25 | 26 | // Used by LinkButton -> Text component from grafana/ui 27 | Object.defineProperty(global, 'ResizeObserver', { 28 | value: class ResizeObserver { 29 | //callback: ResizeObserverCallback; 30 | 31 | constructor(callback) { 32 | setTimeout(() => { 33 | callback( 34 | [ 35 | { 36 | contentRect: { 37 | x: 1, 38 | y: 2, 39 | width: 500, 40 | height: 500, 41 | top: 100, 42 | bottom: 0, 43 | left: 100, 44 | right: 0, 45 | }, 46 | target: {}, 47 | }, 48 | ], 49 | this 50 | ); 51 | }); 52 | } 53 | observe() {} 54 | disconnect() {} 55 | unobserve() {} 56 | }, 57 | }); 58 | 59 | Object.defineProperty(global, 'IntersectionObserver', { 60 | value: jest.fn(() => ({ 61 | observe: jest.fn(), 62 | unobserve: jest.fn(), 63 | disconnect: jest.fn(), 64 | takeRecords: jest.fn(), 65 | root: null, 66 | rootMargin: '', 67 | thresholds: [], 68 | })), 69 | }); 70 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | // force timezone to UTC to allow tests to work regardless of local timezone 2 | // generally used by snapshots, but can affect specific tests 3 | process.env.TZ = 'UTC'; 4 | 5 | module.exports = { 6 | 
// Jest configuration provided by Grafana scaffolding 7 | ...require('./.config/jest.config'), 8 | }; 9 | -------------------------------------------------------------------------------- /lefthook.yml: -------------------------------------------------------------------------------- 1 | pre-commit: 2 | parallel: true 3 | commands: 4 | frontend-lint: 5 | glob: '*.{js,ts,tsx}' 6 | run: | 7 | eslint --cache --ignore-path ./.gitignore --fix {staged_files} 8 | yarn prettier --write {staged_files} 9 | stage_fixed: true 10 | backend-format: 11 | glob: '*pkg/**/*.go' 12 | run: gofmt -w -s {staged_files} 13 | stage_fixed: true 14 | spellcheck: 15 | run: yarn cspell -c cspell.config.json --no-must-find-files \"**/*.{ts,tsx,js,go,md,mdx,yml,yaml,json,scss,css}\" {staged_files} 16 | -------------------------------------------------------------------------------- /pkg/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "os" 5 | 6 | "github.com/grafana/grafana-plugin-sdk-go/backend/datasource" 7 | "github.com/grafana/grafana-plugin-sdk-go/backend/log" 8 | "github.com/grafana/opensearch-datasource/pkg/opensearch" 9 | ) 10 | 11 | func main() { 12 | // Start listening to requests send from Grafana. This call is blocking so 13 | // it wont finish until Grafana shuts down the process or the plugin choose 14 | // to exit close down by itself 15 | err := datasource.Manage("grafana-opensearch-datasource", opensearch.NewOpenSearchDatasource, datasource.ManageOpts{}) 16 | 17 | // Log any error if we could not start the plugin. 18 | if err != nil { 19 | log.DefaultLogger.Error(err.Error()) 20 | os.Exit(1) 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /pkg/null/float.go: -------------------------------------------------------------------------------- 1 | package null 2 | 3 | import ( 4 | "database/sql" 5 | ) 6 | 7 | // Float is a nullable float64. 8 | // It does not consider zero values to be null. 9 | // It will decode to null, not zero, if null. 10 | type Float struct { 11 | sql.NullFloat64 12 | } 13 | 14 | // NewFloat creates a new Float 15 | func NewFloat(f float64, valid bool) Float { 16 | return Float{ 17 | NullFloat64: sql.NullFloat64{ 18 | Float64: f, 19 | Valid: valid, 20 | }, 21 | } 22 | } 23 | 24 | // FloatFrom creates a new Float that will always be valid. 25 | func FloatFrom(f float64) Float { 26 | return NewFloat(f, true) 27 | } 28 | 29 | // FloatFromPtr creates a new Float that be null if f is nil. 
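// For example, FloatFromPtr(nil) returns a Float with Valid == false, while
// FloatFromPtr(&v) wraps v as a valid (non-null) value.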
30 | func FloatFromPtr(f *float64) Float { 31 | if f == nil { 32 | return NewFloat(0, false) 33 | } 34 | return NewFloat(*f, true) 35 | } 36 | -------------------------------------------------------------------------------- /pkg/opensearch/client/ppl_request.go: -------------------------------------------------------------------------------- 1 | package client 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | ) 7 | 8 | // PPLRequestBuilder represents a PPL request builder 9 | type PPLRequestBuilder struct { 10 | index string 11 | pplQuery string 12 | } 13 | 14 | // NewPPLRequestBuilder creates a new PPL request builder 15 | func NewPPLRequestBuilder(index string) *PPLRequestBuilder { 16 | builder := &PPLRequestBuilder{ 17 | index: index, 18 | } 19 | return builder 20 | } 21 | 22 | // Build builds and returns a PPL query object 23 | func (b *PPLRequestBuilder) Build() (*PPLRequest, error) { 24 | return &PPLRequest{ 25 | Query: b.pplQuery, 26 | }, nil 27 | } 28 | 29 | // AddPPLQueryString adds a new PPL query string with a time range filter 30 | func (b *PPLRequestBuilder) AddPPLQueryString(timeField, to, from, querystring string) *PPLRequestBuilder { 31 | var res []string 32 | timeFilter := fmt.Sprintf(" where `%s` >= timestamp('%s') and `%s` <= timestamp('%s')", timeField, from, timeField, to) 33 | 34 | trimmedQuerystring := strings.TrimSpace(querystring) 35 | // Sets a default query if the query string is empty 36 | if len(trimmedQuerystring) == 0 { 37 | querystring = fmt.Sprintf("source = %s", b.index) 38 | } 39 | 40 | // Sets a source index if the query string is not empty, but only includes ad-hoc filters. 41 | if strings.HasPrefix(trimmedQuerystring, "| where `") { 42 | querystring = fmt.Sprintf("source = %s %s", b.index, trimmedQuerystring) 43 | } 44 | 45 | // The time range filter always comes right after the source=[index] 46 | querySplit := strings.SplitN(querystring, "|", 2) 47 | if len(querySplit) == 1 { 48 | res = []string{strings.TrimSpace(querySplit[0]), timeFilter} 49 | } else { 50 | res = []string{strings.TrimSpace(querySplit[0]), timeFilter, querySplit[1]} 51 | } 52 | b.pplQuery = strings.Join(res, " |") 53 | return b 54 | } 55 | -------------------------------------------------------------------------------- /pkg/opensearch/ppl_handler.go: -------------------------------------------------------------------------------- 1 | package opensearch 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "github.com/grafana/grafana-plugin-sdk-go/backend" 7 | "github.com/grafana/grafana-plugin-sdk-go/experimental/errorsource" 8 | "github.com/grafana/opensearch-datasource/pkg/opensearch/client" 9 | ) 10 | 11 | type pplHandler struct { 12 | client client.Client 13 | reqQueries []backend.DataQuery 14 | builders map[string]*client.PPLRequestBuilder 15 | queries map[string]*Query 16 | } 17 | 18 | func newPPLHandler(openSearchClient client.Client, queries []backend.DataQuery) *pplHandler { 19 | return &pplHandler{ 20 | client: openSearchClient, 21 | reqQueries: queries, 22 | builders: make(map[string]*client.PPLRequestBuilder), 23 | queries: make(map[string]*Query), 24 | } 25 | } 26 | 27 | func (h *pplHandler) processQuery(q *Query) error { 28 | from := h.reqQueries[0].TimeRange.From.UTC().Format("2006-01-02 15:04:05") 29 | to := h.reqQueries[0].TimeRange.To.UTC().Format("2006-01-02 15:04:05") 30 | 31 | builder := h.client.PPL() 32 | builder.AddPPLQueryString(h.client.GetConfiguredFields().TimeField, to, from, q.RawQuery) 33 | h.builders[q.RefID] = builder 34 | h.queries[q.RefID] = q 35 | return nil 36 | } 37 |
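// Illustrative sketch: examplePPLQueryShape is a hypothetical helper, not taken from the
// plugin source. It shows the query string that processQuery ends up building through the
// PPL request builder above; the index, time range, and raw query below are made-up sample
// values, and the resulting shape matches the PPL snapshot tests later in this repository.
func examplePPLQueryShape() (string, error) {
	const (
		from = "2022-11-14 10:40:37"
		to   = "2022-11-14 10:43:45"
	)
	b := client.NewPPLRequestBuilder("opensearch_dashboards_sample_data_logs")
	// Note the argument order: time field, "to", "from", then the raw query.
	b.AddPPLQueryString("timestamp", to, from, `source = opensearch_dashboards_sample_data_logs | where geo.src = "US"`)
	req, err := b.Build()
	if err != nil {
		return "", err
	}
	// req.Query now reads:
	//   source = opensearch_dashboards_sample_data_logs | where `timestamp` >= timestamp('2022-11-14 10:40:37') and `timestamp` <= timestamp('2022-11-14 10:43:45') | where geo.src = "US"
	// i.e. the time range filter is spliced in directly after the source clause.
	return req.Query, nil
}
38 |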
func (h *pplHandler) executeQueries(ctx context.Context) (*backend.QueryDataResponse, error) { 39 | result := backend.NewQueryDataResponse() 40 | 41 | for refID, builder := range h.builders { 42 | req, err := builder.Build() 43 | if err != nil { 44 | return errorsource.AddPluginErrorToResponse(refID, result, err), nil 45 | } 46 | res, err := h.client.ExecutePPLQuery(ctx, req) 47 | if err != nil { 48 | if backend.IsDownstreamHTTPError(err) { 49 | err = errorsource.DownstreamError(err, false) 50 | } 51 | return errorsource.AddErrorToResponse(refID, result, err), nil 52 | } 53 | if res.Status >= 400 { 54 | details := "(no details)" 55 | if res.Error["reason"] != "" && res.Error["details"] != "" { 56 | details = fmt.Sprintf("%v, %v", res.Error["reason"], res.Error["details"]) 57 | } 58 | err = fmt.Errorf("ExecutePPLQuery received unexpected status code %d: %s", res.Status, details) 59 | if backend.ErrorSourceFromHTTPStatus(res.Status) == backend.ErrorSourceDownstream { 60 | err = backend.DownstreamError(err) 61 | } else { 62 | err = backend.PluginError(err) 63 | } 64 | return &backend.QueryDataResponse{ 65 | Responses: backend.Responses{ 66 | refID: backend.ErrorResponseWithErrorSource(err), 67 | }, 68 | }, nil 69 | } 70 | 71 | query := h.queries[refID] 72 | rp := newPPLResponseParser(res) 73 | queryRes, err := rp.parseResponse(h.client.GetConfiguredFields(), query.Format) 74 | if err != nil { 75 | return nil, err 76 | } 77 | result.Responses[refID] = *queryRes 78 | } 79 | return result, nil 80 | } 81 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/helpers_test.go: -------------------------------------------------------------------------------- 1 | package snapshot_tests 2 | 3 | import ( 4 | "bytes" 5 | "encoding/json" 6 | "fmt" 7 | "io" 8 | "net/http" 9 | "os" 10 | "testing" 11 | "time" 12 | 13 | "github.com/grafana/grafana-plugin-sdk-go/backend" 14 | "github.com/stretchr/testify/require" 15 | ) 16 | 17 | type queryDataTestRoundTripper struct { 18 | requestCallback func(req *http.Request) error 19 | body []byte 20 | statusCode int 21 | } 22 | 23 | // we fake the http-request-call. 
we return a fixed byte-array (defined by the test snapshot), 24 | // and we also check if the http-request-object has the correct data 25 | func (rt *queryDataTestRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { 26 | err := rt.requestCallback(req) 27 | if err != nil { 28 | return nil, err 29 | } 30 | 31 | return &http.Response{ 32 | StatusCode: rt.statusCode, 33 | Header: http.Header{}, 34 | Body: io.NopCloser(bytes.NewReader(rt.body)), 35 | }, nil 36 | } 37 | 38 | func newTestDsSettings() *backend.DataSourceInstanceSettings { 39 | return &backend.DataSourceInstanceSettings{ 40 | JSONData: json.RawMessage(`{ 41 | "database":"opensearch_dashboards_sample_data_flights", 42 | "flavor":"opensearch", 43 | "pplEnabled":true, 44 | "version":"2.3.0", 45 | "timeField":"timestamp", 46 | "interval":"Daily", 47 | "timeInterval":"1s", 48 | "maxConcurrentShardRequests":42 49 | }`), 50 | Database: "[testdb-]YYYY.MM.DD", 51 | URL: "http://localhost:9200", 52 | } 53 | } 54 | 55 | func setUpDataQueriesFromFileWithFixedTimeRange(t *testing.T, fileName string) ([]backend.DataQuery, error) { 56 | t.Helper() 57 | queriesBytes, err := os.ReadFile(fileName) 58 | require.NoError(t, err) 59 | 60 | var jsonBytesArray []json.RawMessage 61 | if err := json.Unmarshal(queriesBytes, &jsonBytesArray); err != nil { 62 | return nil, fmt.Errorf("error unmarshaling queriesBytes: %w", err) 63 | } 64 | var queries []backend.DataQuery 65 | for _, jsonBytes := range jsonBytesArray { 66 | var query = backend.DataQuery{ 67 | TimeRange: backend.TimeRange{ 68 | From: time.UnixMilli(1668422437218), 69 | To: time.UnixMilli(1668422625668), 70 | }, 71 | JSON: jsonBytes, 72 | } 73 | if err := json.Unmarshal(jsonBytes, &query); err != nil { 74 | return nil, err 75 | } 76 | 77 | queries = append(queries, query) 78 | } 79 | return queries, nil 80 | } 81 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/lucene_raw_data_test.go: -------------------------------------------------------------------------------- 1 | package snapshot_tests 2 | 3 | import ( 4 | "context" 5 | "io" 6 | "net/http" 7 | "os" 8 | "testing" 9 | 10 | "github.com/grafana/grafana-plugin-sdk-go/backend" 11 | "github.com/grafana/grafana-plugin-sdk-go/experimental" 12 | "github.com/grafana/opensearch-datasource/pkg/opensearch" 13 | "github.com/stretchr/testify/assert" 14 | "github.com/stretchr/testify/require" 15 | ) 16 | 17 | func Test_raw_data_request(t *testing.T) { 18 | queries, err := setUpDataQueriesFromFileWithFixedTimeRange(t, "testdata/lucene_raw_data.query_input.json") 19 | require.NoError(t, err) 20 | var interceptedRequest []byte 21 | openSearchDatasource := opensearch.OpenSearchDatasource{ 22 | HttpClient: &http.Client{ 23 | // we don't assert the response in this test 24 | Transport: &queryDataTestRoundTripper{body: []byte(`{"responses":[]}`), statusCode: 200, requestCallback: func(req *http.Request) error { 25 | interceptedRequest, err = io.ReadAll(req.Body) 26 | if err != nil { 27 | return err 28 | } 29 | defer func() { 30 | if err := req.Body.Close(); err != nil { 31 | t.Errorf("failed to close request body: %v", err) 32 | } 33 | }() 34 | return nil 35 | }}, 36 | }, 37 | } 38 | 39 | _, err = openSearchDatasource.QueryData(context.Background(), &backend.QueryDataRequest{ 40 | PluginContext: backend.PluginContext{DataSourceInstanceSettings: newTestDsSettings()}, 41 | Headers: nil, 42 | Queries: queries, 43 | }) 44 | require.NoError(t, err) 45 | 46 | // assert request's header and 
query 47 | expectedRequest := `{"ignore_unavailable":true,"index":"","search_type":"query_then_fetch"} 48 | {"fields":[{"field":"timestamp","format":"strict_date_optional_time_nanos"}],"query":{"bool":{"filter":[{"range":{"timestamp":{"format":"epoch_millis","gte":1668422437218,"lte":1668422625668}}},{"query_string":{"analyze_wildcard":true,"query":"FlightNum:*M"}}]}},"size":1337,"sort":[{"timestamp":{"order":"desc","unmapped_type":"boolean"}},{"_doc":{"order":"desc"}}]} 49 | ` 50 | assert.Equal(t, expectedRequest, string(interceptedRequest)) 51 | } 52 | 53 | func Test_raw_data_response(t *testing.T) { 54 | responseFromOpenSearch, err := os.ReadFile("testdata/lucene_raw_data.response_from_opensearch.json") 55 | require.NoError(t, err) 56 | queries, err := setUpDataQueriesFromFileWithFixedTimeRange(t, "testdata/lucene_raw_data.query_input.json") 57 | require.NoError(t, err) 58 | openSearchDatasource := opensearch.OpenSearchDatasource{ 59 | HttpClient: &http.Client{ 60 | Transport: &queryDataTestRoundTripper{body: responseFromOpenSearch, statusCode: 200, requestCallback: func(req *http.Request) error { return nil }}, 61 | }, 62 | } 63 | 64 | result, err := openSearchDatasource.QueryData(context.Background(), &backend.QueryDataRequest{ 65 | PluginContext: backend.PluginContext{DataSourceInstanceSettings: newTestDsSettings()}, 66 | Headers: nil, 67 | Queries: queries, 68 | }) 69 | require.NoError(t, err) 70 | 71 | responseForRefIdA, ok := result.Responses["A"] 72 | assert.True(t, ok) 73 | experimental.CheckGoldenJSONResponse(t, "testdata", "lucene_raw_data.expected_result_generated_snapshot.golden", &responseForRefIdA, false) 74 | } 75 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/lucene_raw_document_test.go: -------------------------------------------------------------------------------- 1 | package snapshot_tests 2 | 3 | import ( 4 | "context" 5 | "io" 6 | "net/http" 7 | "os" 8 | "testing" 9 | 10 | "github.com/grafana/grafana-plugin-sdk-go/backend" 11 | "github.com/grafana/grafana-plugin-sdk-go/experimental" 12 | "github.com/grafana/opensearch-datasource/pkg/opensearch" 13 | "github.com/stretchr/testify/assert" 14 | "github.com/stretchr/testify/require" 15 | ) 16 | 17 | func Test_raw_document_request(t *testing.T) { 18 | queries, err := setUpDataQueriesFromFileWithFixedTimeRange(t, "testdata/lucene_raw_document.query_input.json") 19 | require.NoError(t, err) 20 | var interceptedRequest []byte 21 | openSearchDatasource := opensearch.OpenSearchDatasource{ 22 | HttpClient: &http.Client{ 23 | // we don't assert the response in this test 24 | Transport: &queryDataTestRoundTripper{body: []byte(`{"responses":[]}`), statusCode: 200, requestCallback: func(req *http.Request) error { 25 | interceptedRequest, err = io.ReadAll(req.Body) 26 | if err != nil { 27 | return err 28 | } 29 | defer func() { 30 | if err := req.Body.Close(); err != nil { 31 | t.Errorf("failed to close request body: %v", err) 32 | } 33 | }() 34 | return nil 35 | }}, 36 | }, 37 | } 38 | 39 | _, err = openSearchDatasource.QueryData(context.Background(), &backend.QueryDataRequest{ 40 | PluginContext: backend.PluginContext{DataSourceInstanceSettings: newTestDsSettings()}, 41 | Headers: nil, 42 | Queries: queries, 43 | }) 44 | require.NoError(t, err) 45 | 46 | // assert request's header and query 47 | expectedRequest := `{"ignore_unavailable":true,"index":"","search_type":"query_then_fetch"} 48 | 
{"fields":[{"field":"timestamp","format":"strict_date_optional_time_nanos"}],"query":{"bool":{"filter":[{"range":{"timestamp":{"format":"epoch_millis","gte":1668422437218,"lte":1668422625668}}},{"query_string":{"analyze_wildcard":true,"query":"FlightNum:*M"}}]}},"size":480,"sort":[{"timestamp":{"order":"asc","unmapped_type":"boolean"}},{"_doc":{"order":"asc"}}]} 49 | ` 50 | assert.Equal(t, expectedRequest, string(interceptedRequest)) 51 | } 52 | 53 | func Test_raw_document_response(t *testing.T) { 54 | responseFromOpenSearch, err := os.ReadFile("testdata/lucene_raw_document.response_from_opensearch.json") 55 | require.NoError(t, err) 56 | queries, err := setUpDataQueriesFromFileWithFixedTimeRange(t, "testdata/lucene_raw_document.query_input.json") 57 | require.NoError(t, err) 58 | openSearchDatasource := opensearch.OpenSearchDatasource{ 59 | HttpClient: &http.Client{ 60 | Transport: &queryDataTestRoundTripper{body: responseFromOpenSearch, statusCode: 200, requestCallback: func(req *http.Request) error { return nil }}, 61 | }, 62 | } 63 | 64 | result, err := openSearchDatasource.QueryData(context.Background(), &backend.QueryDataRequest{ 65 | PluginContext: backend.PluginContext{DataSourceInstanceSettings: newTestDsSettings()}, 66 | Headers: nil, 67 | Queries: queries, 68 | }) 69 | require.NoError(t, err) 70 | 71 | responseForRefIdA, ok := result.Responses["A"] 72 | assert.True(t, ok) 73 | experimental.CheckGoldenJSONResponse(t, "testdata", "lucene_raw_document.expected_result_generated_snapshot.golden", &responseForRefIdA, false) 74 | } 75 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/ppl_logs_test.go: -------------------------------------------------------------------------------- 1 | package snapshot_tests 2 | 3 | import ( 4 | "context" 5 | "io" 6 | "net/http" 7 | "os" 8 | "testing" 9 | 10 | "github.com/grafana/grafana-plugin-sdk-go/backend" 11 | "github.com/grafana/grafana-plugin-sdk-go/experimental" 12 | "github.com/grafana/opensearch-datasource/pkg/opensearch" 13 | "github.com/stretchr/testify/assert" 14 | "github.com/stretchr/testify/require" 15 | ) 16 | 17 | func Test_ppl_logs_request(t *testing.T) { 18 | queries, err := setUpDataQueriesFromFileWithFixedTimeRange(t, "testdata/ppl_logs.query_input.json") 19 | require.NoError(t, err) 20 | var interceptedRequest []byte 21 | openSearchDatasource := opensearch.OpenSearchDatasource{ 22 | HttpClient: &http.Client{ 23 | // we don't assert the response in this test 24 | Transport: &queryDataTestRoundTripper{body: []byte(`{"responses":[]}`), statusCode: 200, requestCallback: func(req *http.Request) error { 25 | interceptedRequest, err = io.ReadAll(req.Body) 26 | if err != nil { 27 | return err 28 | } 29 | defer func() { 30 | if err := req.Body.Close(); err != nil { 31 | t.Errorf("failed to close request body: %v", err) 32 | } 33 | }() 34 | return nil 35 | }}, 36 | }, 37 | } 38 | 39 | _, err = openSearchDatasource.QueryData(context.Background(), &backend.QueryDataRequest{ 40 | PluginContext: backend.PluginContext{DataSourceInstanceSettings: newTestDsSettings()}, 41 | Headers: nil, 42 | Queries: queries, 43 | }) 44 | require.NoError(t, err) 45 | 46 | // assert request's header and query 47 | expectedRequest := 48 | `{"query":"source = opensearch_dashboards_sample_data_logs | where` + " `timestamp` " + `>= timestamp('2022-11-14 10:40:37') and` + " `timestamp` " + `<= timestamp('2022-11-14 10:43:45') | where geo.src = \"US\""} 49 | ` 50 | assert.Equal(t, expectedRequest, 
string(interceptedRequest)) 51 | } 52 | 53 | func Test_ppl_logs_response(t *testing.T) { 54 | responseFromOpenSearch, err := os.ReadFile("testdata/ppl_logs.response_from_opensearch.json") 55 | require.NoError(t, err) 56 | queries, err := setUpDataQueriesFromFileWithFixedTimeRange(t, "testdata/ppl_logs.query_input.json") 57 | require.NoError(t, err) 58 | openSearchDatasource := opensearch.OpenSearchDatasource{ 59 | HttpClient: &http.Client{ 60 | Transport: &queryDataTestRoundTripper{body: responseFromOpenSearch, statusCode: 200, requestCallback: func(req *http.Request) error { return nil }}, 61 | }, 62 | } 63 | 64 | result, err := openSearchDatasource.QueryData(context.Background(), &backend.QueryDataRequest{ 65 | PluginContext: backend.PluginContext{DataSourceInstanceSettings: newTestDsSettings()}, 66 | Headers: nil, 67 | Queries: queries, 68 | }) 69 | require.NoError(t, err) 70 | 71 | responseForRefIdA, ok := result.Responses["A"] 72 | assert.True(t, ok) 73 | experimental.CheckGoldenJSONResponse(t, "testdata", "ppl_logs.expected_result_generated_snapshot.golden", &responseForRefIdA, false) 74 | } 75 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/ppl_table_test.go: -------------------------------------------------------------------------------- 1 | package snapshot_tests 2 | 3 | import ( 4 | "context" 5 | "io" 6 | "net/http" 7 | "os" 8 | "testing" 9 | 10 | "github.com/grafana/grafana-plugin-sdk-go/backend" 11 | "github.com/grafana/grafana-plugin-sdk-go/experimental" 12 | "github.com/grafana/opensearch-datasource/pkg/opensearch" 13 | "github.com/stretchr/testify/assert" 14 | "github.com/stretchr/testify/require" 15 | ) 16 | 17 | func Test_ppl_table_request(t *testing.T) { 18 | queries, err := setUpDataQueriesFromFileWithFixedTimeRange(t, "testdata/ppl_table.query_input.json") 19 | require.NoError(t, err) 20 | var interceptedRequest []byte 21 | openSearchDatasource := opensearch.OpenSearchDatasource{ 22 | HttpClient: &http.Client{ 23 | // we don't assert the response in this test 24 | Transport: &queryDataTestRoundTripper{body: []byte(`{"responses":[]}`), statusCode: 200, requestCallback: func(req *http.Request) error { 25 | interceptedRequest, err = io.ReadAll(req.Body) 26 | if err != nil { 27 | return err 28 | } 29 | defer func() { 30 | if err := req.Body.Close(); err != nil { 31 | t.Errorf("failed to close request body: %v", err) 32 | } 33 | }() 34 | return nil 35 | }}, 36 | }, 37 | } 38 | 39 | _, err = openSearchDatasource.QueryData(context.Background(), &backend.QueryDataRequest{ 40 | PluginContext: backend.PluginContext{DataSourceInstanceSettings: newTestDsSettings()}, 41 | Headers: nil, 42 | Queries: queries, 43 | }) 44 | require.NoError(t, err) 45 | 46 | // assert request's header and query 47 | expectedRequest := 48 | `{"query":"search source=opensearch_dashboards_sample_data_flights | where` + " `timestamp` " + `>= timestamp('2022-11-14 10:40:37') and` + " `timestamp` " + `<= timestamp('2022-11-14 10:43:45') | where AvgTicketPrice > 1150 | where FlightDelay = true "} 49 | ` 50 | assert.Equal(t, expectedRequest, string(interceptedRequest)) 51 | } 52 | 53 | func Test_ppl_table_response(t *testing.T) { 54 | responseFromOpenSearch, err := os.ReadFile("testdata/ppl_table.response_from_opensearch.json") 55 | require.NoError(t, err) 56 | queries, err := setUpDataQueriesFromFileWithFixedTimeRange(t, "testdata/ppl_table.query_input.json") 57 | require.NoError(t, err) 58 | openSearchDatasource := 
opensearch.OpenSearchDatasource{ 59 | HttpClient: &http.Client{ 60 | Transport: &queryDataTestRoundTripper{body: responseFromOpenSearch, statusCode: 200, requestCallback: func(req *http.Request) error { return nil }}, 61 | }, 62 | } 63 | 64 | result, err := openSearchDatasource.QueryData(context.Background(), &backend.QueryDataRequest{ 65 | PluginContext: backend.PluginContext{DataSourceInstanceSettings: newTestDsSettings()}, 66 | Headers: nil, 67 | Queries: queries, 68 | }) 69 | require.NoError(t, err) 70 | 71 | responseForRefIdA, ok := result.Responses["A"] 72 | assert.True(t, ok) 73 | experimental.CheckGoldenJSONResponse(t, "testdata", "ppl_table.expected_result_generated_snapshot.golden", &responseForRefIdA, false) 74 | } 75 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_logs.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "datasource": { 4 | "type": "grafana-opensearch-datasource", 5 | "uid": "PE50363A9B6833EE7" 6 | }, 7 | "alias": "", 8 | "bucketAggs": [ 9 | { 10 | "field": "agent.keyword", 11 | "id": "2", 12 | "settings": { 13 | "interval": "1000", 14 | "min_doc_count": "15" 15 | }, 16 | "type": "histogram" 17 | } 18 | ], 19 | "format": "table", 20 | "luceneQueryType": "Metric", 21 | "metrics": [ 22 | { 23 | "id": "1", 24 | "type": "logs" 25 | } 26 | ], 27 | "query": "FlightDelayType:\"Carrier Delay\" AND Carrier:Open*", 28 | "queryType": "lucene", 29 | "refId": "A", 30 | "timeField": "timestamp", 31 | "datasourceId": 4536, 32 | "intervalMs": 1200000, 33 | "maxDataPoints": 1238 34 | } 35 | ] -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_metric_average_derivative_group_by_date_histogram.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "", 4 | "bucketAggs": [ 5 | { 6 | "field": "timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "1d" 10 | }, 11 | "type": "date_histogram" 12 | } 13 | ], 14 | "datasource": { 15 | "type": "grafana-opensearch-datasource", 16 | "uid": "cdba6469-80b4-49c5-84ad-d236738e8b91" 17 | }, 18 | "format": "table", 19 | "metrics": [ 20 | { 21 | "field": "AvgTicketPrice", 22 | "id": "1", 23 | "type": "avg" 24 | }, 25 | { 26 | "field": "1", 27 | "id": "3", 28 | "type": "derivative" 29 | } 30 | ], 31 | "query": "*", 32 | "queryType": "lucene", 33 | "refId": "A", 34 | "timeField": "timestamp", 35 | "datasourceId": 2397, 36 | "intervalMs": 1200000, 37 | "maxDataPoints": 1210 38 | } 39 | ] -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_metric_max_group_by_terms.expected_result_generated_snapshot.golden.jsonc: -------------------------------------------------------------------------------- 1 | // 🌟 This was machine generated. Do not edit. 
🌟 2 | // 3 | // Frame[0] 4 | // Name: 5 | // Dimensions: 2 Fields by 10 Rows 6 | // +----------------------+------------------+ 7 | // | Name: AvgTicketPrice | Name: Max | 8 | // | Labels: | Labels: | 9 | // | Type: []*float64 | Type: []*float64 | 10 | // +----------------------+------------------+ 11 | // | 1199.72900390625 | null | 12 | // | 1199.642822265625 | null | 13 | // | 1199.5123291015625 | null | 14 | // | 1199.4913330078125 | null | 15 | // | 1199.4034423828125 | null | 16 | // | 1199.109130859375 | 1199 | 17 | // | 1198.8525390625 | null | 18 | // | 1198.62158203125 | null | 19 | // | 1198.4901123046875 | null | 20 | // | 1197.78564453125 | null | 21 | // +----------------------+------------------+ 22 | // 23 | // 24 | // 🌟 This was machine generated. Do not edit. 🌟 25 | { 26 | "status": 200, 27 | "frames": [ 28 | { 29 | "schema": { 30 | "fields": [ 31 | { 32 | "name": "AvgTicketPrice", 33 | "type": "number", 34 | "typeInfo": { 35 | "frame": "float64", 36 | "nullable": true 37 | }, 38 | "config": { 39 | "filterable": true 40 | } 41 | }, 42 | { 43 | "name": "Max", 44 | "type": "number", 45 | "typeInfo": { 46 | "frame": "float64", 47 | "nullable": true 48 | } 49 | } 50 | ] 51 | }, 52 | "data": { 53 | "values": [ 54 | [ 55 | 1199.72900390625, 56 | 1199.642822265625, 57 | 1199.5123291015625, 58 | 1199.4913330078125, 59 | 1199.4034423828125, 60 | 1199.109130859375, 61 | 1198.8525390625, 62 | 1198.62158203125, 63 | 1198.4901123046875, 64 | 1197.78564453125 65 | ], 66 | [ 67 | null, 68 | null, 69 | null, 70 | null, 71 | null, 72 | 1199, 73 | null, 74 | null, 75 | null, 76 | null 77 | ] 78 | ] 79 | } 80 | } 81 | ] 82 | } -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_metric_max_group_by_terms.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "", 4 | "bucketAggs": [ 5 | { 6 | "field": "AvgTicketPrice", 7 | "id": "2", 8 | "settings": { 9 | "min_doc_count": "0", 10 | "order": "desc", 11 | "orderBy": "_term", 12 | "size": "10" 13 | }, 14 | "type": "terms" 15 | } 16 | ], 17 | "datasource": { 18 | "type": "grafana-opensearch-datasource", 19 | "uid": "cdba6469-80b4-49c5-84ad-d236738e8b91" 20 | }, 21 | "format": "table", 22 | "metrics": [ 23 | { 24 | "field": "AvgTicketPrice", 25 | "id": "1", 26 | "type": "max" 27 | } 28 | ], 29 | "query": "*", 30 | "queryType": "lucene", 31 | "refId": "A", 32 | "timeField": "timestamp", 33 | "datasourceId": 2397, 34 | "intervalMs": 1800000, 35 | "maxDataPoints": 747 36 | } 37 | ] -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_metric_percentiles_group_by_terms_orderby_percentiles.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "refId": "A", 4 | "datasource": { 5 | "type": "grafana-opensearch-datasource", 6 | "uid": "c338efe8-de17-465c-ba45-9d1fdfca372c" 7 | }, 8 | "query": "*", 9 | "queryType": "lucene", 10 | "luceneQueryType": "Metric", 11 | "alias": "", 12 | "metrics": [ 13 | { 14 | "id": "1", 15 | "type": "percentiles", 16 | "settings": { 17 | "percents": ["50"] 18 | }, 19 | "field": "AvgTicketPrice" 20 | } 21 | ], 22 | "bucketAggs": [ 23 | { 24 | "id": "3", 25 | "type": "terms", 26 | "settings": { 27 | "min_doc_count": "1", 28 | "size": "10", 29 | "order": "desc", 30 | "orderBy": "1[50.0]" 31 | }, 32 | "field": "dayOfWeek" 33 | }, 34 | { 35 | "type": 
"date_histogram", 36 | "id": "2", 37 | "settings": { 38 | "interval": "auto" 39 | }, 40 | "field": "timestamp" 41 | } 42 | ], 43 | "format": "table", 44 | "timeField": "timestamp", 45 | "datasourceId": 11108, 46 | "intervalMs": 900000, 47 | "maxDataPoints": 788 48 | } 49 | ] 50 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_metric_sum_group_by_date_histogram.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "distanceKms", 4 | "bucketAggs": [ 5 | { 6 | "field": "timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "auto", 10 | "min_doc_count": "0", 11 | "trimEdges": "0" 12 | }, 13 | "type": "date_histogram" 14 | } 15 | ], 16 | "datasource": { 17 | "type": "grafana-opensearch-datasource", 18 | "uid": "cdba6469-80b4-49c5-84ad-d236738e8b91" 19 | }, 20 | "format": "table", 21 | "hide": false, 22 | "metrics": [ 23 | { 24 | "field": "DistanceKilometers", 25 | "id": "1", 26 | "type": "sum" 27 | } 28 | ], 29 | "query": "*", 30 | "queryType": "lucene", 31 | "refId": "A", 32 | "timeField": "timestamp", 33 | "datasourceId": 2397, 34 | "intervalMs": 1800000, 35 | "maxDataPoints": 950 36 | } 37 | ] -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_metric_sum_group_by_histogram.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "distanceKms", 4 | "bucketAggs": [ 5 | { 6 | "field": "timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "500", 10 | "min_doc_count": "0" 11 | }, 12 | "type": "histogram" 13 | } 14 | ], 15 | "datasource": { 16 | "type": "grafana-opensearch-datasource", 17 | "uid": "cdba6469-80b4-49c5-84ad-d236738e8b91" 18 | }, 19 | "format": "table", 20 | "hide": false, 21 | "metrics": [ 22 | { 23 | "field": "DistanceKilometers", 24 | "id": "1", 25 | "type": "sum" 26 | } 27 | ], 28 | "query": "*", 29 | "queryType": "lucene", 30 | "refId": "A", 31 | "timeField": "timestamp", 32 | "datasourceId": 2397, 33 | "intervalMs": 1800000, 34 | "maxDataPoints": 950 35 | } 36 | ] 37 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_metric_sum_group_by_histogram_decimal_interval.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "distanceKms", 4 | "bucketAggs": [ 5 | { 6 | "field": "timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "5.5", 10 | "min_doc_count": "0" 11 | }, 12 | "type": "histogram" 13 | } 14 | ], 15 | "datasource": { 16 | "type": "grafana-opensearch-datasource", 17 | "uid": "cdba6469-80b4-49c5-84ad-d236738e8b91" 18 | }, 19 | "format": "table", 20 | "hide": false, 21 | "metrics": [ 22 | { 23 | "field": "DistanceKilometers", 24 | "id": "1", 25 | "type": "sum" 26 | } 27 | ], 28 | "query": "*", 29 | "queryType": "lucene", 30 | "refId": "A", 31 | "timeField": "timestamp", 32 | "datasourceId": 2397, 33 | "intervalMs": 1800000, 34 | "maxDataPoints": 950 35 | } 36 | ] 37 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_metric_sum_group_by_histogram_invalid_interval.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "distanceKms", 4 | "bucketAggs": [ 5 | { 6 | "field": 
"timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "-10", 10 | "min_doc_count": "0" 11 | }, 12 | "type": "histogram" 13 | } 14 | ], 15 | "datasource": { 16 | "type": "grafana-opensearch-datasource", 17 | "uid": "cdba6469-80b4-49c5-84ad-d236738e8b91" 18 | }, 19 | "format": "table", 20 | "hide": false, 21 | "metrics": [ 22 | { 23 | "field": "DistanceKilometers", 24 | "id": "1", 25 | "type": "sum" 26 | } 27 | ], 28 | "query": "*", 29 | "queryType": "lucene", 30 | "refId": "A", 31 | "timeField": "timestamp", 32 | "datasourceId": 2397, 33 | "intervalMs": 1800000, 34 | "maxDataPoints": 950 35 | } 36 | ] 37 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_raw_data.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias":"", 4 | "bucketAggs":[], 5 | "datasource":{ 6 | "type":"grafana-opensearch-datasource", 7 | "uid":"PE50363A9B6833EE7" 8 | }, 9 | "datasourceId":2397, 10 | "format":"table", 11 | "intervalMs":900000, 12 | "maxDataPoints":1835, 13 | "metrics":[ 14 | { 15 | "id":"1", 16 | "settings":{ 17 | "order":"desc", 18 | "size":"1337", 19 | "useTimeRange":true 20 | }, 21 | "type":"raw_data" 22 | } 23 | ], 24 | "query":"FlightNum:*M", 25 | "queryType":"lucene", 26 | "refId":"A", 27 | "timeField":"timestamp" 28 | } 29 | ] -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_raw_document.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "", 4 | "bucketAggs": [], 5 | "datasource": { 6 | "type": "grafana-opensearch-datasource", 7 | "uid": "cdba6469-80b4-49c5-84ad-d236738e8b91" 8 | }, 9 | "format": "table", 10 | "metrics": [ 11 | { 12 | "id": "1", 13 | "settings": { 14 | "order": "asc", 15 | "size": "480", 16 | "useTimeRange": true 17 | }, 18 | "type": "raw_document" 19 | } 20 | ], 21 | "query": "FlightNum:*M", 22 | "queryType": "lucene", 23 | "refId": "A", 24 | "timeField": "timestamp", 25 | "datasourceId": 2397, 26 | "intervalMs": 1200000, 27 | "maxDataPoints": 1150 28 | } 29 | ] -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_service_map_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "", 4 | "bucketAggs": [ 5 | { 6 | "field": "@timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "auto" 10 | }, 11 | "type": "date_histogram" 12 | } 13 | ], 14 | "datasource": { 15 | "type": "grafana-opensearch-datasource", 16 | "uid": "a2a05fd1-c06c-4008-b469-720fea03add2" 17 | }, 18 | "serviceMap": true, 19 | "luceneQueryType": "Traces", 20 | "metrics": [ 21 | { 22 | "id": "1", 23 | "type": "count" 24 | } 25 | ], 26 | "query": "*", 27 | "queryType": "lucene", 28 | "refId": "A", 29 | "timeField": "@timestamp", 30 | "datasourceId": 2020, 31 | "intervalMs": 10000, 32 | "maxDataPoints": 1124 33 | } 34 | ] 35 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_service_map_input_trace_list.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "", 4 | "bucketAggs": [ 5 | { 6 | "field": "@timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "auto" 10 | }, 11 | "type": "date_histogram" 12 | } 13 | ], 14 | "datasource": { 15 | 
"type": "grafana-opensearch-datasource", 16 | "uid": "a2a05fd1-c06c-4008-b469-720fea03add2" 17 | }, 18 | "serviceMap": true, 19 | "luceneQueryType": "Traces", 20 | "metrics": [ 21 | { 22 | "id": "1", 23 | "type": "count" 24 | } 25 | ], 26 | "query": "*", 27 | "queryType": "lucene", 28 | "refId": "A", 29 | "timeField": "@timestamp", 30 | "datasourceId": 2020, 31 | "intervalMs": 10000, 32 | "maxDataPoints": 1124 33 | } 34 | ] 35 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_service_map_input_with_trace_id.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "", 4 | "bucketAggs": [ 5 | { 6 | "field": "@timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "auto" 10 | }, 11 | "type": "date_histogram" 12 | } 13 | ], 14 | "datasource": { 15 | "type": "grafana-opensearch-datasource", 16 | "uid": "a2a05fd1-c06c-4008-b469-720fea03add2" 17 | }, 18 | "serviceMap": true, 19 | "luceneQueryType": "Traces", 20 | "metrics": [ 21 | { 22 | "id": "1", 23 | "type": "count" 24 | } 25 | ], 26 | "query": "traceId:some-trace-id", 27 | "queryType": "lucene", 28 | "refId": "A", 29 | "timeField": "@timestamp", 30 | "datasourceId": 2020, 31 | "intervalMs": 10000, 32 | "maxDataPoints": 1124 33 | } 34 | ] 35 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_trace_list.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "", 4 | "bucketAggs": [ 5 | { 6 | "field": "@timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "auto" 10 | }, 11 | "type": "date_histogram" 12 | } 13 | ], 14 | "datasource": { 15 | "type": "grafana-opensearch-datasource", 16 | "uid": "a2a05fd1-c06c-4008-b469-720fea03add2" 17 | }, 18 | "format": "table", 19 | "luceneQueryType": "Traces", 20 | "metrics": [ 21 | { 22 | "id": "1", 23 | "type": "count" 24 | } 25 | ], 26 | "query": "some query", 27 | "queryType": "lucene", 28 | "refId": "A", 29 | "timeField": "@timestamp", 30 | "datasourceId": 2020, 31 | "intervalMs": 10000, 32 | "maxDataPoints": 1124 33 | } 34 | ] 35 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_trace_list.query_input_multiple.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "", 4 | "bucketAggs": [ 5 | { 6 | "field": "@timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "auto" 10 | }, 11 | "type": "date_histogram" 12 | } 13 | ], 14 | "datasource": { 15 | "type": "grafana-opensearch-datasource", 16 | "uid": "a2a05fd1-c06c-4008-b469-720fea03add2" 17 | }, 18 | "format": "table", 19 | "luceneQueryType": "Traces", 20 | "metrics": [ 21 | { 22 | "id": "1", 23 | "type": "count" 24 | } 25 | ], 26 | "query": "some query", 27 | "queryType": "lucene", 28 | "refId": "A", 29 | "timeField": "@timestamp", 30 | "datasourceId": 2020, 31 | "intervalMs": 10000, 32 | "maxDataPoints": 1124 33 | }, 34 | { 35 | "alias": "", 36 | "bucketAggs": [ 37 | { 38 | "field": "@timestamp", 39 | "id": "2", 40 | "settings": { 41 | "interval": "auto" 42 | }, 43 | "type": "date_histogram" 44 | } 45 | ], 46 | "datasource": { 47 | "type": "grafana-opensearch-datasource", 48 | "uid": "a2a05fd1-c06c-4008-b469-720fea03add2" 49 | }, 50 | "format": "table", 51 | "luceneQueryType": "Traces", 52 | "metrics": [ 53 | { 54 | "id": 
"1", 55 | "type": "count" 56 | } 57 | ], 58 | "query": "some query", 59 | "queryType": "lucene", 60 | "refId": "B", 61 | "timeField": "@timestamp", 62 | "datasourceId": 2020, 63 | "intervalMs": 10000, 64 | "maxDataPoints": 1124 65 | } 66 | ] 67 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_trace_list_and_spans.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "refId": "A", 4 | "datasource": { "type": "grafana-opensearch-datasource", "uid": "aca30e11-e305-46d1-b378-9b29b690bacb" }, 5 | "query": "traceId:test", 6 | "queryType": "lucene", 7 | "alias": "", 8 | "metrics": [{ "type": "count", "id": "1" }], 9 | "bucketAggs": [{ "type": "date_histogram", "id": "2", "settings": { "interval": "auto" }, "field": "@timestamp" }], 10 | "format": "table", 11 | "timeField": "@timestamp", 12 | "luceneQueryType": "Traces", 13 | "datasourceId": 13510, 14 | "intervalMs": 20000, 15 | "maxDataPoints": 1150 16 | }, 17 | { 18 | "alias": "", 19 | "bucketAggs": [ 20 | { 21 | "field": "@timestamp", 22 | "id": "2", 23 | "settings": { 24 | "interval": "auto" 25 | }, 26 | "type": "date_histogram" 27 | } 28 | ], 29 | "datasource": { 30 | "type": "grafana-opensearch-datasource", 31 | "uid": "a2a05fd1-c06c-4008-b469-720fea03add2" 32 | }, 33 | "format": "table", 34 | "luceneQueryType": "Traces", 35 | "metrics": [ 36 | { 37 | "id": "1", 38 | "type": "count" 39 | } 40 | ], 41 | "query": "some query", 42 | "queryType": "lucene", 43 | "refId": "B", 44 | "timeField": "@timestamp", 45 | "datasourceId": 2020, 46 | "intervalMs": 10000, 47 | "maxDataPoints": 1124 48 | } 49 | ] 50 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_trace_spans.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "refId": "A", 4 | "datasource": { "type": "grafana-opensearch-datasource", "uid": "aca30e11-e305-46d1-b378-9b29b690bacb" }, 5 | "query": "traceId:test", 6 | "queryType": "lucene", 7 | "alias": "", 8 | "metrics": [{ "type": "count", "id": "1" }], 9 | "bucketAggs": [{ "type": "date_histogram", "id": "2", "settings": { "interval": "auto" }, "field": "@timestamp" }], 10 | "format": "table", 11 | "timeField": "@timestamp", 12 | "luceneQueryType": "Traces", 13 | "datasourceId": 13510, 14 | "intervalMs": 20000, 15 | "maxDataPoints": 1150 16 | } 17 | ] 18 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/lucene_trace_spans.query_input_multiple.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "refId": "A", 4 | "datasource": { "type": "grafana-opensearch-datasource", "uid": "aca30e11-e305-46d1-b378-9b29b690bacb" }, 5 | "query": "traceId:test", 6 | "queryType": "lucene", 7 | "alias": "", 8 | "metrics": [{ "type": "count", "id": "1" }], 9 | "bucketAggs": [{ "type": "date_histogram", "id": "2", "settings": { "interval": "auto" }, "field": "@timestamp" }], 10 | "format": "table", 11 | "timeField": "@timestamp", 12 | "luceneQueryType": "Traces", 13 | "datasourceId": 13510, 14 | "intervalMs": 20000, 15 | "maxDataPoints": 1150 16 | }, 17 | { 18 | "refId": "B", 19 | "datasource": { "type": "grafana-opensearch-datasource", "uid": "aca30e11-e305-46d1-b378-9b29b690bacb" }, 20 | "query": "traceId:test123", 21 | "queryType": "lucene", 22 | "alias": "", 23 
| "metrics": [{ "type": "count", "id": "1" }], 24 | "bucketAggs": [{ "type": "date_histogram", "id": "2", "settings": { "interval": "auto" }, "field": "@timestamp" }], 25 | "format": "table", 26 | "timeField": "@timestamp", 27 | "luceneQueryType": "Traces", 28 | "datasourceId": 13510, 29 | "intervalMs": 20000, 30 | "maxDataPoints": 1150 31 | } 32 | ] 33 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/ppl_logs.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "refId": "A", 4 | "datasource": { 5 | "type": "grafana-opensearch-datasource", 6 | "uid": "f54cf25a-219b-439a-9793-3113cf2c1dcf" 7 | }, 8 | "query": "source = opensearch_dashboards_sample_data_logs | where geo.src = \"US\"", 9 | "queryType": "PPL", 10 | "alias": "", 11 | "metrics": [ 12 | { 13 | "type": "count", 14 | "id": "1" 15 | } 16 | ], 17 | "bucketAggs": [ 18 | { 19 | "type": "date_histogram", 20 | "id": "2", 21 | "settings": { 22 | "interval": "auto" 23 | }, 24 | "field": "timestamp" 25 | } 26 | ], 27 | "format": "logs", 28 | "timeField": "timestamp", 29 | "key": "Q-72ff95d0-871e-46d3-888c-3e5e540031a0-0", 30 | "datasourceId": 9506, 31 | "intervalMs": 43200000, 32 | "maxDataPoints": 1280 33 | } 34 | ] 35 | -------------------------------------------------------------------------------- /pkg/opensearch/snapshot_tests/testdata/ppl_table.query_input.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "alias": "", 4 | "bucketAggs": [ 5 | { 6 | "field": "timestamp", 7 | "id": "2", 8 | "settings": { 9 | "interval": "auto" 10 | }, 11 | "type": "date_histogram" 12 | } 13 | ], 14 | "datasource": { 15 | "type": "grafana-opensearch-datasource", 16 | "uid": "b12b6921-d5bd-44e8-98c7-defbdbef73dd" 17 | }, 18 | "format": "table", 19 | "metrics": [ 20 | { 21 | "id": "1", 22 | "type": "count" 23 | } 24 | ], 25 | "query": "search source=opensearch_dashboards_sample_data_flights | where AvgTicketPrice > 1150 | where FlightDelay = true ", 26 | "queryType": "PPL", 27 | "refId": "A", 28 | "timeField": "timestamp", 29 | "datasourceId": 276, 30 | "intervalMs": 43200000, 31 | "maxDataPoints": 1125 32 | } 33 | ] -------------------------------------------------------------------------------- /pkg/utils/utils.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "encoding/json" 5 | "errors" 6 | "strconv" 7 | "strings" 8 | "time" 9 | 10 | "github.com/bitly/go-simplejson" 11 | "github.com/grafana/grafana-plugin-sdk-go/backend/log" 12 | "github.com/grafana/opensearch-datasource/pkg/null" 13 | ) 14 | 15 | const ( 16 | TimeFormat = "2006-01-02T15:04:05.999999999Z" 17 | ) 18 | 19 | func NewJsonFromAny(data interface{}) *simplejson.Json { 20 | dataByte, _ := json.Marshal(data) 21 | dataJson, _ := simplejson.NewJson(dataByte) 22 | return dataJson 23 | } 24 | 25 | func NewRawJsonFromAny(data interface{}) []byte { 26 | dataByte, _ := json.Marshal(data) 27 | dataJson, _ := simplejson.NewJson(dataByte) 28 | dataJsonRaw, _ := dataJson.MarshalJSON() 29 | return dataJsonRaw 30 | } 31 | 32 | func NullFloatToNullableTime(ts null.Float) *time.Time { 33 | if !ts.Valid { 34 | return nil 35 | } 36 | 37 | timestamp := time.UnixMilli(int64(ts.Float64)).UTC() 38 | return ×tamp 39 | } 40 | 41 | func FlattenNestedFieldsToObj(field map[string]interface{}) map[string]interface{} { 42 | // from "span.attributes.sampler@type": 
"test" 43 | // to map[span:map[attributes:map[sampler@type:test]]] 44 | result := make(map[string]interface{}) 45 | 46 | for key, value := range field { 47 | keys := strings.Split(key, ".") 48 | current := result 49 | 50 | for i := 0; i < len(keys)-1; i++ { 51 | if _, exists := current[keys[i]]; !exists { 52 | current[keys[i]] = make(map[string]interface{}) 53 | } 54 | current = current[keys[i]].(map[string]interface{}) 55 | } 56 | 57 | current[keys[len(keys)-1]] = value 58 | } 59 | 60 | return result 61 | } 62 | 63 | func TimeFieldToMilliseconds(date interface{}) (int64, error) { 64 | var timestamp *int64 65 | switch timeField := date.(type) { 66 | case string: 67 | t, err := time.Parse(TimeFormat, timeField) 68 | if err != nil { 69 | return 0, err 70 | } 71 | nano := t.UnixNano() / 1e6 72 | timestamp = &nano 73 | case int64: 74 | timestamp = &timeField 75 | default: 76 | return 0, errors.New("unrecognized time format") 77 | } 78 | return *timestamp, nil 79 | } 80 | func SpanHasError(spanEvents []interface{}) bool { 81 | for _, event := range spanEvents { 82 | eventMap, ok := event.(map[string]interface{}) 83 | if !ok { 84 | log.DefaultLogger.Debug("span event is not a map") 85 | continue 86 | } 87 | attributes, ok := eventMap["attributes"].(map[string]interface{}) 88 | if !ok { 89 | log.DefaultLogger.Debug("event attribute is not a map") 90 | continue 91 | } 92 | if attributes["error"] != nil { 93 | return true 94 | } 95 | } 96 | return false 97 | } 98 | 99 | func Pointer[T any](v T) *T { return &v } 100 | 101 | func StringToIntWithDefaultValue(valueStr string, defaultValue int) int { 102 | value, err := strconv.Atoi(valueStr) 103 | if err != nil { 104 | value = defaultValue 105 | } 106 | // In our case, 0 is not a valid value and in this case we default to defaultValue 107 | if value == 0 { 108 | value = defaultValue 109 | } 110 | return value 111 | } 112 | -------------------------------------------------------------------------------- /pkg/utils/utils_test.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stretchr/testify/assert" 7 | "github.com/stretchr/testify/require" 8 | ) 9 | 10 | func Test_TimeFieldToMilliseconds(t *testing.T) { 11 | t.Run("if field is string, return timestamp in milliseconds", func(t *testing.T) { 12 | timestamp, err := TimeFieldToMilliseconds("2021-09-16T15:04:05.000000Z") 13 | require.NoError(t, err) 14 | assert.Equal(t, int64(1631804645000), timestamp) 15 | }) 16 | t.Run("if field is string with nanoseconds, return timestamp in milliseconds", func(t *testing.T) { 17 | timestamp, err := TimeFieldToMilliseconds("2021-09-16T15:04:05.000000000Z") 18 | require.NoError(t, err) 19 | assert.Equal(t, int64(1631804645000), timestamp) 20 | }) 21 | t.Run("if field is invalid time string, return error", func(t *testing.T) { 22 | _, err := TimeFieldToMilliseconds("invalid time") 23 | require.Error(t, err) 24 | }) 25 | t.Run("if field is number, return timestamp in milliseconds", func(t *testing.T) { 26 | timestamp, err := TimeFieldToMilliseconds(int64(1631838937218)) 27 | require.NoError(t, err) 28 | assert.Equal(t, int64(1631838937218), timestamp) 29 | }) 30 | t.Run("if field is unrecognized time format, return error", func(t *testing.T) { 31 | _, err := TimeFieldToMilliseconds(true) 32 | require.Error(t, err) 33 | }) 34 | 35 | } 36 | 37 | func Test_FlattenNestedFieldsToObj(t *testing.T) { 38 | t.Run("create object from dot notation", func(t *testing.T) { 39 | field := 
map[string]interface{}{ 40 | "span.attributes.sampler@type": "test", 41 | } 42 | result := FlattenNestedFieldsToObj(field) 43 | expected := map[string]interface{}{ 44 | "span": map[string]interface{}{ 45 | "attributes": map[string]interface{}{ 46 | "sampler@type": "test", 47 | }, 48 | }, 49 | } 50 | assert.Equal(t, expected, result) 51 | }) 52 | } -------------------------------------------------------------------------------- /playwright.config.ts: -------------------------------------------------------------------------------- 1 | import { dirname } from 'path'; 2 | import { defineConfig, devices } from '@playwright/test'; 3 | import type { PluginOptions } from '@grafana/plugin-e2e'; 4 | 5 | const pluginE2eAuth = `${dirname(require.resolve('@grafana/plugin-e2e'))}/auth`; 6 | 7 | /** 8 | * Read environment variables from file. 9 | * https://github.com/motdotla/dotenv 10 | */ 11 | // import dotenv from 'dotenv'; 12 | // import path from 'path'; 13 | // dotenv.config({ path: path.resolve(__dirname, '.env') }); 14 | 15 | /** 16 | * See https://playwright.dev/docs/test-configuration. 17 | */ 18 | export default defineConfig<PluginOptions>({ 19 | testDir: './tests', 20 | /* Run tests in files in parallel */ 21 | fullyParallel: true, 22 | /* Fail the build on CI if you accidentally left test.only in the source code. */ 23 | forbidOnly: !!process.env.CI, 24 | /* Retry on CI only */ 25 | retries: process.env.CI ? 2 : 0, 26 | /* Opt out of parallel tests on CI. */ 27 | workers: process.env.CI ? 1 : undefined, 28 | /* Reporter to use. See https://playwright.dev/docs/test-reporters */ 29 | reporter: 'html', 30 | /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ 31 | use: { 32 | /* Base URL to use in actions like `await page.goto('/')`. */ 33 | baseURL: 'http://localhost:3000', 34 | 35 | /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */ 36 | trace: 'on-first-retry', 37 | }, 38 | 39 | /* Configure projects for major browsers */ 40 | projects: [ 41 | { 42 | name: 'auth', 43 | testDir: pluginE2eAuth, 44 | testMatch: [/.*\.js/], 45 | }, 46 | { 47 | name: 'run-tests', 48 | use: { 49 | ...devices['Desktop Chrome'], 50 | // @grafana/plugin-e2e writes the auth state to this file, 51 | // the path should not be modified 52 | storageState: 'playwright/.auth/admin.json', 53 | }, 54 | dependencies: ['auth'], 55 | }, 56 | ], 57 | }); 58 | -------------------------------------------------------------------------------- /provisioning/dashboards/aws-opensearch.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | ## This file imports dashboards json from ./conf/provisioning/dashboards/aws-opensearch 4 | 5 | providers: 6 | # a unique provider name 7 | - name: 'AWSOpenSearchDashboards' 8 | # org id. will default to orgId 1 if not specified 9 | orgId: 1 10 | # name of the dashboard folder. Required 11 | folder: 'AWS OpenSearch' 12 | # folder UID. will be automatically generated if not specified 13 | folderUid: '' 14 | # provider type. Required 15 | type: file 16 | # disable dashboard deletion 17 | disableDeletion: false 18 | # enable dashboard editing 19 | editable: true 20 | # how often Grafana will scan for changed dashboards 21 | updateIntervalSeconds: 60 22 | options: 23 | # path to dashboard files on disk.
Required 24 | path: /etc/grafana/provisioning/dashboards/aws-opensearch 25 | -------------------------------------------------------------------------------- /provisioning/datasources/aws-opensearch.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | deleteDatasources: 4 | - name: AWS OpenSearch 5 | orgId: 1 6 | - name: AWS OpenSearch eCommerce Sample 7 | orgId: 1 8 | - name: AWS OpenSearch Web Traffic Sample 9 | orgId: 1 10 | 11 | datasources: 12 | - name: AWS OpenSearch 13 | type: grafana-opensearch-datasource 14 | access: proxy 15 | url: https://host.docker.internal:9200/ 16 | basicAuth: true 17 | basicAuthUser: 'admin' 18 | uid: 'aws-opensearch' 19 | jsonData: 20 | flavor: 'opensearch' 21 | maxConcurrentShardRequests: 5 22 | pplEnabled: true 23 | serverless: false 24 | timeField: '@timestamp' 25 | tlsAuth: false 26 | tlsSkipVerify: true 27 | version: '2.18.0' 28 | versionLabel: 'OpenSearch 2.18.0' 29 | secureJsonData: 30 | basicAuthPassword: 'my_%New%_passW0rd!@#' # password is set in docker-compose.yaml with the `OPENSEARCH_INITIAL_ADMIN_PASSWORD` env var 31 | - name: AWS OpenSearch eCommerce Sample 32 | type: grafana-opensearch-datasource 33 | access: proxy 34 | url: https://host.docker.internal:9200/ 35 | basicAuth: true 36 | basicAuthUser: 'admin' 37 | uid: 'aws-opensearch-ecommerce-sample' 38 | jsonData: 39 | flavor: 'opensearch' 40 | database: 'opensearch_dashboards_sample_data_ecommerce' 41 | maxConcurrentShardRequests: 5 42 | pplEnabled: true 43 | serverless: false 44 | timeField: 'order_date' 45 | tlsAuth: false 46 | tlsSkipVerify: true 47 | version: '2.18.0' 48 | versionLabel: 'OpenSearch 2.18.0' 49 | secureJsonData: 50 | basicAuthPassword: 'my_%New%_passW0rd!@#' # password is set in docker-compose.yaml with the `OPENSEARCH_INITIAL_ADMIN_PASSWORD` env var 51 | - name: AWS OpenSearch Web Traffic Sample 52 | type: grafana-opensearch-datasource 53 | access: proxy 54 | url: https://host.docker.internal:9200/ 55 | basicAuth: true 56 | basicAuthUser: 'admin' 57 | uid: 'aws-opensearch-web-traffic-sample' 58 | jsonData: 59 | flavor: 'opensearch' 60 | database: 'opensearch_dashboards_sample_data_logs' 61 | maxConcurrentShardRequests: 5 62 | pplEnabled: true 63 | serverless: false 64 | timeField: 'timestamp' 65 | tlsAuth: false 66 | tlsSkipVerify: true 67 | version: '2.18.0' 68 | versionLabel: 'OpenSearch 2.18.0' 69 | secureJsonData: 70 | basicAuthPassword: 'my_%New%_passW0rd!@#' # password is set in docker-compose.yaml with the `OPENSEARCH_INITIAL_ADMIN_PASSWORD` env var 71 | -------------------------------------------------------------------------------- /src/__mocks__/DefaultConfigOptions.ts: -------------------------------------------------------------------------------- 1 | import { DataSourceSettings } from '@grafana/data'; 2 | import { Flavor, OpenSearchOptions } from '../types'; 3 | import { createDatasourceSettings } from '../dependencies/mocks'; 4 | 5 | export function createDefaultConfigOptions( 6 | options?: Partial<OpenSearchOptions> 7 | ): DataSourceSettings<OpenSearchOptions> { 8 | return createDatasourceSettings({ 9 | timeField: '@time', 10 | flavor: Flavor.OpenSearch, 11 | version: '1.0.0', 12 | interval: 'Hourly', 13 | timeInterval: '10s', 14 | maxConcurrentShardRequests: 300, 15 | logMessageField: 'test.message', 16 | logLevelField: 'test.level', 17 | database: '', 18 | pplEnabled: false, 19 | serverless: false, 20 | ...options, 21 | }); 22 | } 23 | --------------------------------------------------------------------------------
/src/__mocks__/OpenSearchDatasource.ts: -------------------------------------------------------------------------------- 1 | import { DataSourceInstanceSettings, DataSourcePluginMeta, PluginMetaInfo, PluginType } from '@grafana/data'; 2 | import { OpenSearchDatasource } from 'opensearchDatasource'; 3 | import { Flavor, OpenSearchOptions } from 'types'; 4 | 5 | const info: PluginMetaInfo = { 6 | author: { 7 | name: '', 8 | }, 9 | description: '', 10 | links: [], 11 | logos: { 12 | large: '', 13 | small: '', 14 | }, 15 | screenshots: [], 16 | updated: '', 17 | version: '', 18 | }; 19 | 20 | export const meta: DataSourcePluginMeta = { 21 | id: '', 22 | name: '', 23 | type: PluginType.datasource, 24 | info, 25 | module: '', 26 | baseUrl: '', 27 | }; 28 | 29 | export const OpenSearchSettings: DataSourceInstanceSettings<OpenSearchOptions> = { 30 | jsonData: { 31 | defaultRegion: 'us-west-1', 32 | database: '', 33 | timeField: '', 34 | version: '2.13.0', 35 | flavor: Flavor.OpenSearch, 36 | timeInterval: '', 37 | }, 38 | id: 0, 39 | uid: '', 40 | type: '', 41 | name: 'OpenSearch Test Datasource', 42 | meta, 43 | access: 'direct', 44 | readOnly: false, 45 | }; 46 | export function setupMockedDataSource( 47 | customInstanceSettings: DataSourceInstanceSettings<OpenSearchOptions> = OpenSearchSettings 48 | ) { 49 | const datasource = new OpenSearchDatasource(customInstanceSettings); 50 | return datasource; 51 | } 52 | -------------------------------------------------------------------------------- /src/components/AddRemove.test.tsx: -------------------------------------------------------------------------------- 1 | import '@testing-library/jest-dom'; 2 | import React from 'react'; 3 | import { render, screen } from '@testing-library/react'; 4 | import { AddRemove } from './AddRemove'; 5 | 6 | const noop = () => {}; 7 | 8 | const TestComponent = ({ items }: { items: any[] }) => ( 9 | <> 10 | {items.map((_, index) => ( 11 | <AddRemove key={index} index={index} elements={items} onAdd={noop} onRemove={noop} /> 12 | ))} 13 | </> 14 | ); 15 | 16 | describe('AddRemove Button', () => { 17 | describe("When There's only one element in the list", () => { 18 | it('Should only show the add button', () => { 19 | render(<TestComponent items={['something']} />); 20 | 21 | expect(screen.getByText('add')).toBeInTheDocument(); 22 | expect(screen.queryByText('remove')).not.toBeInTheDocument(); 23 | }); 24 | }); 25 | 26 | describe("When There's more than one element in the list", () => { 27 | it('Should show the remove button on every element', () => { 28 | const items = ['something', 'something else']; 29 | 30 | render(<TestComponent items={items} />); 31 | 32 | expect(screen.getAllByText('remove')).toHaveLength(items.length); 33 | }); 34 | 35 | it('Should show the add button only once', () => { 36 | const items = ['something', 'something else']; 37 | 38 | render(<TestComponent items={items} />); 39 | 40 | expect(screen.getAllByText('add')).toHaveLength(1); 41 | }); 42 | }); 43 | }); 44 | -------------------------------------------------------------------------------- /src/components/AddRemove.tsx: -------------------------------------------------------------------------------- 1 | import { css } from '@emotion/css'; 2 | import React from 'react'; 3 | import { IconButton } from './IconButton'; 4 | 5 | interface Props { 6 | index: number; 7 | elements: any[]; 8 | onAdd: () => void; 9 | onRemove: () => void; 10 | } 11 | 12 | /** 13 | * A component used to show add & remove buttons for mutable lists of values. Whether to show the add or the remove button 14 | * depends on the `index` and `elements` props. This enforces a consistent experience whenever this pattern is used.
15 | */
16 | export const AddRemove = ({ index, onAdd, onRemove, elements }: Props) => {
17 |   return (
18 |     <div
19 |       className={css`
20 |         display: flex;
21 |       `}
22 |     >
23 |       {index === 0 && <IconButton iconName="plus-square" onClick={onAdd} label="add" />}
24 |
25 |       {elements.length >= 2 && <IconButton iconName="minus-square" onClick={onRemove} label="remove" />}
26 |     </div>
27 | ); 28 | }; 29 | -------------------------------------------------------------------------------- /src/components/IconButton.tsx: -------------------------------------------------------------------------------- 1 | import { Icon } from '@grafana/ui'; 2 | import { cx, css } from '@emotion/css'; 3 | import React, { ComponentProps, ButtonHTMLAttributes } from 'react'; 4 | 5 | const SROnly = css` 6 | clip: rect(0 0 0 0); 7 | clip-path: inset(50%); 8 | height: 1px; 9 | overflow: hidden; 10 | position: absolute; 11 | white-space: nowrap; 12 | width: 1px; 13 | `; 14 | 15 | interface Props { 16 | iconName: ComponentProps['name']; 17 | onClick: () => void; 18 | className?: string; 19 | label: string; 20 | } 21 | 22 | export const IconButton = ({ 23 | iconName, 24 | onClick, 25 | className, 26 | label, 27 | ...buttonProps 28 | }: Props & ButtonHTMLAttributes) => ( 29 | 33 | ); 34 | -------------------------------------------------------------------------------- /src/components/MetricPicker.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { css, cx } from '@emotion/css'; 3 | import { SelectableValue } from '@grafana/data'; 4 | import { Segment } from '@grafana/ui'; 5 | import { describeMetric } from '../utils'; 6 | import { MetricAggregation } from './QueryEditor/MetricAggregationsEditor/aggregations'; 7 | 8 | const noWrap = css` 9 | white-space: nowrap; 10 | `; 11 | 12 | const toOption = (metric: MetricAggregation) => ({ 13 | label: describeMetric(metric), 14 | value: metric, 15 | }); 16 | 17 | const toOptions = (metrics: MetricAggregation[]): Array> => metrics.map(toOption); 18 | 19 | interface Props { 20 | options: MetricAggregation[]; 21 | onChange: (e: SelectableValue) => void; 22 | className?: string; 23 | value?: string; 24 | } 25 | 26 | export const MetricPicker = ({ options, onChange, className, value }: Props) => ( 27 | option.id === value)!) 
: undefined} 33 | /> 34 | ); 35 | -------------------------------------------------------------------------------- /src/components/QueryEditor/BucketAggregationsEditor/BucketAggregationEditor.tsx: -------------------------------------------------------------------------------- 1 | import { MetricFindValue, SelectableValue } from '@grafana/data'; 2 | import { Segment, SegmentAsync, InlineSegmentGroup } from '@grafana/ui'; 3 | import React from 'react'; 4 | import { useDispatch } from '../../../hooks/useStatelessReducer'; 5 | import { useDatasource } from '../OpenSearchQueryContext'; 6 | import { segmentStyles } from '../styles'; 7 | import { BucketAggregation, BucketAggregationType, isBucketAggregationWithField } from './aggregations'; 8 | import { SettingsEditor } from './SettingsEditor'; 9 | import { changeBucketAggregationField, changeBucketAggregationType } from './state/actions'; 10 | import { bucketAggregationConfig } from './utils'; 11 | 12 | const bucketAggOptions: Array> = Object.entries(bucketAggregationConfig).map( 13 | ([key, { label }]) => ({ 14 | label, 15 | value: key as BucketAggregationType, 16 | }) 17 | ); 18 | 19 | // @ts-ignore 20 | const toSelectableValue = ({ value, text }: MetricFindValue): SelectableValue => ({ 21 | label: text, 22 | value: `${value || text}`, 23 | }); 24 | 25 | const toOption = (bucketAgg: BucketAggregation) => ({ 26 | label: bucketAggregationConfig[bucketAgg.type].label, 27 | value: bucketAgg.type, 28 | }); 29 | 30 | interface Props { 31 | value: BucketAggregation; 32 | } 33 | 34 | export const BucketAggregationEditor = ({ value }: Props) => { 35 | const datasource = useDatasource(); 36 | const dispatch = useDispatch(); 37 | 38 | // TODO: Move this in a separate hook (and simplify) 39 | const getFields = async () => { 40 | const get = () => { 41 | switch (value.type) { 42 | case 'date_histogram': 43 | return datasource.getFields('date'); 44 | case 'geohash_grid': 45 | return datasource.getFields('geo_point'); 46 | default: 47 | return datasource.getFields(); 48 | } 49 | }; 50 | 51 | return (await get()).map(toSelectableValue); 52 | }; 53 | 54 | return ( 55 | <> 56 | 57 | dispatch(changeBucketAggregationType({ id: value.id, newType: e.value! 
}))} 61 | value={toOption(value)} 62 | /> 63 | 64 | {isBucketAggregationWithField(value) && ( 65 | dispatch(changeBucketAggregationField({ id: value.id, newField: e.value }))} 69 | placeholder="Select Field" 70 | value={value.field} 71 | /> 72 | )} 73 | 74 | 75 | 76 | ); 77 | }; 78 | -------------------------------------------------------------------------------- /src/components/QueryEditor/BucketAggregationsEditor/SettingsEditor/FiltersSettingsEditor/index.tsx: -------------------------------------------------------------------------------- 1 | import { InlineField, Input, QueryField } from '@grafana/ui'; 2 | import { css } from '@emotion/css'; 3 | import React, { useEffect } from 'react'; 4 | import { AddRemove } from '../../../../AddRemove'; 5 | import { useDispatch, useStatelessReducer } from '../../../../../hooks/useStatelessReducer'; 6 | import { Filters } from '../../aggregations'; 7 | import { changeBucketAggregationSetting } from '../../state/actions'; 8 | import { addFilter, changeFilter, removeFilter } from './state/actions'; 9 | import { reducer as filtersReducer } from './state/reducer'; 10 | 11 | interface Props { 12 | value: Filters; 13 | } 14 | 15 | export const FiltersSettingsEditor = ({ value }: Props) => { 16 | const upperStateDispatch = useDispatch(); 17 | 18 | const dispatch = useStatelessReducer( 19 | (newState) => 20 | upperStateDispatch( 21 | changeBucketAggregationSetting({ bucketAgg: value, settingName: 'filters', newValue: newState }) 22 | ), 23 | value.settings?.filters, 24 | filtersReducer 25 | ); 26 | 27 | // The model might not have filters (or an empty array of filters) in it because of the way it was built in previous versions of the datasource. 28 | // If this is the case we add a default one. 29 | useEffect(() => { 30 | if (!value.settings?.filters?.length) { 31 | dispatch(addFilter()); 32 | } 33 | }, [value.settings?.filters?.length, dispatch]); 34 | 35 | return ( 36 | <> 37 |
43 | {value.settings?.filters!.map((filter, index) => ( 44 |
50 |
55 | 56 | {}} 60 | onChange={(query) => dispatch(changeFilter({ index, filter: { ...filter, query } }))} 61 | query={filter.query} 62 | /> 63 | 64 |
65 | 66 | dispatch(changeFilter({ index, filter: { ...filter, label: e.target.value } }))} 69 | defaultValue={filter.label} 70 | /> 71 | 72 | dispatch(addFilter())} 76 | onRemove={() => dispatch(removeFilter(index))} 77 | /> 78 |
79 | ))} 80 |
81 | 82 | ); 83 | }; 84 | -------------------------------------------------------------------------------- /src/components/QueryEditor/BucketAggregationsEditor/SettingsEditor/FiltersSettingsEditor/state/actions.ts: -------------------------------------------------------------------------------- 1 | import { createAction } from '@reduxjs/toolkit'; 2 | import { Filter } from '../../../aggregations'; 3 | import { ADD_FILTER, REMOVE_FILTER, CHANGE_FILTER } from './types'; 4 | 5 | export const addFilter = createAction(ADD_FILTER); 6 | 7 | export const removeFilter = createAction(REMOVE_FILTER); 8 | 9 | export const changeFilter = createAction<{ index: number; filter: Filter }>(CHANGE_FILTER); 10 | -------------------------------------------------------------------------------- /src/components/QueryEditor/BucketAggregationsEditor/SettingsEditor/FiltersSettingsEditor/state/reducer.test.ts: -------------------------------------------------------------------------------- 1 | import { reducerTester } from '../../../../../../reducerTester'; 2 | import { Filter } from '../../../aggregations'; 3 | import { addFilter, changeFilter, removeFilter } from './actions'; 4 | import { reducer } from './reducer'; 5 | 6 | describe('Filters Bucket Aggregation Settings Reducer', () => { 7 | it('Should correctly add new filter', () => { 8 | reducerTester() 9 | .givenReducer(reducer, []) 10 | .whenActionIsDispatched(addFilter()) 11 | .thenStatePredicateShouldEqual((state: Filter[]) => state.length === 1); 12 | }); 13 | 14 | it('Should correctly remove filters', () => { 15 | const firstFilter: Filter = { 16 | label: 'First', 17 | query: '*', 18 | }; 19 | 20 | const secondFilter: Filter = { 21 | label: 'Second', 22 | query: '*', 23 | }; 24 | 25 | reducerTester() 26 | .givenReducer(reducer, [firstFilter, secondFilter]) 27 | .whenActionIsDispatched(removeFilter(0)) 28 | .thenStateShouldEqual([secondFilter]); 29 | }); 30 | 31 | it("Should correctly change filter's attributes", () => { 32 | const firstFilter: Filter = { 33 | label: 'First', 34 | query: '*', 35 | }; 36 | 37 | const secondFilter: Filter = { 38 | label: 'Second', 39 | query: '*', 40 | }; 41 | 42 | const expectedSecondFilter: Filter = { 43 | label: 'Changed label', 44 | query: 'Changed query', 45 | }; 46 | 47 | reducerTester() 48 | .givenReducer(reducer, [firstFilter, secondFilter]) 49 | .whenActionIsDispatched(changeFilter({ index: 1, filter: expectedSecondFilter })) 50 | .thenStateShouldEqual([firstFilter, expectedSecondFilter]); 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /src/components/QueryEditor/BucketAggregationsEditor/SettingsEditor/FiltersSettingsEditor/state/reducer.ts: -------------------------------------------------------------------------------- 1 | import { Action } from '@reduxjs/toolkit'; 2 | import { Filter } from '../../../aggregations'; 3 | import { defaultFilter } from '../utils'; 4 | import { addFilter, removeFilter, changeFilter } from './actions'; 5 | 6 | export const reducer = (state: Filter[] = [], action: Action) => { 7 | if (addFilter.match(action)) { 8 | return [...state, defaultFilter()]; 9 | } 10 | 11 | if (removeFilter.match(action)) { 12 | return state.slice(0, action.payload).concat(state.slice(action.payload + 1)); 13 | } 14 | 15 | if (changeFilter.match(action)) { 16 | return state.map((filter, index) => { 17 | if (index !== action.payload.index) { 18 | return filter; 19 | } 20 | 21 | return action.payload.filter; 22 | }); 23 | } 24 | 25 | return state; 26 | }; 27 
| -------------------------------------------------------------------------------- /src/components/QueryEditor/BucketAggregationsEditor/SettingsEditor/FiltersSettingsEditor/state/types.ts: -------------------------------------------------------------------------------- 1 | import { Action } from '@reduxjs/toolkit'; 2 | import { Filter } from '../../../aggregations'; 3 | 4 | export const ADD_FILTER = '@bucketAggregations/filter/add'; 5 | export const REMOVE_FILTER = '@bucketAggregations/filter/remove'; 6 | export const CHANGE_FILTER = '@bucketAggregations/filter/change'; 7 | 8 | export type AddFilterAction = Action; 9 | 10 | export interface RemoveFilterAction extends Action { 11 | payload: { 12 | index: number; 13 | }; 14 | } 15 | 16 | export interface ChangeFilterAction extends Action { 17 | payload: { 18 | index: number; 19 | filter: Filter; 20 | }; 21 | } 22 | export type FilterAction = AddFilterAction | RemoveFilterAction | ChangeFilterAction; 23 | -------------------------------------------------------------------------------- /src/components/QueryEditor/BucketAggregationsEditor/SettingsEditor/FiltersSettingsEditor/utils.ts: -------------------------------------------------------------------------------- 1 | import { Filter } from '../../aggregations'; 2 | 3 | export const defaultFilter = (): Filter => ({ label: '', query: '*' }); 4 | -------------------------------------------------------------------------------- /src/components/QueryEditor/BucketAggregationsEditor/aggregations.ts: -------------------------------------------------------------------------------- 1 | import { bucketAggregationConfig } from './utils'; 2 | 3 | export type BucketAggregationType = 'terms' | 'filters' | 'geohash_grid' | 'date_histogram' | 'histogram'; 4 | 5 | interface BaseBucketAggregation { 6 | id: string; 7 | type: BucketAggregationType; 8 | settings?: Record; 9 | } 10 | 11 | export interface BucketAggregationWithField extends BaseBucketAggregation { 12 | field?: string; 13 | } 14 | 15 | export interface DateHistogram extends BucketAggregationWithField { 16 | type: 'date_histogram'; 17 | settings?: { 18 | interval?: string; 19 | min_doc_count?: string; 20 | trimEdges?: string; 21 | offset?: string; 22 | }; 23 | } 24 | 25 | export interface Histogram extends BucketAggregationWithField { 26 | type: 'histogram'; 27 | settings?: { 28 | interval?: string; 29 | min_doc_count?: string; 30 | }; 31 | } 32 | 33 | export type TermsOrder = 'desc' | 'asc'; 34 | 35 | export interface Terms extends BucketAggregationWithField { 36 | type: 'terms'; 37 | settings?: { 38 | order?: TermsOrder; 39 | size?: string; 40 | min_doc_count?: string; 41 | orderBy?: string; 42 | missing?: string; 43 | }; 44 | } 45 | 46 | export type Filter = { 47 | query: string; 48 | label: string; 49 | }; 50 | export interface Filters extends BaseBucketAggregation { 51 | type: 'filters'; 52 | settings?: { 53 | filters?: Filter[]; 54 | }; 55 | } 56 | 57 | interface GeoHashGrid extends BucketAggregationWithField { 58 | type: 'geohash_grid'; 59 | settings?: { 60 | precision?: string; 61 | }; 62 | } 63 | 64 | export type BucketAggregation = DateHistogram | Histogram | Terms | Filters | GeoHashGrid; 65 | 66 | export const isBucketAggregationWithField = ( 67 | bucketAgg: BucketAggregation | BucketAggregationWithField 68 | ): bucketAgg is BucketAggregationWithField => bucketAggregationConfig[bucketAgg.type].requiresField; 69 | -------------------------------------------------------------------------------- 
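`aggregations.ts` above models bucket aggregations as a union discriminated by `type`, and `isBucketAggregationWithField` narrows to the variants that carry a `field` by looking up `requiresField` in `bucketAggregationConfig`. A short sketch of how that narrowing is typically used — `describeBucketAgg` is a hypothetical helper written only for illustration:

```ts
import { BucketAggregation, isBucketAggregationWithField } from './aggregations';

// Hypothetical helper: builds a short human-readable label for a bucket aggregation.
function describeBucketAgg(agg: BucketAggregation): string {
  if (isBucketAggregationWithField(agg)) {
    // Narrowed to BucketAggregationWithField, so `field` is safely accessible here.
    return `${agg.type} on ${agg.field ?? '(no field)'}`;
  }
  // Of the types declared above, only 'filters' is defined without a field.
  return agg.type;
}

describeBucketAgg({ id: '2', type: 'date_histogram', field: '@timestamp' }); // "date_histogram on @timestamp"
describeBucketAgg({ id: '3', type: 'filters' }); // "filters"
```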
/src/components/QueryEditor/BucketAggregationsEditor/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { BucketAggregationEditor } from './BucketAggregationEditor'; 3 | import { useDispatch } from '../../../hooks/useStatelessReducer'; 4 | import { addBucketAggregation, removeBucketAggregation } from './state/actions'; 5 | import { BucketAggregation } from './aggregations'; 6 | import { useQuery } from '../OpenSearchQueryContext'; 7 | import { QueryEditorRow } from '../QueryEditorRow'; 8 | import { IconButton } from '../../IconButton'; 9 | 10 | interface Props { 11 | nextId: BucketAggregation['id']; 12 | } 13 | 14 | export const BucketAggregationsEditor = ({ nextId }: Props) => { 15 | const dispatch = useDispatch(); 16 | const { bucketAggs } = useQuery(); 17 | const totalBucketAggs = bucketAggs?.length || 0; 18 | 19 | return ( 20 | <> 21 | {bucketAggs!.map((bucketAgg, index) => ( 22 | dispatch(removeBucketAggregation(bucketAgg.id))} 26 | disableRemove={!(totalBucketAggs > 1)} 27 | > 28 | 29 | 30 | {index === 0 && ( 31 | dispatch(addBucketAggregation(nextId))} label="add" /> 32 | )} 33 | 34 | ))} 35 | 36 | ); 37 | }; 38 | -------------------------------------------------------------------------------- /src/components/QueryEditor/BucketAggregationsEditor/state/actions.ts: -------------------------------------------------------------------------------- 1 | import { createAction } from '@reduxjs/toolkit'; 2 | import { BucketAggregation, BucketAggregationWithField } from '../aggregations'; 3 | import { 4 | ADD_BUCKET_AGG, 5 | REMOVE_BUCKET_AGG, 6 | CHANGE_BUCKET_AGG_TYPE, 7 | CHANGE_BUCKET_AGG_FIELD, 8 | CHANGE_BUCKET_AGG_SETTING, 9 | } from './types'; 10 | 11 | export const addBucketAggregation = createAction(ADD_BUCKET_AGG); 12 | 13 | export const removeBucketAggregation = createAction(REMOVE_BUCKET_AGG); 14 | 15 | export const changeBucketAggregationType = createAction<{ 16 | id: BucketAggregation['id']; 17 | newType: BucketAggregation['type']; 18 | }>(CHANGE_BUCKET_AGG_TYPE); 19 | 20 | export const changeBucketAggregationField = createAction<{ 21 | id: BucketAggregationWithField['id']; 22 | newField: BucketAggregationWithField['field']; 23 | }>(CHANGE_BUCKET_AGG_FIELD); 24 | 25 | export const changeBucketAggregationSetting = createAction<{ 26 | bucketAgg: BucketAggregation; 27 | settingName: string; 28 | newValue: string | string[] | unknown; 29 | }>(CHANGE_BUCKET_AGG_SETTING); 30 | -------------------------------------------------------------------------------- /src/components/QueryEditor/BucketAggregationsEditor/state/types.ts: -------------------------------------------------------------------------------- 1 | export const ADD_BUCKET_AGG = '@bucketAggs/add'; 2 | export const REMOVE_BUCKET_AGG = '@bucketAggs/remove'; 3 | export const CHANGE_BUCKET_AGG_TYPE = '@bucketAggs/change_type'; 4 | export const CHANGE_BUCKET_AGG_FIELD = '@bucketAggs/change_field'; 5 | export const CHANGE_BUCKET_AGG_SETTING = '@bucketAggs/change_setting'; 6 | -------------------------------------------------------------------------------- /src/components/QueryEditor/LuceneQueryEditor/LuceneQueryEditor.tsx: -------------------------------------------------------------------------------- 1 | import { InlineSegmentGroup, InlineField, InlineSwitch, Input, InlineFieldRow } from '@grafana/ui'; 2 | import { useNextId } from 'hooks/useNextId'; 3 | import React from 'react'; 4 | import { LuceneQueryType, OpenSearchQuery } from 'types'; 5 | 
import { BucketAggregationsEditor } from '../BucketAggregationsEditor'; 6 | import { MetricAggregationsEditor } from '../MetricAggregationsEditor'; 7 | import { QueryEditorRow } from '../QueryEditorRow'; 8 | import { LuceneQueryTypeSelector } from './LuceneQueryTypeSelector'; 9 | import { EditorRows } from '@grafana/plugin-ui'; 10 | 11 | type LuceneQueryEditorProps = { 12 | query: OpenSearchQuery; 13 | onChange: (query: OpenSearchQuery) => void; 14 | }; 15 | 16 | export const LuceneQueryEditor = (props: LuceneQueryEditorProps) => { 17 | const luceneQueryType = props.query.luceneQueryType || LuceneQueryType.Metric; 18 | const serviceMapSet = props.query.serviceMap || false; 19 | const nextId = useNextId(); 20 | 21 | return ( 22 | 23 | 24 | 25 | 26 | 27 | 28 | {props.query.luceneQueryType === LuceneQueryType.Traces && ( 29 | 30 | 31 | 32 | { 35 | const newVal = event.currentTarget.checked; 36 | props.onChange({ 37 | ...props.query, 38 | serviceMap: newVal, 39 | }); 40 | }} 41 | /> 42 | 43 | {!serviceMapSet && ( 44 | 45 | { 50 | const newVal = event.target.value; 51 | props.onChange({ 52 | ...props.query, 53 | tracesSize: newVal, 54 | }); 55 | }} 56 | /> 57 | 58 | )} 59 | 60 | 61 | )} 62 | {shouldHaveMetricAggs(luceneQueryType) && } 63 | {shouldHaveBucketAggs(luceneQueryType) && } 64 | 65 | ); 66 | }; 67 | 68 | const shouldHaveBucketAggs = (luceneQueryType: LuceneQueryType): boolean => { 69 | return luceneQueryType === LuceneQueryType.Metric; 70 | }; 71 | const shouldHaveMetricAggs = (luceneQueryType: LuceneQueryType): boolean => { 72 | return luceneQueryType !== LuceneQueryType.Traces; 73 | }; 74 | -------------------------------------------------------------------------------- /src/components/QueryEditor/LuceneQueryEditor/LuceneQueryTypeSelector.tsx: -------------------------------------------------------------------------------- 1 | import { SelectableValue } from '@grafana/data'; 2 | import { RadioButtonGroup } from '@grafana/ui'; 3 | 4 | import React from 'react'; 5 | import { MetricAggregation } from '../MetricAggregationsEditor/aggregations'; 6 | import { LuceneQueryType, OpenSearchQuery } from 'types'; 7 | import { useQuery } from '../OpenSearchQueryContext'; 8 | import { useDispatch } from 'hooks/useStatelessReducer'; 9 | import { metricAggregationConfig } from '../MetricAggregationsEditor/utils'; 10 | import { updateLuceneTypeAndMetrics } from './state'; 11 | 12 | const OPTIONS: Array> = [ 13 | { value: LuceneQueryType.Metric, label: 'Metric' }, 14 | { value: LuceneQueryType.Logs, label: 'Logs' }, 15 | { value: LuceneQueryType.RawData, label: 'Raw Data' }, 16 | { value: LuceneQueryType.RawDocument, label: 'Raw Document' }, 17 | { value: LuceneQueryType.Traces, label: 'Traces' }, 18 | ]; 19 | 20 | function queryTypeToMetricType(type: LuceneQueryType): MetricAggregation['type'] { 21 | switch (type) { 22 | case LuceneQueryType.Logs: 23 | return 'logs'; 24 | case LuceneQueryType.Metric: 25 | return 'count'; 26 | case LuceneQueryType.RawData: 27 | return 'raw_data'; 28 | case LuceneQueryType.RawDocument: 29 | return 'raw_document'; 30 | default: 31 | // should never happen 32 | throw new Error(`Query type ${type} does not have a corresponding metric aggregation`); 33 | } 34 | } 35 | 36 | export const LuceneQueryTypeSelector = (props: { onChange: (query: OpenSearchQuery) => void }) => { 37 | const query = useQuery(); 38 | const dispatch = useDispatch(); 39 | 40 | const firstMetric = query.metrics?.[0]; 41 | 42 | if (firstMetric == null) { 43 | // not sure if this can really happen, but we 
should handle it anyway 44 | return null; 45 | } 46 | 47 | const queryType = 48 | query.luceneQueryType === LuceneQueryType.Traces 49 | ? LuceneQueryType.Traces 50 | : metricAggregationConfig[firstMetric.type].impliedLuceneQueryType; 51 | 52 | const onChangeQueryType = (newQueryType: LuceneQueryType) => { 53 | if (newQueryType !== LuceneQueryType.Traces) { 54 | dispatch( 55 | updateLuceneTypeAndMetrics({ 56 | luceneQueryType: newQueryType, 57 | id: firstMetric.id, 58 | type: queryTypeToMetricType(newQueryType), 59 | }) 60 | ); 61 | } else { 62 | props.onChange({ ...query, luceneQueryType: newQueryType }); 63 | } 64 | }; 65 | 66 | return ( 67 | 68 | fullWidth={false} 69 | options={OPTIONS} 70 | value={queryType} 71 | onChange={onChangeQueryType} 72 | /> 73 | ); 74 | }; 75 | -------------------------------------------------------------------------------- /src/components/QueryEditor/LuceneQueryEditor/state.test.ts: -------------------------------------------------------------------------------- 1 | import { LuceneQueryType, OpenSearchQuery } from 'types'; 2 | import { initQuery } from '../state'; 3 | import { luceneQueryTypeReducer, updateLuceneTypeAndMetrics } from './state'; 4 | import { reducerTester } from 'reducerTester'; 5 | 6 | describe('Lucene Query Type Reducer', () => { 7 | describe('On Init', () => { 8 | it('Should maintain the previous query type if present', () => { 9 | const initialType: LuceneQueryType = LuceneQueryType.Logs; 10 | reducerTester() 11 | .givenReducer(luceneQueryTypeReducer, initialType) 12 | .whenActionIsDispatched(initQuery()) 13 | .thenStateShouldEqual(initialType); 14 | }); 15 | 16 | it('Should set lucene type to Metric if it is not already set', () => { 17 | const initialType: OpenSearchQuery['luceneQueryType'] = undefined; 18 | const expectedType = LuceneQueryType.Metric; 19 | 20 | reducerTester() 21 | .givenReducer(luceneQueryTypeReducer, initialType) 22 | .whenActionIsDispatched(initQuery()) 23 | .thenStateShouldEqual(expectedType); 24 | }); 25 | }); 26 | 27 | it('Should correctly set lucene query type', () => { 28 | const initialType: LuceneQueryType = LuceneQueryType.Traces; 29 | reducerTester() 30 | .givenReducer(luceneQueryTypeReducer, initialType) 31 | .whenActionIsDispatched( 32 | updateLuceneTypeAndMetrics({ luceneQueryType: LuceneQueryType.Logs, id: '1', type: 'count' }) 33 | ) 34 | .thenStateShouldEqual(LuceneQueryType.Logs); 35 | }); 36 | 37 | it('Should not change state with other action types', () => { 38 | const initialType: LuceneQueryType = LuceneQueryType.Traces; 39 | 40 | reducerTester() 41 | .givenReducer(luceneQueryTypeReducer, initialType) 42 | .whenActionIsDispatched({ type: 'THIS ACTION SHOULD NOT HAVE ANY EFFECT IN THIS REDUCER' }) 43 | .thenStateShouldEqual(initialType); 44 | }); 45 | }); 46 | -------------------------------------------------------------------------------- /src/components/QueryEditor/LuceneQueryEditor/state.ts: -------------------------------------------------------------------------------- 1 | import { type Action, createAction } from '@reduxjs/toolkit'; 2 | import { initQuery } from '../state'; 3 | import { LuceneQueryType, OpenSearchQuery } from 'types'; 4 | import { MetricAggregation } from '../MetricAggregationsEditor/aggregations'; 5 | 6 | export const UPDATE_LUCENE_TYPE_AND_METRICS = 'update_lucene_type_and_metrics'; 7 | 8 | export const updateLuceneTypeAndMetrics = createAction<{ 9 | id: string; 10 | type: MetricAggregation['type']; 11 | luceneQueryType: OpenSearchQuery['luceneQueryType']; 12 | 
}>(UPDATE_LUCENE_TYPE_AND_METRICS); 13 | 14 | export const luceneQueryTypeReducer = (prevQueryType: OpenSearchQuery['luceneQueryType'], action: Action) => { 15 | if (updateLuceneTypeAndMetrics.match(action)) { 16 | return action.payload.luceneQueryType; 17 | } 18 | if (initQuery.match(action)) { 19 | return prevQueryType || LuceneQueryType.Metric; 20 | } 21 | 22 | return prevQueryType; 23 | }; 24 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/MetricEditor.test.tsx: -------------------------------------------------------------------------------- 1 | import { render, screen } from '@testing-library/react'; 2 | import userEvent from '@testing-library/user-event'; 3 | import React, { PropsWithChildren } from 'react'; 4 | 5 | import { OpenSearchQuery } from '../../../types'; 6 | import { OpenSearchProvider } from '../OpenSearchQueryContext'; 7 | 8 | import { MetricEditor } from './MetricEditor'; 9 | import { setupMockedDataSource } from '__mocks__/OpenSearchDatasource'; 10 | import { MetricAggregation } from './aggregations'; 11 | 12 | describe('Metric Editor', () => { 13 | it('Should not list special metrics', async () => { 14 | const count: MetricAggregation = { 15 | id: '1', 16 | type: 'count', 17 | }; 18 | 19 | const mockQuery: OpenSearchQuery = { 20 | refId: 'A', 21 | query: '', 22 | metrics: [count], 23 | bucketAggs: [], 24 | }; 25 | 26 | const wrapper = ({ children }: PropsWithChildren<{}>) => ( 27 | 28 | {children} 29 | 30 | ); 31 | 32 | render(, { wrapper }); 33 | await userEvent.click(screen.getByText('Count')); 34 | 35 | // we check if the list-of-options is visible by 36 | // checking for an item to exist 37 | expect(screen.getByText('Extended Stats')).toBeInTheDocument(); 38 | 39 | // now we make sure that special metric aren't shown 40 | expect(screen.queryByText('Logs')).toBeNull(); 41 | expect(screen.queryByText('Raw Data')).toBeNull(); 42 | expect(screen.queryByText('Raw Document')).toBeNull(); 43 | }); 44 | }); 45 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/SettingsEditor/BucketScriptSettingsEditor/state/actions.ts: -------------------------------------------------------------------------------- 1 | import { createAction } from '@reduxjs/toolkit'; 2 | import { 3 | ADD_PIPELINE_VARIABLE, 4 | REMOVE_PIPELINE_VARIABLE, 5 | RENAME_PIPELINE_VARIABLE, 6 | CHANGE_PIPELINE_VARIABLE_METRIC, 7 | } from './types'; 8 | 9 | export const addPipelineVariable = createAction(ADD_PIPELINE_VARIABLE); 10 | 11 | export const removePipelineVariable = createAction(REMOVE_PIPELINE_VARIABLE); 12 | 13 | export const renamePipelineVariable = createAction<{ index: number; newName: string }>(RENAME_PIPELINE_VARIABLE); 14 | 15 | export const changePipelineVariableMetric = createAction<{ index: number; newMetric: string }>( 16 | CHANGE_PIPELINE_VARIABLE_METRIC 17 | ); 18 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/SettingsEditor/BucketScriptSettingsEditor/state/reducer.ts: -------------------------------------------------------------------------------- 1 | import { Action } from '@reduxjs/toolkit'; 2 | import { PipelineVariable } from '../../../aggregations'; 3 | import { defaultPipelineVariable } from '../utils'; 4 | import { 5 | addPipelineVariable, 6 | removePipelineVariable, 7 | renamePipelineVariable, 8 | changePipelineVariableMetric, 
9 | } from './actions'; 10 | 11 | export const reducer = (state: PipelineVariable[] = [], action: Action) => { 12 | if (addPipelineVariable.match(action)) { 13 | return [...state, defaultPipelineVariable()]; 14 | } 15 | 16 | if (removePipelineVariable.match(action)) { 17 | return state.slice(0, action.payload).concat(state.slice(action.payload + 1)); 18 | } 19 | 20 | if (renamePipelineVariable.match(action)) { 21 | return state.map((pipelineVariable, index) => { 22 | if (index !== action.payload.index) { 23 | return pipelineVariable; 24 | } 25 | 26 | return { 27 | ...pipelineVariable, 28 | name: action.payload.newName, 29 | }; 30 | }); 31 | } 32 | 33 | if (changePipelineVariableMetric.match(action)) { 34 | return state.map((pipelineVariable, index) => { 35 | if (index !== action.payload.index) { 36 | return pipelineVariable; 37 | } 38 | 39 | return { 40 | ...pipelineVariable, 41 | pipelineAgg: action.payload.newMetric, 42 | }; 43 | }); 44 | } 45 | 46 | return state; 47 | }; 48 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/SettingsEditor/BucketScriptSettingsEditor/state/types.ts: -------------------------------------------------------------------------------- 1 | export const ADD_PIPELINE_VARIABLE = '@pipelineVariables/add'; 2 | export const REMOVE_PIPELINE_VARIABLE = '@pipelineVariables/remove'; 3 | export const RENAME_PIPELINE_VARIABLE = '@pipelineVariables/rename'; 4 | export const CHANGE_PIPELINE_VARIABLE_METRIC = '@pipelineVariables/change_metric'; 5 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/SettingsEditor/BucketScriptSettingsEditor/utils.ts: -------------------------------------------------------------------------------- 1 | import { PipelineVariable } from '../../aggregations'; 2 | 3 | export const defaultPipelineVariable = (): PipelineVariable => ({ name: 'var1', pipelineAgg: '' }); 4 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/SettingsEditor/SettingField.tsx: -------------------------------------------------------------------------------- 1 | import React, { ComponentProps, useState } from 'react'; 2 | import { InlineField, Input } from '@grafana/ui'; 3 | import { useDispatch } from '../../../../hooks/useStatelessReducer'; 4 | import { changeMetricSetting } from '../state/actions'; 5 | import { SettingKeyOf } from '../../../types'; 6 | import { MetricAggregationWithSettings } from '../aggregations'; 7 | import { uniqueId } from 'lodash'; 8 | 9 | interface Props> { 10 | label: string; 11 | settingName: K; 12 | metric: T; 13 | placeholder?: ComponentProps['placeholder']; 14 | tooltip?: ComponentProps['tooltip']; 15 | } 16 | 17 | export function SettingField>({ 18 | label, 19 | settingName, 20 | metric, 21 | placeholder, 22 | tooltip, 23 | }: Props) { 24 | const dispatch = useDispatch(); 25 | const [id] = useState(uniqueId(`es-field-id-`)); 26 | const settings = metric.settings; 27 | 28 | return ( 29 | 30 | dispatch(changeMetricSetting({ metric, settingName, newValue: e.target.value }))} 34 | defaultValue={settings?.[settingName as keyof typeof settings]} 35 | /> 36 | 37 | ); 38 | } 39 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/SettingsEditor/useDescription.ts: 
-------------------------------------------------------------------------------- 1 | import { extendedStats } from '../../../../query_def'; 2 | import { MetricAggregation } from '../aggregations'; 3 | 4 | const hasValue = (value: string) => (object: { value: string }) => object.value === value; 5 | 6 | // FIXME: All the defaults and validations down here should be defined somewhere else 7 | // as they are also the defaults that are gonna be applied to the query. 8 | // In the previous version, the same method was taking care of describing the settings and setting defaults. 9 | export const useDescription = (metric: MetricAggregation): string => { 10 | switch (metric.type) { 11 | case 'cardinality': { 12 | const precisionThreshold = metric.settings?.precision_threshold || ''; 13 | return `Precision threshold: ${precisionThreshold}`; 14 | } 15 | 16 | case 'percentiles': 17 | if (metric.settings?.percents && metric.settings?.percents?.length >= 1) { 18 | return `Values: ${metric.settings?.percents}`; 19 | } 20 | 21 | return 'Percents: Default'; 22 | 23 | case 'extended_stats': { 24 | const selectedStats = Object.entries(metric.meta || {}) 25 | .map(([key, value]) => value && extendedStats.find(hasValue(key))?.label) 26 | .filter(Boolean); 27 | 28 | return `Stats: ${selectedStats.length > 0 ? selectedStats.join(', ') : 'None selected'}`; 29 | } 30 | 31 | case 'raw_document': 32 | case 'raw_data': { 33 | const size = metric.settings?.size || 500; 34 | let description = `Size: ${size}`; 35 | const order = metric.settings?.order || 'desc'; 36 | description += `, Order: ${order}`; 37 | return description; 38 | } 39 | 40 | default: 41 | return 'Options'; 42 | } 43 | }; 44 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/SpecialMetricAggregationsRow.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | import { InlineFieldRow, InlineLabel, InlineSegmentGroup } from '@grafana/ui'; 4 | 5 | import { SettingsEditor } from './SettingsEditor'; 6 | import { MetricAggregation } from './aggregations'; 7 | 8 | type Props = { 9 | name: string; 10 | metric: MetricAggregation; 11 | info?: string; 12 | }; 13 | 14 | export const SpecialMetricAggregationsRow = ({ name, metric, info }: Props) => { 15 | // this widget is only used in scenarios when there is only a single 16 | // metric, so the array of "previousMetrics" (meaning all the metrics 17 | // before the current metric), is an empty-array 18 | const previousMetrics: MetricAggregation[] = []; 19 | 20 | return ( 21 | 22 | 23 | 24 | {name} 25 | 26 | 27 | 28 | {info != null && ( 29 | 30 | {info} 31 | 32 | )} 33 | 34 | ); 35 | }; 36 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { MetricEditor } from './MetricEditor'; 3 | import { useDispatch } from '../../../hooks/useStatelessReducer'; 4 | import { metricAggregationConfig } from './utils'; 5 | import { addMetric, removeMetric, toggleMetricVisibility } from './state/actions'; 6 | import { MetricAggregation } from './aggregations'; 7 | import { useQuery } from '../OpenSearchQueryContext'; 8 | import { QueryEditorRow } from '../QueryEditorRow'; 9 | import { IconButton } from '../../IconButton'; 10 | import { SpecialMetricAggregationsRow } 
from './SpecialMetricAggregationsRow'; 11 | 12 | interface Props { 13 | nextId: MetricAggregation['id']; 14 | } 15 | 16 | export const MetricAggregationsEditor = ({ nextId }: Props) => { 17 | const dispatch = useDispatch(); 18 | const { metrics } = useQuery(); 19 | const totalMetrics = metrics?.length || 0; 20 | 21 | return ( 22 | <> 23 | {metrics?.map((metric, index) => { 24 | switch (metric.type) { 25 | case 'logs': 26 | return ; 27 | case 'raw_data': 28 | return ; 29 | case 'raw_document': 30 | return ( 31 | 32 | ); 33 | default: 34 | return ( 35 | 49 | ); 50 | } 51 | })} 52 | 53 | ); 54 | }; 55 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/state/actions.ts: -------------------------------------------------------------------------------- 1 | import { createAction } from '@reduxjs/toolkit'; 2 | import { 3 | ExtendedStatMetaType, 4 | MetricAggregation, 5 | MetricAggregationWithMeta, 6 | MetricAggregationWithSettings, 7 | } from '../aggregations'; 8 | import { 9 | ADD_METRIC, 10 | CHANGE_METRIC_FIELD, 11 | CHANGE_METRIC_TYPE, 12 | REMOVE_METRIC, 13 | TOGGLE_METRIC_VISIBILITY, 14 | CHANGE_METRIC_SETTING, 15 | CHANGE_METRIC_META, 16 | CHANGE_METRIC_ATTRIBUTE, 17 | } from './types'; 18 | 19 | export const addMetric = createAction(ADD_METRIC); 20 | 21 | export const removeMetric = createAction(REMOVE_METRIC); 22 | 23 | export const changeMetricType = createAction<{ id: MetricAggregation['id']; type: MetricAggregation['type'] }>( 24 | CHANGE_METRIC_TYPE 25 | ); 26 | 27 | export const changeMetricField = createAction<{ id: MetricAggregation['id']; field: string }>(CHANGE_METRIC_FIELD); 28 | 29 | export const toggleMetricVisibility = createAction(TOGGLE_METRIC_VISIBILITY); 30 | 31 | export const changeMetricAttribute = createAction<{ 32 | metric: MetricAggregation; 33 | attribute: string; 34 | newValue: unknown; 35 | }>(CHANGE_METRIC_ATTRIBUTE); 36 | 37 | export const changeMetricSetting = createAction<{ 38 | metric: MetricAggregationWithSettings; 39 | settingName: string; 40 | newValue: unknown; 41 | }>(CHANGE_METRIC_SETTING); 42 | 43 | export const changeMetricMeta = createAction<{ 44 | metric: MetricAggregationWithMeta; 45 | meta: ExtendedStatMetaType; 46 | newValue: string | number | boolean; 47 | }>(CHANGE_METRIC_META); 48 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/state/types.ts: -------------------------------------------------------------------------------- 1 | export const ADD_METRIC = '@metrics/add'; 2 | export const REMOVE_METRIC = '@metrics/remove'; 3 | export const CHANGE_METRIC_TYPE = '@metrics/change_type'; 4 | export const CHANGE_METRIC_FIELD = '@metrics/change_field'; 5 | export const CHANGE_METRIC_SETTING = '@metrics/change_setting'; 6 | export const CHANGE_METRIC_META = '@metrics/change_meta'; 7 | export const CHANGE_METRIC_ATTRIBUTE = '@metrics/change_attr'; 8 | export const TOGGLE_METRIC_VISIBILITY = '@metrics/toggle_visibility'; 9 | -------------------------------------------------------------------------------- /src/components/QueryEditor/MetricAggregationsEditor/styles.ts: -------------------------------------------------------------------------------- 1 | import { GrafanaTheme } from '@grafana/data'; 2 | import { stylesFactory } from '@grafana/ui'; 3 | import { css } from '@emotion/css'; 4 | 5 | export const getStyles = stylesFactory((theme: GrafanaTheme, hidden: boolean) => ({ 6 | color: 7 | 
hidden && 8 | css` 9 | &, 10 | &:hover, 11 | label, 12 | a { 13 | color: ${hidden ? theme.colors.textFaint : theme.colors.text}; 14 | } 15 | `, 16 | })); 17 | -------------------------------------------------------------------------------- /src/components/QueryEditor/OpenSearchQueryContext.test.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { render, renderHook } from '@testing-library/react'; 3 | import { OpenSearchProvider, useDatasource, useQuery } from './OpenSearchQueryContext'; 4 | import { OpenSearchQuery } from '../../types'; 5 | import { OpenSearchDatasource } from '../../opensearchDatasource'; 6 | 7 | const query: OpenSearchQuery = { 8 | refId: 'A', 9 | query: '', 10 | metrics: [{ id: '1', type: 'count' }], 11 | bucketAggs: [{ type: 'date_histogram', id: '2' }], 12 | }; 13 | 14 | describe('OpenSearchQueryContext', () => { 15 | it('Should call onChange with the default query when the query is empty', () => { 16 | const datasource = { timeField: 'TIMEFIELD' } as OpenSearchDatasource; 17 | const onChange = jest.fn(); 18 | 19 | render(); 20 | 21 | const changedQuery: OpenSearchQuery = onChange.mock.calls[0][0]; 22 | expect(changedQuery.query).toBeDefined(); 23 | expect(changedQuery.alias).toBeDefined(); 24 | expect(changedQuery.metrics).toBeDefined(); 25 | expect(changedQuery.bucketAggs).toBeDefined(); 26 | 27 | // Should also set timeField to the configured `timeField` option in datasource configuration 28 | expect(changedQuery.timeField).toBe(datasource.timeField); 29 | }); 30 | 31 | describe('useQuery Hook', () => { 32 | it('Should throw when used outside of OpenSearchQueryContext', () => { 33 | expect(() => { 34 | renderHook(() => useQuery()); 35 | }).toThrow(); 36 | }); 37 | 38 | it('Should return the current query object', () => { 39 | const wrapper = ({ children }: { children: React.ReactNode }) => ( 40 | {}}> 41 | {children} 42 | 43 | ); 44 | 45 | const { result } = renderHook(() => useQuery(), { 46 | wrapper, 47 | }); 48 | 49 | expect(result.current).toBe(query); 50 | }); 51 | }); 52 | 53 | describe('useDatasource Hook', () => { 54 | it('Should throw when used outside of OpenSearchQueryContext', () => { 55 | expect(() => { 56 | renderHook(() => useDatasource()); 57 | }).toThrow(); 58 | }); 59 | 60 | it('Should return the current datasource instance', () => { 61 | const datasource = {} as OpenSearchDatasource; 62 | 63 | const wrapper = ({ children }: { children: React.ReactNode }) => ( 64 | {}}> 65 | {children} 66 | 67 | ); 68 | 69 | const { result } = renderHook(() => useDatasource(), { 70 | wrapper, 71 | }); 72 | 73 | expect(result.current).toBe(datasource); 74 | }); 75 | }); 76 | }); 77 | -------------------------------------------------------------------------------- /src/components/QueryEditor/OpenSearchQueryContext.tsx: -------------------------------------------------------------------------------- 1 | import React, { createContext, PropsWithChildren, useContext } from 'react'; 2 | import { OpenSearchDatasource } from '../../opensearchDatasource'; 3 | import { combineReducers, useStatelessReducer, DispatchContext } from '../../hooks/useStatelessReducer'; 4 | import { OpenSearchQuery } from '../../types'; 5 | 6 | import { reducer as metricsReducer } from './MetricAggregationsEditor/state/reducer'; 7 | import { createReducer as createBucketAggsReducer } from './BucketAggregationsEditor/state/reducer'; 8 | import { queryTypeReducer } from './QueryTypeEditor/state'; 9 | import { formatReducer 
} from './PPLFormatEditor/state'; 10 | import { aliasPatternReducer, queryReducer, initQuery } from './state'; 11 | import { luceneQueryTypeReducer } from './LuceneQueryEditor/state'; 12 | 13 | const DatasourceContext = createContext(undefined); 14 | const QueryContext = createContext(undefined); 15 | 16 | interface Props { 17 | query: OpenSearchQuery; 18 | onChange: (query: OpenSearchQuery) => void; 19 | datasource: OpenSearchDatasource; 20 | } 21 | 22 | export const OpenSearchProvider = ({ children, onChange, query, datasource }: PropsWithChildren) => { 23 | const reducer = combineReducers< 24 | Pick 25 | >({ 26 | query: queryReducer, 27 | queryType: queryTypeReducer, 28 | luceneQueryType: luceneQueryTypeReducer, 29 | alias: aliasPatternReducer, 30 | metrics: metricsReducer, 31 | bucketAggs: createBucketAggsReducer(datasource.timeField), 32 | format: formatReducer, 33 | }); 34 | 35 | const dispatch = useStatelessReducer( 36 | // timeField is part of the query model, but its value is always set to be the one from datasource settings. 37 | (newState) => onChange({ ...query, ...newState, timeField: datasource.timeField }), 38 | query, 39 | reducer 40 | ); 41 | 42 | // This initializes the query by dispatching an init action to each reducer. 43 | // useStatelessReducer will then call `onChange` with the newly generated query 44 | if (!query.metrics || !query.bucketAggs || query.query === undefined) { 45 | dispatch(initQuery()); 46 | 47 | return null; 48 | } 49 | 50 | return ( 51 | 52 | 53 | {children} 54 | 55 | 56 | ); 57 | }; 58 | 59 | export const useQuery = (): OpenSearchQuery => { 60 | const query = useContext(QueryContext); 61 | 62 | if (!query) { 63 | throw new Error('use OpenSearchProvider first.'); 64 | } 65 | 66 | return query; 67 | }; 68 | 69 | export const useDatasource = () => { 70 | const datasource = useContext(DatasourceContext); 71 | if (!datasource) { 72 | throw new Error('use OpenSearchProvider first.'); 73 | } 74 | 75 | return datasource; 76 | }; 77 | -------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/HelpMessage.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | export const HelpMessage = () => ( 4 |
5 |     <div>
6 |       <h5>Table</h5>
7 |       <ul>
8 |         <li>return any set of columns</li>
9 |       </ul>
10 |       <br />
11 |       <h5>Logs</h5>
12 |       <ul>
13 |         <li>return any set of columns</li>
14 |       </ul>
15 |       <br />
16 |       <h5>Time series</h5>
17 |       <ul>
18 |         <li>return column as date, datetime, or timestamp</li>
19 |         <li>return column with numeric datatype as values</li>
20 |       </ul>
21 |       <br />
22 |       Example PPL query for time series:
23 |       <br />
24 |       <code>source=&lt;index&gt; | eval dateValue=timestamp(timestamp) | stats count(response) by dateValue</code>
25 |     </div>
26 |   </div>
27 | ); 28 | -------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/OpenCloseButton.test.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { OpenCloseButton } from './OpenCloseButton'; 3 | import { render, screen } from '@testing-library/react'; 4 | import userEvent from '@testing-library/user-event'; 5 | 6 | const onClickMock = jest.fn(); 7 | 8 | describe('OpenCloseButton', () => { 9 | it('should render correctly', () => { 10 | render(); 11 | }); 12 | 13 | it('should call onClick when button is clicked', async () => { 14 | render(); 15 | await userEvent.click(screen.getByTestId('open-close-button')); 16 | expect(onClickMock).toHaveBeenCalled(); 17 | }); 18 | }); 19 | -------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/OpenCloseButton.tsx: -------------------------------------------------------------------------------- 1 | import { GrafanaTheme } from '@grafana/data'; 2 | import { Icon, stylesFactory, useTheme } from '@grafana/ui'; 3 | import { css, cx } from '@emotion/css'; 4 | import React from 'react'; 5 | import { segmentStyles } from '../styles'; 6 | 7 | const getStyles = stylesFactory((theme: GrafanaTheme) => { 8 | return { 9 | icon: css` 10 | margin-right: ${theme.spacing.xs}; 11 | `, 12 | button: css` 13 | justify-content: start; 14 | `, 15 | }; 16 | }); 17 | 18 | interface Props { 19 | label: string; 20 | open: boolean; 21 | onClick: () => void; 22 | } 23 | 24 | export const OpenCloseButton = ({ label, open, onClick }: Props) => { 25 | const styles = getStyles(useTheme()); 26 | 27 | return ( 28 | 32 | ); 33 | }; 34 | -------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/SettingsEditor.test.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { SettingsEditor } from './SettingsEditor'; 3 | import { CHANGE_FORMAT } from './state'; 4 | import { render, screen } from '@testing-library/react'; 5 | import userEvent from '@testing-library/user-event'; 6 | import selectEvent from 'react-select-event'; 7 | 8 | const mockDispatch = jest.fn(); 9 | 10 | jest.mock('../../../hooks/useStatelessReducer', () => ({ 11 | useDispatch: jest.fn(() => mockDispatch), 12 | })); 13 | 14 | describe('SettingsEditor', () => { 15 | it('should render correctly', () => { 16 | render(); 17 | }); 18 | it('should dispatch action on change event', async () => { 19 | const expectedAction = { 20 | type: CHANGE_FORMAT, 21 | payload: 'time_series', 22 | }; 23 | render(); 24 | await userEvent.click(screen.getByText('Table')); 25 | const select = screen.getByTestId('settings-editor-wrapper'); 26 | await selectEvent.select(select, 'Time series', { container: document.body }); 27 | expect(mockDispatch).toHaveBeenCalledWith(expectedAction); 28 | }); 29 | }); 30 | -------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/SettingsEditor.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { SelectableValue } from '@grafana/data'; 3 | import { Segment, InlineSegmentGroup } from '@grafana/ui'; 4 | import { useDispatch } from '../../../hooks/useStatelessReducer'; 5 | import { changeFormat } from './state'; 6 | import { 
formatConfig } from './utils'; 7 | import { PPLFormatType } from './formats'; 8 | import { segmentStyles } from '../styles'; 9 | 10 | const queryTypeOptions: Array> = Object.entries(formatConfig).map( 11 | ([key, { label }]) => ({ 12 | label, 13 | value: key as PPLFormatType, 14 | }) 15 | ); 16 | 17 | const toOption = (format: PPLFormatType) => ({ 18 | label: formatConfig[format].label, 19 | value: format, 20 | }); 21 | 22 | interface Props { 23 | value: PPLFormatType; 24 | } 25 | 26 | export const SettingsEditor = ({ value }: Props) => { 27 | const dispatch = useDispatch(); 28 | 29 | return ( 30 | 31 | dispatch(changeFormat(e.value!))} 35 | value={toOption(value)} 36 | /> 37 | 38 | ); 39 | }; 40 | -------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/formats.ts: -------------------------------------------------------------------------------- 1 | export type PPLFormatType = 'table' | 'logs' | 'time_series'; 2 | -------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/index.test.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { PPLFormatEditor } from './'; 3 | import { render, screen } from '@testing-library/react'; 4 | import userEvent from '@testing-library/user-event'; 5 | import { OpenSearchQuery, QueryType } from 'types'; 6 | import { OpenSearchDatasource } from 'opensearchDatasource'; 7 | import { OpenSearchProvider } from '../OpenSearchQueryContext'; 8 | 9 | const pplLogsQuery: OpenSearchQuery = { 10 | refId: 'A', 11 | queryType: QueryType.PPL, 12 | format: 'time_series', 13 | query: 'source = test-index', 14 | bucketAggs: [{ type: 'date_histogram', id: '2' }], 15 | metrics: [{ id: '1', type: 'count' }], 16 | }; 17 | 18 | const setup = () => { 19 | render( 20 | 25 | 26 | 27 | ); 28 | }; 29 | describe('PPLFormatEditor', () => { 30 | it('should render correctly', () => { 31 | setup(); 32 | }); 33 | it('should render all components of PPL format editor row', async () => { 34 | setup(); 35 | expect(await screen.findByText('Format')).toBeInTheDocument(); 36 | expect(screen.getByText('Time series')).toBeInTheDocument(); 37 | expect(screen.getByText('Show help')).toBeInTheDocument(); 38 | expect(screen.queryByTestId('help-message')).not.toBeInTheDocument(); 39 | }); 40 | it('should show help message on click', async () => { 41 | setup(); 42 | const button = await screen.findByText('Show help'); 43 | await userEvent.click(button); 44 | expect(screen.getByTestId('help-message')).toBeInTheDocument(); 45 | }); 46 | }); 47 | -------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/index.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { defaultPPLFormat } from '../../../query_def'; 3 | import { useQuery } from '../OpenSearchQueryContext'; 4 | import { QueryEditorRow } from '../QueryEditorRow'; 5 | import { SettingsEditor } from './SettingsEditor'; 6 | import { OpenCloseButton } from './OpenCloseButton'; 7 | import { HelpMessage } from './HelpMessage'; 8 | 9 | export const PPLFormatEditor = () => { 10 | const { format } = useQuery(); 11 | 12 | const [displayHelp, setDisplayHelp] = useState(false); 13 | 14 | return ( 15 | <> 16 | 17 | 18 | setDisplayHelp(!displayHelp)} /> 19 | 20 | {displayHelp && } 21 | 22 | ); 23 | }; 24 | 
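In the PPL format editor above, `SettingsEditor` only dispatches `changeFormat(e.value!)`; turning that into an updated query happens in the stateless reducer tree that `OpenSearchProvider` wires up, which spreads the new state into the query and calls `onChange`. A rough sketch of that flow, with the final `onChange` call shown as a comment because the provider (not this component) performs it:

```ts
import { changeFormat } from './state';

// The action dispatched when the user picks 'Time series' in the Segment:
const action = changeFormat('time_series');
// => { type: 'change_format', payload: 'time_series' }

// formatReducer returns action.payload, and OpenSearchProvider then emits, roughly:
// onChange({ ...query, format: 'time_series', timeField: datasource.timeField });
```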
-------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/state.test.ts: -------------------------------------------------------------------------------- 1 | import { reducerTester } from '../../../reducerTester'; 2 | import { OpenSearchQuery } from '../../../types'; 3 | import { changeFormat, formatReducer } from './state'; 4 | 5 | describe('Query Type Reducer', () => { 6 | it('Should correctly set `format`', () => { 7 | const expectedFormat: OpenSearchQuery['format'] = 'time_series'; 8 | 9 | reducerTester() 10 | .givenReducer(formatReducer, 'table') 11 | .whenActionIsDispatched(changeFormat(expectedFormat)) 12 | .thenStateShouldEqual(expectedFormat); 13 | }); 14 | 15 | it('Should not change state with other action types', () => { 16 | const initialState: OpenSearchQuery['format'] = 'time_series'; 17 | 18 | reducerTester() 19 | .givenReducer(formatReducer, initialState) 20 | .whenActionIsDispatched({ type: 'THIS ACTION SHOULD NOT HAVE ANY EFFECT IN THIS REDUCER' }) 21 | .thenStateShouldEqual(initialState); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/state.ts: -------------------------------------------------------------------------------- 1 | import { type Action, createAction } from '@reduxjs/toolkit'; 2 | import { OpenSearchQuery } from 'types'; 3 | import { initQuery } from '../state'; 4 | 5 | export const CHANGE_FORMAT = 'change_format'; 6 | 7 | export const changeFormat = createAction(CHANGE_FORMAT); 8 | 9 | export const formatReducer = (prevFormat: OpenSearchQuery['format'], action: Action) => { 10 | if (changeFormat.match(action)) { 11 | return action.payload; 12 | } 13 | if (initQuery.match(action)) { 14 | return 'table'; 15 | } 16 | 17 | return prevFormat; 18 | }; 19 | -------------------------------------------------------------------------------- /src/components/QueryEditor/PPLFormatEditor/utils.ts: -------------------------------------------------------------------------------- 1 | import { FormatConfiguration } from '../../../types'; 2 | 3 | export const formatConfig: FormatConfiguration = { 4 | table: { label: 'Table' }, 5 | logs: { label: 'Logs' }, 6 | time_series: { label: 'Time series' }, 7 | }; 8 | -------------------------------------------------------------------------------- /src/components/QueryEditor/QueryEditorRow.tsx: -------------------------------------------------------------------------------- 1 | import { GrafanaTheme } from '@grafana/data'; 2 | import { IconButton, InlineFieldRow, InlineLabel, stylesFactory, useTheme, InlineSegmentGroup } from '@grafana/ui'; 3 | import { css } from '@emotion/css'; 4 | import { noop } from 'lodash'; 5 | import React, { PropsWithChildren } from 'react'; 6 | 7 | interface Props { 8 | label: string; 9 | onRemoveClick?: false | (() => void); 10 | onHideClick?: false | (() => void); 11 | hidden?: boolean; 12 | disableRemove?: boolean; 13 | } 14 | 15 | export const QueryEditorRow = ({ 16 | children, 17 | label, 18 | onRemoveClick, 19 | onHideClick, 20 | hidden = false, 21 | disableRemove = false, 22 | }: PropsWithChildren) => { 23 | const theme = useTheme(); 24 | const styles = getStyles(theme); 25 | 26 | return ( 27 | 28 | 29 | 30 | {label} 31 | 32 | {onHideClick && ( 33 | 41 | )} 42 | {onRemoveClick && ( 43 | 51 | )} 52 | 53 | 54 | 55 | {children} 56 | 57 | ); 58 | }; 59 | 60 | const getStyles = stylesFactory((theme: GrafanaTheme) => { 61 | return { 62 | 
iconWrapper: css` 63 | display: flex; 64 | `, 65 | icon: css` 66 | color: ${theme.colors.textWeak}; 67 | margin-left: ${theme.spacing.xxs}; 68 | `, 69 | }; 70 | }); 71 | -------------------------------------------------------------------------------- /src/components/QueryEditor/QueryTypeEditor/index.test.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { QueryTypeEditor } from './'; 3 | import { QueryType } from '../../../types'; 4 | import { CHANGE_QUERY_TYPE } from './state'; 5 | import { render, screen } from '@testing-library/react'; 6 | import userEvent from '@testing-library/user-event'; 7 | import selectEvent from 'react-select-event'; 8 | 9 | const mockDatasource = { 10 | getSupportedQueryTypes: () => [QueryType.Lucene, QueryType.PPL], 11 | }; 12 | 13 | jest.mock('../OpenSearchQueryContext', () => ({ 14 | useDatasource: jest.fn(() => mockDatasource), 15 | })); 16 | 17 | const mockDispatch = jest.fn(); 18 | 19 | jest.mock('../../../hooks/useStatelessReducer', () => ({ 20 | useDispatch: jest.fn(() => mockDispatch), 21 | })); 22 | 23 | describe('QueryTypeEditor', () => { 24 | it('should render correctly', () => { 25 | render(); 26 | }); 27 | 28 | it('should dispatch action on change event', async () => { 29 | const expectedAction = { 30 | type: CHANGE_QUERY_TYPE, 31 | payload: QueryType.Lucene, 32 | }; 33 | render(); 34 | await userEvent.click(screen.getByText('PPL')); 35 | const select = screen.getByTestId('query-type-select'); 36 | await selectEvent.select(select, 'Lucene', { container: document.body }); 37 | expect(mockDispatch).toHaveBeenCalledWith(expectedAction); 38 | }); 39 | }); 40 | -------------------------------------------------------------------------------- /src/components/QueryEditor/QueryTypeEditor/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Segment } from '@grafana/ui'; 3 | import { useDatasource } from '../OpenSearchQueryContext'; 4 | import { useDispatch } from '../../../hooks/useStatelessReducer'; 5 | import { changeQueryType } from './state'; 6 | import { queryTypeConfig, getQueryTypeOptions } from './utils'; 7 | import { segmentStyles } from '../styles'; 8 | import { QueryType } from '../../../types'; 9 | 10 | const isValidQueryType = (t?: string): t is QueryType => { 11 | return Object.values(QueryType).includes(t as QueryType); 12 | }; 13 | const toOption = (queryType: QueryType) => { 14 | const config = queryTypeConfig[queryType]; 15 | if (!config) { 16 | return { label: 'Invalid query type', value: '' }; 17 | } 18 | return { 19 | label: config.label, 20 | value: queryType, 21 | }; 22 | }; 23 | 24 | interface Props { 25 | value: QueryType; 26 | } 27 | 28 | export const QueryTypeEditor = ({ value }: Props) => { 29 | const datasource = useDatasource(); 30 | const dispatch = useDispatch(); 31 | 32 | return ( 33 | { 38 | isValidQueryType(e.value) && dispatch(changeQueryType(e.value!)); 39 | }} 40 | value={toOption(value)} 41 | /> 42 | ); 43 | }; 44 | -------------------------------------------------------------------------------- /src/components/QueryEditor/QueryTypeEditor/state.test.ts: -------------------------------------------------------------------------------- 1 | import { reducerTester } from '../../../reducerTester'; 2 | import { OpenSearchQuery, QueryType } from '../../../types'; 3 | import { changeQueryType, queryTypeReducer } from './state'; 4 | 5 | describe('Query Type Reducer', () 
=> { 6 | it('Should correctly set `queryType`', () => { 7 | const expectedQueryType: OpenSearchQuery['queryType'] = QueryType.PPL; 8 | 9 | reducerTester() 10 | .givenReducer(queryTypeReducer, QueryType.Lucene) 11 | .whenActionIsDispatched(changeQueryType(expectedQueryType)) 12 | .thenStateShouldEqual(expectedQueryType); 13 | }); 14 | 15 | it('Should not change state with other action types', () => { 16 | const initialState: OpenSearchQuery['queryType'] = QueryType.Lucene; 17 | 18 | reducerTester() 19 | .givenReducer(queryTypeReducer, initialState) 20 | .whenActionIsDispatched({ type: 'THIS ACTION SHOULD NOT HAVE ANY EFFECT IN THIS REDUCER' }) 21 | .thenStateShouldEqual(initialState); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /src/components/QueryEditor/QueryTypeEditor/state.ts: -------------------------------------------------------------------------------- 1 | import { type Action, createAction } from '@reduxjs/toolkit'; 2 | import { OpenSearchQuery, QueryType } from '../../../types'; 3 | import { initQuery } from '../state'; 4 | 5 | export const CHANGE_QUERY_TYPE = 'change_query_type'; 6 | 7 | export const changeQueryType = createAction(CHANGE_QUERY_TYPE); 8 | 9 | export const queryTypeReducer = (prevQueryType: OpenSearchQuery['queryType'], action: Action) => { 10 | if (changeQueryType.match(action)) { 11 | return action.payload; 12 | } 13 | if (initQuery.match(action)) { 14 | return QueryType.Lucene; 15 | } 16 | 17 | return prevQueryType; 18 | }; 19 | -------------------------------------------------------------------------------- /src/components/QueryEditor/QueryTypeEditor/utils.test.ts: -------------------------------------------------------------------------------- 1 | import { getQueryTypeOptions } from './utils'; 2 | import { QueryType } from '../../../types'; 3 | 4 | describe('getQueryTypeOptions', () => { 5 | describe('given no supported types', () => { 6 | const queryTypeOptions = getQueryTypeOptions([]); 7 | it('should return no query type options', () => { 8 | expect(queryTypeOptions.length).toBe(0); 9 | }); 10 | }); 11 | 12 | describe('given Lucene as a supported type', () => { 13 | const queryTypeOptions = getQueryTypeOptions([QueryType.Lucene]); 14 | it('should return Lucene query type option', () => { 15 | expect(queryTypeOptions.length).toBe(1); 16 | expect(queryTypeOptions[0].value).toBe(QueryType.Lucene); 17 | }); 18 | }); 19 | }); 20 | -------------------------------------------------------------------------------- /src/components/QueryEditor/QueryTypeEditor/utils.ts: -------------------------------------------------------------------------------- 1 | import { SelectableValue } from '@grafana/data'; 2 | import { QueryType, QueryTypeConfiguration } from '../../../types'; 3 | 4 | export const queryTypeConfig: QueryTypeConfiguration = { 5 | [QueryType.Lucene]: { label: 'Lucene' }, 6 | [QueryType.PPL]: { label: 'PPL' }, 7 | }; 8 | 9 | export const getQueryTypeOptions = (supportedTypes: QueryType[]): Array> => { 10 | return Object.entries(queryTypeConfig) 11 | .filter(([queryType, _]) => supportedTypes.includes(queryType as QueryType)) 12 | .map(([key, { label }]) => ({ 13 | label, 14 | value: key as QueryType, 15 | })); 16 | }; 17 | -------------------------------------------------------------------------------- /src/components/QueryEditor/SettingsEditorContainer.tsx: -------------------------------------------------------------------------------- 1 | import { GrafanaTheme } from '@grafana/data'; 2 | import { 
Icon, stylesFactory, useTheme, InlineSegmentGroup } from '@grafana/ui'; 3 | import { css, cx } from '@emotion/css'; 4 | import React, { ReactNode, useState } from 'react'; 5 | import { segmentStyles } from './styles'; 6 | 7 | const getStyles = stylesFactory((theme: GrafanaTheme, hidden: boolean) => { 8 | return { 9 | wrapper: css` 10 | display: flex; 11 | flex-direction: column; 12 | `, 13 | settingsWrapper: css` 14 | padding-top: ${theme.spacing.xs}; 15 | `, 16 | icon: css` 17 | margin-right: ${theme.spacing.xs}; 18 | `, 19 | button: css` 20 | justify-content: start; 21 | ${hidden && 22 | css` 23 | color: ${theme.colors.textFaint}; 24 | `} 25 | `, 26 | }; 27 | }); 28 | interface Props { 29 | label: string; 30 | hidden?: boolean; 31 | children: ReactNode; 32 | } 33 | 34 | export const SettingsEditorContainer = ({ label, children, hidden = false }: Props) => { 35 | const [open, setOpen] = useState(false); 36 | 37 | const styles = getStyles(useTheme(), hidden); 38 | 39 | return ( 40 | 41 |
42 | 51 | 52 | {open && <div className={styles.settingsWrapper}>{children}</div>} 53 |
54 |
55 | ); 56 | }; 57 | -------------------------------------------------------------------------------- /src/components/QueryEditor/state.test.ts: -------------------------------------------------------------------------------- 1 | import { reducerTester } from '../../reducerTester'; 2 | import { OpenSearchQuery } from '../../types'; 3 | import { aliasPatternReducer, changeAliasPattern, changeQuery, initQuery, queryReducer } from './state'; 4 | 5 | describe('Query Reducer', () => { 6 | describe('On Init', () => { 7 | it('Should maintain the previous `query` if present', () => { 8 | const initialQuery: OpenSearchQuery['query'] = 'Some lucene query'; 9 | 10 | reducerTester() 11 | .givenReducer(queryReducer, initialQuery) 12 | .whenActionIsDispatched(initQuery()) 13 | .thenStateShouldEqual(initialQuery); 14 | }); 15 | 16 | it('Should set an empty `query` if it is not already set', () => { 17 | const initialQuery: OpenSearchQuery['query'] = undefined; 18 | const expectedQuery = ''; 19 | 20 | reducerTester() 21 | .givenReducer(queryReducer, initialQuery) 22 | .whenActionIsDispatched(initQuery()) 23 | .thenStateShouldEqual(expectedQuery); 24 | }); 25 | }); 26 | it('Should correctly set `query`', () => { 27 | const expectedQuery: OpenSearchQuery['query'] = 'Some lucene query'; 28 | 29 | reducerTester() 30 | .givenReducer(queryReducer, '') 31 | .whenActionIsDispatched(changeQuery(expectedQuery)) 32 | .thenStateShouldEqual(expectedQuery); 33 | }); 34 | 35 | it('Should not change state with other action types', () => { 36 | const initialState: OpenSearchQuery['query'] = 'Some lucene query'; 37 | 38 | reducerTester() 39 | .givenReducer(queryReducer, initialState) 40 | .whenActionIsDispatched({ type: 'THIS ACTION SHOULD NOT HAVE ANY EFFECT IN THIS REDUCER' }) 41 | .thenStateShouldEqual(initialState); 42 | }); 43 | }); 44 | 45 | describe('Alias Pattern Reducer', () => { 46 | it('Should correctly set `alias`', () => { 47 | const expectedAlias: OpenSearchQuery['alias'] = 'Some alias pattern'; 48 | 49 | reducerTester() 50 | .givenReducer(aliasPatternReducer, '') 51 | .whenActionIsDispatched(changeAliasPattern(expectedAlias)) 52 | .thenStateShouldEqual(expectedAlias); 53 | }); 54 | 55 | it('Should not change state with other action types', () => { 56 | const initialState: OpenSearchQuery['alias'] = 'Some alias pattern'; 57 | 58 | reducerTester() 59 | .givenReducer(aliasPatternReducer, initialState) 60 | .whenActionIsDispatched({ type: 'THIS ACTION SHOULD NOT HAVE ANY EFFECT IN THIS REDUCER' }) 61 | .thenStateShouldEqual(initialState); 62 | }); 63 | }); 64 | -------------------------------------------------------------------------------- /src/components/QueryEditor/state.ts: -------------------------------------------------------------------------------- 1 | import { type Action, createAction } from '@reduxjs/toolkit'; 2 | import { OpenSearchQuery } from 'types'; 3 | 4 | export const INIT = 'init'; 5 | const CHANGE_QUERY = 'change_query'; 6 | const CHANGE_ALIAS_PATTERN = 'change_alias_pattern'; 7 | 8 | export interface InitAction extends Action {} 9 | 10 | /** 11 | * When the `initQuery` Action is dispatched, the query gets populated with default values where values are not present. 12 | * This means it won't override any existing value in place, but just ensure the query is in a "runnable" state. 
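 *
 * Illustrative note (not part of the original comment): with the reducers in this file,
 * `queryReducer('some lucene query', initQuery())` returns the existing query unchanged,
 * while `queryReducer(undefined, initQuery())` falls back to the default `''`.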
13 | */ 14 | export const initQuery = createAction(INIT); 15 | 16 | export const changeQuery = createAction(CHANGE_QUERY); 17 | 18 | export const changeAliasPattern = createAction(CHANGE_ALIAS_PATTERN); 19 | 20 | export const queryReducer = (prevQuery: OpenSearchQuery['query'], action: Action) => { 21 | if (changeQuery.match(action)) { 22 | return action.payload; 23 | } 24 | if (initQuery.match(action)) { 25 | return prevQuery || ''; 26 | } 27 | 28 | return prevQuery; 29 | }; 30 | 31 | export const aliasPatternReducer = (prevAliasPattern: OpenSearchQuery['alias'], action: Action) => { 32 | if (changeAliasPattern.match(action)) { 33 | return action.payload; 34 | } 35 | if (initQuery.match(action)) { 36 | return prevAliasPattern || ''; 37 | } 38 | 39 | return prevAliasPattern; 40 | }; 41 | -------------------------------------------------------------------------------- /src/components/QueryEditor/styles.ts: -------------------------------------------------------------------------------- 1 | import { css } from '@emotion/css'; 2 | 3 | export const segmentStyles = css` 4 | min-width: 150px; 5 | `; 6 | -------------------------------------------------------------------------------- /src/components/types.ts: -------------------------------------------------------------------------------- 1 | export type SettingKeyOf }> = Extract< 2 | keyof NonNullable, 3 | string 4 | >; 5 | -------------------------------------------------------------------------------- /src/configuration/ConfigEditor.test.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { ConfigEditor } from './ConfigEditor'; 3 | import { createDefaultConfigOptions } from '__mocks__/DefaultConfigOptions'; 4 | import { render, screen } from '@testing-library/react'; 5 | 6 | describe('ConfigEditor', () => { 7 | it('should render without error', () => { 8 | render( {}} options={createDefaultConfigOptions()} />); 9 | }); 10 | 11 | it('should render all parts of the config', () => { 12 | render( {}} options={createDefaultConfigOptions()} />); 13 | expect(screen.getByText('HTTP')).toBeInTheDocument(); 14 | expect(screen.getByText('OpenSearch details')).toBeInTheDocument(); 15 | expect(screen.getByText('Logs')).toBeInTheDocument(); 16 | }); 17 | 18 | it('should set defaults', () => { 19 | const options = createDefaultConfigOptions(); 20 | 21 | // @ts-ignore 22 | delete options.jsonData.flavor; 23 | // @ts-ignore 24 | delete options.jsonData.version; 25 | // @ts-ignore 26 | delete options.jsonData.timeField; 27 | delete options.jsonData.maxConcurrentShardRequests; 28 | delete options.jsonData.pplEnabled; 29 | 30 | render( 31 | { 33 | expect(options.jsonData.flavor).toBe(undefined); 34 | expect(options.jsonData.version).toBe(''); 35 | expect(options.jsonData.timeField).toBe('@timestamp'); 36 | expect(options.jsonData.maxConcurrentShardRequests).toBe(0); 37 | expect(options.jsonData.pplEnabled).toBe(true); 38 | }} 39 | options={options} 40 | /> 41 | ); 42 | expect.assertions(5); 43 | }); 44 | 45 | it('should set serverless defaults', () => { 46 | const options = createDefaultConfigOptions(); 47 | 48 | // @ts-ignore 49 | delete options.jsonData.flavor; 50 | // @ts-ignore 51 | delete options.jsonData.version; 52 | // @ts-ignore 53 | delete options.jsonData.timeField; 54 | delete options.jsonData.maxConcurrentShardRequests; 55 | delete options.jsonData.pplEnabled; 56 | options.jsonData.serverless = true; 57 | 58 | render( 59 | { 61 | 
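// Expected serverless defaults: OpenSearch flavor, version 1.0.0, '@timestamp' time field, 5 concurrent shard requests, PPL enabled.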
expect(options.jsonData.flavor).toBe('opensearch'); 62 | expect(options.jsonData.version).toBe('1.0.0'); 63 | expect(options.jsonData.timeField).toBe('@timestamp'); 64 | expect(options.jsonData.maxConcurrentShardRequests).toBe(5); 65 | expect(options.jsonData.pplEnabled).toBe(true); 66 | }} 67 | options={options} 68 | /> 69 | ); 70 | expect.assertions(5); 71 | }); 72 | 73 | it('should not apply default if values are set', () => { 74 | const onChange = jest.fn(); 75 | 76 | render(); 77 | 78 | expect(onChange).toHaveBeenCalledTimes(0); 79 | }); 80 | }); 81 | -------------------------------------------------------------------------------- /src/configuration/DataLinks.test.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { render, screen } from '@testing-library/react'; 3 | import userEvent from '@testing-library/user-event'; 4 | import { DataLinks } from './DataLinks'; 5 | 6 | describe('DataLinks', () => { 7 | let originalGetSelection: typeof window.getSelection; 8 | beforeAll(() => { 9 | originalGetSelection = window.getSelection; 10 | window.getSelection = () => null; 11 | }); 12 | 13 | afterAll(() => { 14 | window.getSelection = originalGetSelection; 15 | }); 16 | 17 | it('renders correctly when no fields', async () => { 18 | render( {}} />); 19 | expect(screen.getByTestId('button-add')).toBeInTheDocument(); 20 | expect(screen.queryByText('Field')).not.toBeInTheDocument(); 21 | }); 22 | 23 | it('renders correctly when there are fields', async () => { 24 | render( {}} />); 25 | await userEvent.click(screen.getByTestId('button-add')); 26 | expect(screen.getByTestId('button-add')).toBeInTheDocument(); 27 | expect(screen.getAllByText('Field').length).toBe(2); 28 | }); 29 | 30 | it('adds new field', async () => { 31 | const onChangeMock = jest.fn(); 32 | render(); 33 | await userEvent.click(screen.getByTestId('button-add')); 34 | expect(onChangeMock.mock.calls[0][0].length).toBe(1); 35 | }); 36 | 37 | it('removes field', async () => { 38 | const onChangeMock = jest.fn(); 39 | render(); 40 | await userEvent.click(screen.getByTestId('remove-button-regex1')); 41 | const newValue = onChangeMock.mock.calls[0][0]; 42 | expect(newValue.length).toBe(1); 43 | expect(newValue[0]).toMatchObject({ 44 | field: 'regex2', 45 | url: 'localhost2', 46 | }); 47 | }); 48 | }); 49 | 50 | const testValue = [ 51 | { 52 | field: 'regex1', 53 | url: 'localhost1', 54 | }, 55 | { 56 | field: 'regex2', 57 | url: 'localhost2', 58 | }, 59 | ]; 60 | -------------------------------------------------------------------------------- /src/configuration/DataLinks.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { css } from '@emotion/css'; 3 | import { Button, useStyles } from '@grafana/ui'; 4 | import { GrafanaTheme, VariableOrigin, DataLinkBuiltInVars } from '@grafana/data'; 5 | import { DataLinkConfig } from '../types'; 6 | import { DataLink } from './DataLink'; 7 | 8 | const getStyles = (theme: GrafanaTheme) => ({ 9 | infoText: css` 10 | padding-bottom: ${theme.spacing.md}; 11 | color: ${theme.colors.textWeak}; 12 | `, 13 | dataLink: css` 14 | margin-bottom: ${theme.spacing.sm}; 15 | `, 16 | }); 17 | 18 | type Props = { 19 | value?: DataLinkConfig[]; 20 | onChange: (value: DataLinkConfig[]) => void; 21 | }; 22 | export const DataLinks = (props: Props) => { 23 | const { value, onChange } = props; 24 | const styles = useStyles(getStyles); 25 | 26 | return ( 27 | <> 28 |

<h3 className="page-heading">Data links</h3>

29 | 30 |
31 | <div className={styles.infoText}>Add links to existing fields. Links will be shown in log row details next to the field value.</div> 32 |
33 | 34 | {value && value.length > 0 && ( 35 |
36 | {value.map((field, index) => { 37 | return ( 38 | { 44 | const newDataLinks = [...value]; 45 | newDataLinks.splice(index, 1, newField); 46 | onChange(newDataLinks); 47 | }} 48 | onDelete={() => { 49 | const newDataLinks = [...value]; 50 | newDataLinks.splice(index, 1); 51 | onChange(newDataLinks); 52 | }} 53 | suggestions={[ 54 | { 55 | value: DataLinkBuiltInVars.valueRaw, 56 | label: 'Raw value', 57 | documentation: 'Raw value of the field', 58 | origin: VariableOrigin.Value, 59 | }, 60 | ]} 61 | /> 62 | ); 63 | })} 64 |
65 | )} 66 | 67 | 82 | 83 | ); 84 | }; 85 | -------------------------------------------------------------------------------- /src/configuration/LogsConfig.test.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { fireEvent, render, screen } from '@testing-library/react'; 3 | import { LogsConfig } from './LogsConfig'; 4 | import { createDefaultConfigOptions } from '__mocks__/DefaultConfigOptions'; 5 | 6 | describe('OpenSearchDetails', () => { 7 | it('should render without error', () => { 8 | render( {}} value={createDefaultConfigOptions().jsonData} />); 9 | }); 10 | 11 | it('should render fields', () => { 12 | render( {}} value={createDefaultConfigOptions().jsonData} />); 13 | expect(screen.getByTestId('log-message-input')).toBeInTheDocument(); 14 | expect(screen.getByTestId('field-name-input')).toBeInTheDocument(); 15 | }); 16 | 17 | it('should pass correct data to onChange', async () => { 18 | const onChangeMock = jest.fn(); 19 | render(); 20 | await fireEvent.change(screen.getByTestId('log-message-input'), {target: {value: 'test_field'}}); 21 | expect(onChangeMock.mock.calls[0][0].logMessageField).toBe('test_field'); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /src/configuration/LogsConfig.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { LegacyForms } from '@grafana/ui'; 3 | const { FormField } = LegacyForms; 4 | import { OpenSearchOptions } from '../types'; 5 | 6 | type Props = { 7 | value: OpenSearchOptions; 8 | onChange: (value: OpenSearchOptions) => void; 9 | }; 10 | export const LogsConfig = (props: Props) => { 11 | const { value, onChange } = props; 12 | const changeHandler = (key: keyof OpenSearchOptions) => ( 13 | event: React.SyntheticEvent 14 | ) => { 15 | onChange({ 16 | ...value, 17 | [key]: event.currentTarget.value, 18 | }); 19 | }; 20 | 21 | return ( 22 | <> 23 |

<h3 className="page-heading">Logs</h3>

24 | 25 |
26 |
27 | 35 |
36 |
37 | 44 |
45 |
46 | 47 | ); 48 | }; 49 | -------------------------------------------------------------------------------- /src/configuration/utils.test.ts: -------------------------------------------------------------------------------- 1 | import { DataSourceSettings } from '@grafana/data'; 2 | import { Flavor, OpenSearchOptions } from '../types'; 3 | import { coerceOptions } from './utils'; 4 | 5 | describe('coerceOptions', () => { 6 | it('should set version if serverless', () => { 7 | const options: DataSourceSettings = { 8 | id: 0, 9 | uid: '', 10 | orgId: 0, 11 | name: '', 12 | typeLogoUrl: '', 13 | type: '', 14 | typeName: '', 15 | access: '', 16 | url: '', 17 | user: '', 18 | database: '', 19 | basicAuth: false, 20 | basicAuthUser: '', 21 | isDefault: false, 22 | jsonData: { 23 | database: '', 24 | timeField: '', 25 | version: '', 26 | // flavor isn't optional and isn't settable to a null equivalents 27 | flavor: Flavor.OpenSearch, 28 | timeInterval: '', 29 | }, 30 | secureJsonFields: {}, 31 | readOnly: false, 32 | withCredentials: false, 33 | }; 34 | options.jsonData.serverless = true; 35 | 36 | const result = coerceOptions(options); 37 | expect(result.jsonData.flavor).toEqual(Flavor.OpenSearch); 38 | expect(result.jsonData.version).toEqual('1.0.0'); 39 | }); 40 | }); 41 | -------------------------------------------------------------------------------- /src/dependencies/PluginSignatureBadge.tsx: -------------------------------------------------------------------------------- 1 | import React, { HTMLAttributes } from 'react'; 2 | import { Badge, BadgeProps } from '@grafana/ui'; 3 | import { PluginErrorCode, PluginSignatureStatus } from '@grafana/data'; 4 | 5 | interface Props extends HTMLAttributes { 6 | status?: PluginSignatureStatus; 7 | } 8 | 9 | export const PluginSignatureBadge: React.FC = ({ status, ...otherProps }) => { 10 | const display = getSignatureDisplayModel(status); 11 | return ( 12 | 19 | ); 20 | }; 21 | 22 | export function isUnsignedPluginSignature(signature?: PluginSignatureStatus) { 23 | return signature && signature !== PluginSignatureStatus.valid && signature !== PluginSignatureStatus.internal; 24 | } 25 | 26 | export function mapPluginErrorCodeToSignatureStatus(code: PluginErrorCode) { 27 | switch (code) { 28 | case PluginErrorCode.invalidSignature: 29 | return PluginSignatureStatus.invalid; 30 | case PluginErrorCode.missingSignature: 31 | return PluginSignatureStatus.missing; 32 | case PluginErrorCode.modifiedSignature: 33 | return PluginSignatureStatus.modified; 34 | default: 35 | return PluginSignatureStatus.missing; 36 | } 37 | } 38 | 39 | function getSignatureDisplayModel(signature?: PluginSignatureStatus): BadgeProps { 40 | if (!signature) { 41 | signature = PluginSignatureStatus.invalid; 42 | } 43 | 44 | switch (signature) { 45 | case PluginSignatureStatus.internal: 46 | return { text: 'Core', icon: 'cube', color: 'blue', tooltip: 'Core plugin that is bundled with Grafana' }; 47 | case PluginSignatureStatus.valid: 48 | return { text: 'Signed', icon: 'lock', color: 'green', tooltip: 'Signed and verified plugin' }; 49 | case PluginSignatureStatus.invalid: 50 | return { 51 | text: 'Invalid signature', 52 | icon: 'exclamation-triangle', 53 | color: 'red', 54 | tooltip: 'Invalid plugin signature', 55 | }; 56 | case PluginSignatureStatus.modified: 57 | return { 58 | text: 'Modified signature', 59 | icon: 'exclamation-triangle', 60 | color: 'red', 61 | tooltip: 'Valid signature but content has been modified', 62 | }; 63 | case PluginSignatureStatus.missing: 64 | return { 65 
| text: 'Missing signature', 66 | icon: 'exclamation-triangle', 67 | color: 'red', 68 | tooltip: 'Missing plugin signature', 69 | }; 70 | } 71 | 72 | return { text: 'Unsigned', icon: 'exclamation-triangle', color: 'red', tooltip: 'Unsigned external plugin' }; 73 | } 74 | 75 | PluginSignatureBadge.displayName = 'PluginSignatureBadge'; 76 | -------------------------------------------------------------------------------- /src/dependencies/flatten.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2014, Hugh Kennedy 2 | // Based on code from https://github.com/hughsk/flat/blob/master/index.js 3 | // 4 | export default function flatten(target: object, opts?: { delimiter?: any; maxDepth?: any; safe?: any }): any { 5 | opts = opts || {}; 6 | 7 | const delimiter = opts.delimiter || '.'; 8 | let maxDepth = opts.maxDepth || 3; 9 | let currentDepth = 1; 10 | const output: any = {}; 11 | 12 | function step(object: any, prev: string | null) { 13 | Object.keys(object).forEach(key => { 14 | const value = object[key]; 15 | const isSafe = opts?.safe; 16 | const isArray = Array.isArray(value); 17 | const isSafeArray = isSafe && isArray; 18 | const type = Object.prototype.toString.call(value); 19 | const isObject = type === '[object Object]'; 20 | 21 | const newKey = prev ? prev + delimiter + key : key; 22 | 23 | if (!opts?.maxDepth) { 24 | maxDepth = currentDepth + 1; 25 | } 26 | 27 | if (!isSafeArray && isObject && Object.keys(value).length && currentDepth < maxDepth) { 28 | ++currentDepth; 29 | return step(value, newKey); 30 | } 31 | 32 | if (isArray) { 33 | output[newKey] = JSON.stringify(value); 34 | } else { 35 | output[newKey] = value; 36 | } 37 | }); 38 | } 39 | 40 | step(target, null); 41 | 42 | return output; 43 | } 44 | -------------------------------------------------------------------------------- /src/dependencies/matchers/index.ts: -------------------------------------------------------------------------------- 1 | import { ObservableMatchers } from './types'; 2 | import { toEmitValuesWith } from './toEmitValuesWith'; 3 | import { Observable } from 'rxjs'; 4 | 5 | export const matchers: ObservableMatchers> = { 6 | toEmitValuesWith, 7 | }; 8 | -------------------------------------------------------------------------------- /src/dependencies/matchers/toEmitValuesWith.ts: -------------------------------------------------------------------------------- 1 | import { Observable, Subscription } from 'rxjs'; 2 | import { expectObservable, forceObservableCompletion } from './utils'; 3 | import { matcherHint, printReceived } from 'jest-matcher-utils'; 4 | 5 | function tryExpectations(received: any[], expectations: (received: any[]) => void): jest.CustomMatcherResult { 6 | try { 7 | expectations(received); 8 | return { 9 | pass: true, 10 | message: () => `${matcherHint('.not.toEmitValues')} 11 | 12 | Expected observable to complete with 13 | ${printReceived(received)} 14 | `, 15 | }; 16 | } catch (err) { 17 | return { 18 | pass: false, 19 | message: () => 'failed ' + err, 20 | }; 21 | } 22 | } 23 | 24 | export function toEmitValuesWith( 25 | received: Observable, 26 | expectations: (actual: any[]) => void 27 | ): Promise { 28 | const failsChecks = expectObservable(received); 29 | if (failsChecks) { 30 | return Promise.resolve(failsChecks); 31 | } 32 | 33 | return new Promise(resolve => { 34 | const receivedValues: any[] = []; 35 | const subscription = new Subscription(); 36 | 37 | subscription.add( 38 | received.subscribe({ 39 | next: value => { 40 | 
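// Record every emitted value so the expectations callback can assert on the full sequence once the observable errors or completes.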
receivedValues.push(value); 41 | }, 42 | error: err => { 43 | receivedValues.push(err); 44 | subscription.unsubscribe(); 45 | resolve(tryExpectations(receivedValues, expectations)); 46 | }, 47 | complete: () => { 48 | subscription.unsubscribe(); 49 | resolve(tryExpectations(receivedValues, expectations)); 50 | }, 51 | }) 52 | ); 53 | 54 | forceObservableCompletion(subscription, resolve); 55 | }); 56 | } 57 | -------------------------------------------------------------------------------- /src/dependencies/matchers/types.ts: -------------------------------------------------------------------------------- 1 | import { Observable } from 'rxjs'; 2 | 3 | export const OBSERVABLE_TEST_TIMEOUT_IN_MS = 1000; 4 | 5 | export type ObservableType = T extends Observable ? V : never; 6 | 7 | export interface ObservableMatchers extends jest.ExpectExtendMap { 8 | toEmitValuesWith>( 9 | received: T, 10 | expectations: (received: E[]) => void 11 | ): Promise; 12 | } 13 | -------------------------------------------------------------------------------- /src/dependencies/matchers/utils.ts: -------------------------------------------------------------------------------- 1 | import { matcherHint, printExpected, printReceived } from 'jest-matcher-utils'; 2 | import { OBSERVABLE_TEST_TIMEOUT_IN_MS } from './types'; 3 | import { asapScheduler, Observable, Subscription, timer } from 'rxjs'; 4 | 5 | export function forceObservableCompletion(subscription: Subscription, resolve: (args: any) => void) { 6 | const timeoutObservable = timer(OBSERVABLE_TEST_TIMEOUT_IN_MS, asapScheduler); 7 | 8 | subscription.add( 9 | timeoutObservable.subscribe(() => { 10 | subscription.unsubscribe(); 11 | resolve({ 12 | pass: false, 13 | message: () => 14 | `${matcherHint('.toEmitValues')} 15 | 16 | Expected ${printReceived('Observable')} to be ${printExpected( 17 | `completed within ${OBSERVABLE_TEST_TIMEOUT_IN_MS}ms` 18 | )} but it did not.`, 19 | }); 20 | }) 21 | ); 22 | } 23 | 24 | export function expectObservableToBeDefined(received: any): jest.CustomMatcherResult | null { 25 | if (received) { 26 | return null; 27 | } 28 | 29 | return { 30 | pass: false, 31 | message: () => `${matcherHint('.toEmitValues')} 32 | 33 | Expected ${printReceived(received)} to be ${printExpected('defined')}.`, 34 | }; 35 | } 36 | 37 | export function expectObservableToBeObservable(received: any): jest.CustomMatcherResult | null { 38 | if (received instanceof Observable) { 39 | return null; 40 | } 41 | 42 | return { 43 | pass: false, 44 | message: () => `${matcherHint('.toEmitValues')} 45 | 46 | Expected ${printReceived(received)} to be ${printExpected('an Observable')}.`, 47 | }; 48 | } 49 | 50 | export function expectObservable(received: any): jest.CustomMatcherResult | null { 51 | const toBeDefined = expectObservableToBeDefined(received); 52 | if (toBeDefined) { 53 | return toBeDefined; 54 | } 55 | 56 | const toBeObservable = expectObservableToBeObservable(received); 57 | if (toBeObservable) { 58 | return toBeObservable; 59 | } 60 | 61 | return null; 62 | } 63 | -------------------------------------------------------------------------------- /src/dependencies/mocks.ts: -------------------------------------------------------------------------------- 1 | import { DataSourceJsonData, DataSourceSettings } from '@grafana/data'; 2 | 3 | export function createDatasourceSettings(jsonData: T): DataSourceSettings { 4 | return { 5 | id: 0, 6 | uid: 'test', 7 | orgId: 0, 8 | name: 'datasource-test', 9 | typeLogoUrl: '', 10 | type: 'datasource', 11 | typeName: 
'Datasource', 12 | access: 'server', 13 | url: 'http://localhost', 14 | user: '', 15 | database: '', 16 | basicAuth: false, 17 | basicAuthUser: '', 18 | isDefault: false, 19 | jsonData, 20 | readOnly: false, 21 | withCredentials: false, 22 | secureJsonFields: {}, 23 | }; 24 | } 25 | -------------------------------------------------------------------------------- /src/hooks/useNextId.test.tsx: -------------------------------------------------------------------------------- 1 | import React, { PropsWithChildren } from 'react'; 2 | import { renderHook } from '@testing-library/react'; 3 | import { OpenSearchProvider } from '../components/QueryEditor/OpenSearchQueryContext'; 4 | import { useNextId } from './useNextId'; 5 | import { OpenSearchQuery } from '../types'; 6 | 7 | describe('useNextId', () => { 8 | it('Should return the next available id', () => { 9 | const query: OpenSearchQuery = { 10 | refId: 'A', 11 | query: '', 12 | metrics: [{ id: '1', type: 'avg' }], 13 | bucketAggs: [{ id: '2', type: 'date_histogram' }], 14 | }; 15 | const wrapper = ({ children }: PropsWithChildren<{}>) => { 16 | return ( 17 | {}}> 18 | {children} 19 | 20 | ); 21 | }; 22 | 23 | const { result } = renderHook(() => useNextId(), { 24 | wrapper, 25 | }); 26 | 27 | expect(result.current).toBe('3'); 28 | }); 29 | }); 30 | -------------------------------------------------------------------------------- /src/hooks/useNextId.ts: -------------------------------------------------------------------------------- 1 | import { useMemo } from 'react'; 2 | import { useQuery } from '../components/QueryEditor/OpenSearchQueryContext'; 3 | import { BucketAggregation } from '../components/QueryEditor/BucketAggregationsEditor/aggregations'; 4 | import { MetricAggregation } from '../components/QueryEditor/MetricAggregationsEditor/aggregations'; 5 | 6 | const toId = (e: T): T['id'] => e.id; 7 | 8 | const toInt = (idString: string) => parseInt(idString, 10); 9 | 10 | export const useNextId = (): MetricAggregation['id'] | BucketAggregation['id'] => { 11 | const { metrics, bucketAggs } = useQuery(); 12 | 13 | return useMemo( 14 | () => 15 | (Math.max(...[...(metrics?.map(toId) || ['0']), ...(bucketAggs?.map(toId) || ['0'])].map(toInt)) + 1).toString(), 16 | [metrics, bucketAggs] 17 | ); 18 | }; 19 | -------------------------------------------------------------------------------- /src/hooks/useStatelessReducer.test.tsx: -------------------------------------------------------------------------------- 1 | import React, { PropsWithChildren } from 'react'; 2 | import { renderHook } from '@testing-library/react'; 3 | import { useStatelessReducer, useDispatch, DispatchContext, combineReducers } from './useStatelessReducer'; 4 | 5 | describe('useStatelessReducer Hook', () => { 6 | it('When dispatch is called, it should call the provided reducer with the correct action and state', () => { 7 | const action = { type: 'SOME ACTION' }; 8 | const reducer = jest.fn(); 9 | const state = { someProp: 'some state' }; 10 | 11 | const { result } = renderHook(() => useStatelessReducer(() => {}, state, reducer)); 12 | 13 | result.current(action); 14 | 15 | expect(reducer).toHaveBeenCalledWith(state, action); 16 | }); 17 | 18 | it('When an action is dispatched, it should call the provided onChange callback with the result from the reducer', () => { 19 | const action = { type: 'SOME ACTION' }; 20 | const state = { propA: 'A', propB: 'B' }; 21 | const expectedState = { ...state, propB: 'Changed' }; 22 | const reducer = () => expectedState; 23 | const onChange = 
jest.fn(); 24 | 25 | const { result } = renderHook(() => useStatelessReducer(onChange, state, reducer)); 26 | 27 | result.current(action); 28 | 29 | expect(onChange).toHaveBeenLastCalledWith(expectedState); 30 | }); 31 | }); 32 | 33 | describe('useDispatch Hook', () => { 34 | it('Should throw when used outside of DispatchContext', () => { 35 | expect(() => { 36 | renderHook(() => useDispatch()); 37 | }).toThrow(); 38 | }); 39 | 40 | it('Should return a dispatch function', () => { 41 | const dispatch = jest.fn(); 42 | const wrapper = ({ children }: PropsWithChildren<{}>) => ( 43 | {children} 44 | ); 45 | 46 | const { result } = renderHook(() => useDispatch(), { 47 | wrapper, 48 | }); 49 | 50 | expect(result.current).toBe(dispatch); 51 | }); 52 | }); 53 | 54 | describe('combineReducers', () => { 55 | it('Should correctly combine reducers', () => { 56 | const reducerA = jest.fn(); 57 | const reducerB = jest.fn(); 58 | 59 | const combinedReducer = combineReducers({ reducerA, reducerB }); 60 | 61 | const action = { type: 'SOME ACTION' }; 62 | const initialState = { reducerA: 'A', reducerB: 'B' }; 63 | 64 | combinedReducer(initialState, action); 65 | 66 | expect(reducerA).toHaveBeenCalledWith(initialState.reducerA, action); 67 | expect(reducerB).toHaveBeenCalledWith(initialState.reducerB, action); 68 | }); 69 | }); 70 | -------------------------------------------------------------------------------- /src/hooks/useStatelessReducer.ts: -------------------------------------------------------------------------------- 1 | import { Action } from '@reduxjs/toolkit'; 2 | import { createContext, useCallback, useContext } from 'react'; 3 | 4 | export type Reducer = (state: S, action: A) => S; 5 | 6 | export const combineReducers = 7 | (reducers: { [P in keyof S]: Reducer }) => 8 | (state: S, action: A): Partial => { 9 | const newState = {} as S; 10 | for (const key in reducers) { 11 | newState[key] = reducers[key](state[key], action); 12 | } 13 | return newState; 14 | }; 15 | 16 | export const useStatelessReducer = ( 17 | onChange: (value: State) => void, 18 | state: State, 19 | reducer: (state: State, action: A) => State 20 | ) => { 21 | const dispatch = useCallback( 22 | (action: A) => { 23 | onChange(reducer(state, action)); 24 | }, 25 | [onChange, state, reducer] 26 | ); 27 | 28 | return dispatch; 29 | }; 30 | 31 | export const DispatchContext = createContext<((action: Action) => void) | undefined>(undefined); 32 | 33 | export const useDispatch = (): ((action: T) => void) => { 34 | const dispatch = useContext(DispatchContext); 35 | 36 | if (!dispatch) { 37 | throw new Error('Use DispatchContext first.'); 38 | } 39 | 40 | return dispatch; 41 | }; 42 | -------------------------------------------------------------------------------- /src/img/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /src/index_pattern.ts: -------------------------------------------------------------------------------- 1 | import { toUtc, dateTime, DateTime } from '@grafana/data'; 2 | 3 | const intervalMap: any = { 4 | Hourly: { startOf: 'hour', amount: 'hours' }, 5 | Daily: { startOf: 'day', amount: 'days' }, 6 | Weekly: { startOf: 'isoWeek', amount: 'weeks' }, 7 | Monthly: { startOf: 'month', amount: 'months' }, 8 | Yearly: { startOf: 'year', amount: 'years' }, 9 | }; 10 | 11 | export class IndexPattern { 12 | private dateLocale = 'en'; 13 | 14 | constructor(private pattern: any, private 
interval?: string) {} 15 | 16 | getIndexForToday() { 17 | if (this.interval) { 18 | return toUtc().locale(this.dateLocale).format(this.pattern); 19 | } else { 20 | return this.pattern; 21 | } 22 | } 23 | 24 | getIndexList(from?: DateTime, to?: DateTime) { 25 | // When no `from` or `to` is provided, we request data from 7 subsequent/previous indices 26 | // for the provided index pattern. 27 | // This is useful when requesting log context where the only time data we have is the log 28 | // timestamp. 29 | const indexOffset = 7; 30 | if (!this.interval) { 31 | return this.pattern; 32 | } 33 | 34 | const intervalInfo = intervalMap[this.interval]; 35 | const start = dateTime(from || dateTime(to).add(-indexOffset, intervalInfo.amount)) 36 | .utc() 37 | .startOf(intervalInfo.startOf); 38 | const endEpoch = dateTime(to || dateTime(from).add(indexOffset, intervalInfo.amount)) 39 | .utc() 40 | .startOf(intervalInfo.startOf) 41 | .valueOf(); 42 | const indexList = []; 43 | 44 | while (start.valueOf() <= endEpoch) { 45 | indexList.push(start.locale(this.dateLocale).format(this.pattern)); 46 | start.add(1, intervalInfo.amount); 47 | } 48 | 49 | return indexList; 50 | } 51 | 52 | getPPLIndexPattern() { 53 | // PPL currently does not support multi-indexing through lists, so a wildcard 54 | // pattern is used to match all patterns and relies on the time range filter 55 | // to filter out the incorrect indexes. 56 | if (!this.interval) { 57 | return this.pattern; 58 | } 59 | 60 | let indexPattern = this.pattern.match(/\[(.*?)\]/)[1]; 61 | 62 | if (this.pattern.startsWith('[')) { 63 | indexPattern = indexPattern + '*'; 64 | } else if (this.pattern.endsWith(']')) { 65 | indexPattern = '*' + indexPattern; 66 | } 67 | return indexPattern; 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/module.ts: -------------------------------------------------------------------------------- 1 | import { DataSourcePlugin } from '@grafana/data'; 2 | import { OpenSearchDatasource } from './opensearchDatasource'; 3 | import { ConfigEditor } from './configuration/ConfigEditor'; 4 | import { QueryEditor } from './components/QueryEditor'; 5 | 6 | export const plugin = new DataSourcePlugin(OpenSearchDatasource) 7 | .setQueryEditor(QueryEditor) 8 | .setConfigEditor(ConfigEditor); 9 | -------------------------------------------------------------------------------- /src/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "datasource", 3 | "name": "OpenSearch", 4 | "id": "grafana-opensearch-datasource", 5 | "category": "logging", 6 | "alerting": true, 7 | "annotations": true, 8 | "metrics": true, 9 | "logs": true, 10 | "backend": true, 11 | 12 | "executable": "gpx_opensearch-datasource", 13 | "info": { 14 | "description": "Query OpenSearch to visualize logs or metrics.", 15 | "author": { 16 | "name": "Grafana Labs", 17 | "url": "https://grafana.com" 18 | }, 19 | "keywords": ["datasource", "elasticsearch", "opensearch", "aws", "amazon", "cloud provider", "database", "logs", "nosql", "traces"], 20 | "logos": { 21 | "small": "img/logo.svg", 22 | "large": "img/logo.svg" 23 | }, 24 | "links": [ 25 | { 26 | "name": "Github", 27 | "url": "https://github.com/grafana/opensearch-datasource" 28 | } 29 | ], 30 | "screenshots": [], 31 | "version": "%VERSION%", 32 | "updated": "%TODAY%" 33 | }, 34 | "dependencies": { 35 | "grafanaDependency": ">=10.4.0", 36 | "plugins": [] 37 | }, 38 | "queryOptions": { 39 | "minInterval": true 40 | } 
41 | } 42 | -------------------------------------------------------------------------------- /src/query_def.ts: -------------------------------------------------------------------------------- 1 | import { DateHistogram } from './components/QueryEditor/BucketAggregationsEditor/aggregations'; 2 | import { 3 | ExtendedStat, 4 | MetricAggregation, 5 | MovingAverageModelOption, 6 | MetricAggregationType, 7 | } from './components/QueryEditor/MetricAggregationsEditor/aggregations'; 8 | import { metricAggregationConfig, pipelineOptions } from './components/QueryEditor/MetricAggregationsEditor/utils'; 9 | import { PPLFormatType } from './components/QueryEditor/PPLFormatEditor/formats'; 10 | 11 | export const extendedStats: ExtendedStat[] = [ 12 | { label: 'Avg', value: 'avg' }, 13 | { label: 'Min', value: 'min' }, 14 | { label: 'Max', value: 'max' }, 15 | { label: 'Sum', value: 'sum' }, 16 | { label: 'Count', value: 'count' }, 17 | { label: 'Std Dev', value: 'std_deviation' }, 18 | { label: 'Std Dev Upper', value: 'std_deviation_bounds_upper' }, 19 | { label: 'Std Dev Lower', value: 'std_deviation_bounds_lower' }, 20 | ]; 21 | 22 | export const movingAvgModelOptions: MovingAverageModelOption[] = [ 23 | { label: 'Simple', value: 'simple' }, 24 | { label: 'Linear', value: 'linear' }, 25 | { label: 'Exponentially Weighted', value: 'ewma' }, 26 | { label: 'Holt Linear', value: 'holt' }, 27 | { label: 'Holt Winters', value: 'holt_winters' }, 28 | ]; 29 | 30 | export function defaultMetricAgg(id = '1'): MetricAggregation { 31 | return { type: 'count', id }; 32 | } 33 | 34 | export function defaultBucketAgg(id = '1'): DateHistogram { 35 | return { type: 'date_histogram', id, settings: { interval: 'auto' } }; 36 | } 37 | 38 | export function defaultPPLFormat(): PPLFormatType { 39 | return 'table'; 40 | } 41 | 42 | export const findMetricById = (metrics: MetricAggregation[], id: MetricAggregation['id']) => 43 | metrics.find(metric => metric.id === id); 44 | 45 | export function hasMetricOfType(target: any, type: string): boolean { 46 | return target && target.metrics && target.metrics.some((m: any) => m.type === type); 47 | } 48 | 49 | // Even if we have type guards when building a query, we currently have no way of getting this information from the response. 50 | // We should try to find a better (type safe) way of doing the following 2. 51 | export function isPipelineAgg(metricType: MetricAggregationType) { 52 | return metricType in pipelineOptions; 53 | } 54 | 55 | export function isPipelineAggWithMultipleBucketPaths(metricType: MetricAggregationType) { 56 | return !!metricAggregationConfig[metricType].supportsMultipleBucketPaths; 57 | } 58 | -------------------------------------------------------------------------------- /src/query_help.md: -------------------------------------------------------------------------------- 1 | #### Alias patterns 2 | - {{term fieldname}} = replaced with value of term group by 3 | - {{metric}} = replaced with metric name (ex. 
Average, Min, Max) 4 | - {{field}} = replaced with the metric field name 5 | 6 | #### Documentation links 7 | 8 | [Grafana's Elasticsearch Documentation](http://docs.grafana.org/features/datasources/elasticsearch) 9 | 10 | [Official Elasticsearch Documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html) 11 | -------------------------------------------------------------------------------- /src/tracking.ts: -------------------------------------------------------------------------------- 1 | import { CoreApp, DataQueryResponse } from '@grafana/data'; 2 | import { reportInteraction } from '@grafana/runtime'; 3 | import { LuceneQueryType, OpenSearchQuery, QueryType } from 'types'; 4 | 5 | export function trackQuery(response: DataQueryResponse, queries: OpenSearchQuery[], app: string): void { 6 | if (app === CoreApp.Dashboard || app === CoreApp.PanelViewer) { 7 | return; 8 | } 9 | 10 | for (const query of queries) { 11 | try { 12 | reportInteraction('grafana_opensearch_query_executed', { 13 | app, 14 | with_lucene_query: query.queryType === QueryType.Lucene, 15 | with_ppl_query: query.queryType === QueryType.PPL, 16 | query_type: getQueryType(query), 17 | has_data: response.data.some(frame => frame.datapoints?.length > 0), 18 | has_error: response.error !== undefined, 19 | simultaneously_sent_query_count: queries.length, 20 | alias: query.alias, 21 | }); 22 | } catch (error) { 23 | console.error('error while reporting opensearch query', error); 24 | } 25 | } 26 | } 27 | 28 | function getQueryType(query: OpenSearchQuery) { 29 | if (!query.metrics || !query.metrics.length) { 30 | return undefined; 31 | } 32 | 33 | if (query.isLogsQuery) { 34 | return 'logs'; 35 | } 36 | 37 | if (query.luceneQueryType === LuceneQueryType.Traces) { 38 | if (query.serviceMap) { 39 | return 'traces with service map' 40 | } 41 | return 'traces'; 42 | } 43 | 44 | // PPL queries are a bit special, as they can be either raw_data or metric, depending on the format 45 | if (query.queryType === QueryType.PPL) { 46 | return query.format === 'table' ? 'raw_data' : 'metric'; 47 | } 48 | 49 | const types = ['raw_data', 'raw_document']; 50 | if (types.includes(query.metrics[0].type)) { 51 | return query.metrics[0].type; 52 | } 53 | return 'metric'; 54 | } 55 | -------------------------------------------------------------------------------- /src/typings/index.d.ts: -------------------------------------------------------------------------------- 1 | import { Observable, Subscription } from 'rxjs'; 2 | export {}; 3 | 4 | type ObservableType = T extends Observable ? 
V : never; 5 | 6 | declare global { 7 | namespace jest { 8 | interface Matchers { 9 | toEmitValuesWith>(expectations: (received: E[]) => void): Promise; 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /tests/annotationsEditor.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect, test } from '@grafana/plugin-e2e'; 2 | 3 | test('should render annotations editor', async ({ annotationEditPage, page, selectors }) => { 4 | await annotationEditPage.datasource.set('AWS OpenSearch'); 5 | await expect(page.getByTestId(selectors.components.QueryField.container)).toBeVisible(); 6 | }); 7 | -------------------------------------------------------------------------------- /tests/configEditor.spec.ts: -------------------------------------------------------------------------------- 1 | import { test, expect } from '@grafana/plugin-e2e'; 2 | 3 | test('should render config editor', async ({ createDataSourceConfigPage, readProvisionedDataSource, page }) => { 4 | const ds = await readProvisionedDataSource({ fileName: 'aws-opensearch.yaml', name: 'AWS OpenSearch' }); 5 | await createDataSourceConfigPage({ type: ds.type }); 6 | await expect(page.getByText('OpenSearch details')).toBeVisible(); 7 | }); 8 | -------------------------------------------------------------------------------- /tests/queryEditor.spec.ts: -------------------------------------------------------------------------------- 1 | import { test, expect } from '@grafana/plugin-e2e'; 2 | 3 | test('should render query editor', async ({ panelEditPage, selectors }) => { 4 | await panelEditPage.datasource.set('AWS OpenSearch'); 5 | await expect( 6 | panelEditPage.getQueryEditorRow('A').getByTestId(selectors.components.QueryField.container) 7 | ).toBeVisible(); 8 | }); 9 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./.config/tsconfig.json" 3 | } 4 | --------------------------------------------------------------------------------
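
To make the custom matcher defined in src/dependencies/matchers easier to follow, here is a usage sketch. It is illustrative only and not a file from this repository: it assumes the matchers map is registered via `expect.extend` (as the repository's Jest setup presumably does), that the global typing from src/typings/index.d.ts is in scope, and the import path is hypothetical.

import { of, throwError } from 'rxjs';
import { matchers } from './src/dependencies/matchers'; // hypothetical path, for illustration only

// Normally done once in the Jest setup file.
expect.extend(matchers);

describe('toEmitValuesWith (illustrative usage)', () => {
  it('passes every emitted value to the expectations callback once the observable completes', async () => {
    await expect(of(1, 2, 3)).toEmitValuesWith((received) => {
      expect(received).toEqual([1, 2, 3]);
    });
  });

  it('hands over the error as the last received value when the observable errors', async () => {
    await expect(throwError(() => new Error('boom'))).toEmitValuesWith((received) => {
      expect(received).toEqual([new Error('boom')]);
    });
  });
});

If the observable never completes, the 1000 ms guard in forceObservableCompletion resolves the matcher with a failure, so a hanging stream surfaces as a clear assertion error rather than a test timeout.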