├── .eslintrc ├── .gitignore ├── .prettierrc ├── .vscode └── settings.json ├── CONFIGURE_AS_CODE.md ├── Gruntfile.js ├── LICENSE ├── README.md ├── VERSION ├── VERSION_GRAFANA ├── build ├── Dockerfile ├── Jenkinsfile ├── build.sh ├── notify-slack.js ├── publish.sh └── start.sh ├── datasource-settings.png ├── deployment ├── Dockerfile └── build-image.sh ├── package-lock.json ├── package.json ├── spec ├── .eslintrc ├── cache_spec.js ├── datasource_spec.js ├── formatter_service_spec.js ├── metrics_service_spec.js └── test-main.js ├── src ├── api_service.js ├── cache.js ├── config_ctrl.js ├── css │ ├── config-editor.css │ └── query-editor.css ├── dashboards_service.js ├── data_service.js ├── datasource.js ├── formatter_service.js ├── img │ └── sysdig_logo.svg ├── metrics_service.js ├── module.js ├── partials │ ├── annotations.editor.html │ ├── config.html │ ├── query.editor.html │ └── query.options.html ├── plugin.json ├── query_ctrl.js ├── sysdig_dashboard_helper.js ├── templating_service.js └── time_service.js └── yarn.lock /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "eslint:recommended", 3 | "parserOptions": { 4 | "ecmaVersion": 8, 5 | "sourceType": "module" 6 | }, 7 | "globals": { 8 | "_": true, 9 | "ActiveXObject": true, 10 | "console": true, 11 | "window": true, 12 | "XMLHttpRequest": true, 13 | "Promise": true 14 | }, 15 | "rules": { 16 | "no-console": "off" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | 3 | # Dependencies 4 | node_modules/ 5 | 6 | # Build artifacts 7 | dist/ 8 | dist-image/ 9 | out/ 10 | sysdig/ 11 | 12 | # Local data 13 | grafana-data-*/ 14 | 15 | # IDE settings 16 | .idea/ 17 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 100, 3 | "tabWidth": 4, 4 | "arrowParens": "always", 5 | "singleQuote": true 6 | } 7 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.formatOnSave": true, 3 | "files.exclude": { 4 | "dist/**": true, 5 | "node_modules/**": true 6 | } 7 | } -------------------------------------------------------------------------------- /CONFIGURE_AS_CODE.md: -------------------------------------------------------------------------------- 1 | #### Codifing the Sysdig Datasource with Grafana 2 | 3 | These instructions will often apply to container-based platforms such as kubernetes and is focused more on how to codify the installation and configuration of the datasource & dashboards. Grafana supports many dynamic configuration capabilites such as using Environment Variables or loading in dashboard and datasource configurations. 4 | 5 | 1. Install the plugin in a Grafana container image with an Environment Variable 6 | - Set the following environment variable to auto-install the plugin at launch (Tip: you must use the .zip package) 7 | ``` 8 | GF_INSTALL_PLUGINS=https://download.sysdig.com/stable/grafana-sysdig-datasource/grafana-sysdig-datasource-v0.7.zip;sysdig 9 | ``` 10 | 11 | 2. 
Configure the datasource in code (such as a kubernetes configMap) 12 | - Create the sysdig datasource configuration such as `/etc/grafana/provisioning/datasources/datasource.yml` 13 | - What's important here is that the apiToken is embedded into the jsonData section 14 | ``` 15 | apiVersion: 1 16 | datasources: 17 | - name: Sysdig 18 | type: sysdig 19 | access: proxy 20 | jsonData: 21 | apiToken: [insert api token here] 22 | orgId: 1 23 | editable: true 24 | ``` 25 | 26 | 3. Configure Grafana to load dashboards from config files 27 | - Configure the Grafana dashboard provider file `/etc/grafana/provisioning/dashboards/dashboard.yml` 28 | - Take note of the path specified where Grafana will look for dashboards 29 | ``` 30 | apiVersion: 1 31 | providers: 32 | # provider name 33 | - name: 'default' 34 | # org id. will default to orgId 1 if not specified 35 | orgId: 1 36 | # name of the dashboard folder. Required 37 | folder: '' 38 | # folder UID. will be automatically generated if not specified 39 | folderUid: '' 40 | # provider type. Required 41 | type: file 42 | # disable dashboard deletion 43 | disableDeletion: false 44 | # enable dashboard editing 45 | editable: true 46 | # how often Grafana will scan for changed dashboards 47 | updateIntervalSeconds: 10 48 | options: 49 | # path to dashboard files on disk. Required 50 | path: /var/lib/grafana/dashboards 51 | ``` 52 | - Drop your sysdig dashboard json into `/var/lib/grafana/dashboards`, for example; 53 | ``` 54 | { 55 | "annotations": { 56 | "list": [ 57 | { 58 | "builtIn": 1, 59 | "datasource": "-- Grafana --", 60 | "enable": true, 61 | "hide": true, 62 | "iconColor": "rgba(0, 211, 255, 1)", 63 | "name": "Annotations & Alerts", 64 | "type": "dashboard" 65 | } 66 | ] 67 | }, 68 | "editable": true, 69 | "gnetId": null, 70 | "graphTooltip": 0, 71 | "id": 56, 72 | "links": [], 73 | "panels": [ 74 | { 75 | "content": "Ready Master Nodes", 76 | "datasource": "Sysdig", 77 | "gridPos": { 78 | "h": 2, 79 | "w": 24, 80 | "x": 0, 81 | "y": 0 82 | }, 83 | "id": 11, 84 | "mode": "markdown", 85 | "options": {}, 86 | "type": "text" 87 | }, 88 | { 89 | "cacheTimeout": null, 90 | "colorBackground": false, 91 | "colorValue": false, 92 | "colors": [ 93 | "#299c46", 94 | "rgba(237, 129, 40, 0.89)", 95 | "#d44a3a" 96 | ], 97 | "datasource": "Sysdig", 98 | "format": "short", 99 | "gauge": { 100 | "maxValue": 100, 101 | "minValue": 0, 102 | "show": false, 103 | "thresholdLabels": false, 104 | "thresholdMarkers": true 105 | }, 106 | "gridPos": { 107 | "h": 4, 108 | "w": 4, 109 | "x": 0, 110 | "y": 2 111 | }, 112 | "id": 9, 113 | "interval": null, 114 | "links": [], 115 | "mappingType": 1, 116 | "mappingTypes": [ 117 | { 118 | "name": "value to text", 119 | "value": 1 120 | }, 121 | { 122 | "name": "range to text", 123 | "value": 2 124 | } 125 | ], 126 | "maxDataPoints": 100, 127 | "nullPointMode": "connected", 128 | "nullText": null, 129 | "options": {}, 130 | "postfix": "", 131 | "postfixFontSize": "50%", 132 | "prefix": "", 133 | "prefixFontSize": "50%", 134 | "rangeMaps": [ 135 | { 136 | "from": "null", 137 | "text": "N/A", 138 | "to": "null" 139 | } 140 | ], 141 | "sparkline": { 142 | "fillColor": "rgba(31, 118, 189, 0.18)", 143 | "full": false, 144 | "lineColor": "rgb(31, 120, 193)", 145 | "show": false, 146 | "ymax": null, 147 | "ymin": null 148 | }, 149 | "tableColumn": "", 150 | "targets": [ 151 | { 152 | "filter": "kubernetes.node.label.region in (\"master\")", 153 | "groupAggregation": "sum", 154 | "isSingleDataPoint": true, 155 | "isTabularFormat": 
false, 156 | "refId": "0", 157 | "segmentBy": null, 158 | "target": "kubernetes.node.ready", 159 | "timeAggregation": "avg" 160 | } 161 | ], 162 | "thresholds": "", 163 | "title": "Masters - Ready", 164 | "type": "singlestat", 165 | "valueFontSize": "80%", 166 | "valueMaps": [ 167 | { 168 | "op": "=", 169 | "text": "N/A", 170 | "value": "null" 171 | } 172 | ], 173 | "valueName": "avg" 174 | } 175 | ], 176 | "schemaVersion": 20, 177 | "style": "dark", 178 | "tags": [ 179 | "Sysdig", 180 | "Shared dashboard" 181 | ], 182 | "templating": { 183 | "list": [] 184 | }, 185 | "time": { 186 | "from": "now-1h", 187 | "to": "now" 188 | }, 189 | "timepicker": {}, 190 | "timezone": "browser", 191 | "title": " Capacity and Utilization" 192 | } 193 | ``` -------------------------------------------------------------------------------- /Gruntfile.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | // 16 | const fs = require('fs'); 17 | 18 | module.exports = function(grunt) { 19 | 20 | require('load-grunt-tasks')(grunt); 21 | 22 | grunt.loadNpmTasks('grunt-execute'); 23 | grunt.loadNpmTasks('grunt-contrib-clean'); 24 | 25 | const version = fs.readFileSync('VERSION').toString().trim(); 26 | 27 | grunt.initConfig({ 28 | 29 | clean: ['dist'], 30 | 31 | copy: { 32 | src_to_dist: { 33 | cwd: 'src', 34 | expand: true, 35 | src: ['**/*', '!**/*.js', '!**/*.scss'], 36 | dest: 'dist' 37 | }, 38 | pluginDef: { 39 | expand: true, 40 | src: ['README.md'], 41 | dest: 'dist' 42 | } 43 | }, 44 | 45 | replace: { 46 | dist: { 47 | options: { 48 | patterns: [ 49 | { 50 | json: { 51 | version 52 | } 53 | } 54 | ] 55 | }, 56 | files: [ 57 | { 58 | expand: true, 59 | flatten: true, 60 | src: ['src/plugin.json'], 61 | dest: 'dist/' 62 | } 63 | ] 64 | } 65 | }, 66 | 67 | watch: { 68 | rebuild_all: { 69 | files: ['src/**/*'], 70 | tasks: ['default'], 71 | options: {spawn: false} 72 | } 73 | }, 74 | 75 | babel: { 76 | options: { 77 | sourceMap: true, 78 | presets: ['es2015'] 79 | }, 80 | dist: { 81 | options: { 82 | plugins: ['transform-es2015-modules-systemjs', 'transform-es2015-for-of'] 83 | }, 84 | files: [{ 85 | cwd: 'src', 86 | expand: true, 87 | src: ['**/*.js'], 88 | dest: 'dist', 89 | ext:'.js' 90 | }] 91 | }, 92 | distTestNoSystemJs: { 93 | files: [{ 94 | cwd: 'src', 95 | expand: true, 96 | src: ['**/*.js'], 97 | dest: 'dist/test', 98 | ext:'.js' 99 | }] 100 | }, 101 | distTestsSpecsNoSystemJs: { 102 | files: [{ 103 | expand: true, 104 | cwd: 'spec', 105 | src: ['**/*.js'], 106 | dest: 'dist/test/spec', 107 | ext:'.js' 108 | }] 109 | } 110 | }, 111 | 112 | mochaTest: { 113 | test: { 114 | options: { 115 | reporter: 'spec' 116 | }, 117 | src: ['dist/test/spec/test-main.js', 'dist/test/spec/*_spec.js'] 118 | } 119 | } 120 | }); 121 | 122 | grunt.registerTask('default', ['clean', 'copy:src_to_dist', 'copy:pluginDef', 'replace', 'babel', 'mochaTest']); 
123 | }; 124 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 
62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 
180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Sysdig Datasource Plugin for Grafana (Beta) 2 | 3 | This README discusses the installation and configuration instructions for the Sysdig datasource plugin for Grafana. 4 | 5 |

6 | [Image: Sysdig datasource] 7 |
8 | 9 | --- 10 | 11 | ## Support and Known Limitations 12 | 13 | The Sysdig datasource plugin is currently in BETA and tested with Grafana version up to [8.5.5](https://github.com/grafana/grafana/releases/tag/v8.5.5). 14 | 15 | **NOTE: this plugin is not supported with later versions of Grafana. Instead, use the official Prometheus data source plugin to query the Sysdig API. For more info: [Sysdig Docs](https://docs.sysdig.com/en/docs/sysdig-monitor/monitoring-integrations/advanced-configuration/configure-sysdig-with-grafana/).** 16 | 17 | Known limitations of the Sysdig datasource plugin are listed below: 18 | * [Annotations](http://docs.grafana.org/reference/annotations/) are leveraged to show Sysdig events, but not broadly supported. 19 | * With Grafana you can enter any arbitrary [time range](https://grafana.com/docs/grafana/v8.5/dashboards/time-range-controls/), but data will be fetched according to retention and granularity restrictions as explained in [Sysdig Docs](https://docs.sysdig.com/en/docs/sysdig-monitor/explore/time-windows/#time-window-limitations). 20 | 21 | --- 22 | 23 | ## Getting Started 24 | 25 | ### Installation 26 | 27 | There are several installation approaches available for the Sysdig datasource plugin. 28 | 29 | > **Note:** The Sysdig datasource plugin is currently not included in the [official & community built plugin page](https://grafana.com/plugins), and needs to be installed manually. 30 | 31 | #### Compatibility 32 | 33 | | Grafana Version | Plugin Version | 34 | |-----------------|----------------| 35 | | <= 7.3.10 | <= 0.10 | 36 | | 7.4.0 - 8.5.5 | 0.11 | 37 | 38 | > **Note:** Starting from version 8, Grafana will not load unsigned plugins. 39 | > To load the sysdig plugin you must set the [allow_loading_unsigned_plugins](https://grafana.com/docs/grafana/v8.5/administration/configuration/#allow_loading_unsigned_plugins) property. (E.g. `allow_loading_unsigned_plugins=sysdig`) 40 | > For more information about the configuration files, refer to the [Grafana docs](https://grafana.com/docs/grafana/v8.5/administration/configuration/#configuration-file-location). 41 | 42 | #### Using a Grafana Docker Container 43 | 44 | We offer a Docker container image based on Grafana that comes with the plugin pre-installed: 45 | 46 | ``` 47 | docker run -d -p 3000:3000 -e GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS=sysdig --name grafana sysdiglabs/grafana:latest 48 | ``` 49 | 50 | > For more information, refer to the [Docker Hub repository page](https://hub.docker.com/r/sysdiglabs/grafana). 51 | 52 | --- 53 | 54 | Alternatively, the default Grafana container image can be used as is, and the plugin directory can be mounted on the host to make it available in the container: 55 | 56 | 1. Prepare the Grafana data directory and download the plugin: 57 | ``` 58 | mkdir grafana-data 59 | mkdir grafana-data/plugins 60 | curl https://download.sysdig.com/stable/grafana-sysdig-datasource/grafana-sysdig-datasource-v0.11.tgz -o sysdig.tgz 61 | tar zxf sysdig.tgz -C grafana-data/plugins 62 | ``` 63 | 2. 
Start the container with the current user, to give read/write permissions to the data directory: 64 | ``` 65 | ID=$(id -u) 66 | docker run -d --user $ID --volume "$PWD/grafana-data:/var/lib/grafana" -p 3000:3000 -e GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS=sysdig grafana/grafana:latest 67 | ``` 68 | 69 | > For more information, refer to the [Grafana installation documentation](http://docs.grafana.org/installation/docker/#grafana-container-using-bind-mounts) and the [Docker documentation](https://docs.docker.com/storage/bind-mounts/). 70 | 71 | #### Codifing the Sysdig Datasource with Grafana 72 | 73 | These instructions will often apply to container-based platforms such as kubernetes and is focused more on how to codify the installation and configuration of the datasource & dashboards. Grafana supports many dynamic configuration capabilites such as using Environment Variables or loading in dashboard and datasource configurations. 74 | 75 | You can refer to [CONFIGURE_AS_CODE](CONFIGURE_AS_CODE.md) file for instructions. 76 | 77 | #### Using Grafana Installed on the Host 78 | 79 | The plugin can be installed on any host where Grafana is installed. To install the plugin: 80 | 81 | ##### Linux 82 | 83 | 1. Open a shell terminal. 84 | 2. Run the series of commands below: 85 | ``` 86 | curl https://download.sysdig.com/stable/grafana-sysdig-datasource/grafana-sysdig-datasource-v0.11.tgz -o sysdig.tgz 87 | tar zxf sysdig.tgz 88 | sudo cp -R sysdig /var/lib/grafana/plugins 89 | sudo service grafana-server restart 90 | ``` 91 | 92 | > **Note**: Grafana plugins are installed in `/usr/share/grafana/plugins`. However, the Sysdig plugin must be installed in `/var/lib/grafana/plugins` instead. 93 | 94 | 95 | ##### Mac 96 | 97 | 98 | 1. Open a shell terminal. 99 | 2. Run the series of commands below: 100 | ``` 101 | curl https://download.sysdig.com/stable/grafana-sysdig-datasource/grafana-sysdig-datasource-v0.11.tgz -o sysdig.tgz 102 | tar zxf sysdig.tgz 103 | cp -R sysdig /usr/local/var/lib/grafana/plugins 104 | brew services restart grafana 105 | ``` 106 | 107 | > **Note:** For more information, refer to the [Grafana installation on Mac](http://docs.grafana.org/installation/mac/) documentation. 108 | 109 | 110 | ##### Windows 111 | 112 | 1. Download the plugin from: https://download.sysdig.com/stable/grafana-sysdig-datasource/grafana-sysdig-datasource-v0.11.zip 113 | 2. Install the plugin in the Grafana plugins folder. 114 | 3. Restart Grafana. 115 | 116 | > **Note:** For more information, refer to the [Grafana installation on Windows](http://docs.grafana.org/installation/windows/) documentation. 117 | 118 | 119 | ### 2. Add datasource 120 | 121 | To add a datasource to Grafana: 122 | 123 | 1. Open Grafana. 124 | 2. On the Datasources tab, click the **Add Data Sources** button. 125 | 3. Define a name for the datasource. 126 | 4. Open the Type dropdown menu, and select _Sysdig_. 127 | 5. Open the Plan dropdown menu, and select either _Basic/Pro Cloud_ for Sysdig SaaS or _Pro Software_ for on-premises installations. 128 | 6. Open the Sysdig UI, and navigate to **Settings -> User Profile -> Sysdig Monitor API token**. 129 | 7. Copy the API token, and paste it into the API Token field in Grafana. 130 | 131 |

132 | [Image: Add Sysdig datasource] 133 |
134 | 135 | 136 | ## Panels 137 | 138 | Custom panels can be added once the Sysdig datasource is installed. Any panel supported by Grafana can be used. 139 | 140 | > **Note:** For more information, refer to the [Grafana documentation website](http://docs.grafana.org/features/panels/graph/). 141 | 142 | 143 | ### Aggregated panels 144 | 145 | In Sysdig, number panels, bar charts and histograms display aggregated data (i.e. a single data point across the entire time window). By default, Grafana loads time series and then applies an additional aggregation to data points to calculate a single value (displayed in the Singlestat panel for instance). 146 | 147 | > **Note:** To maintain the same aggregation mechanism and precision offered by the Sysdig API, create panels with the "Fetch single data point" flag turned on. This will instruct the datasource to make an aggregated data request to the API. 148 | 149 | ### Table panels 150 | 151 | Starting from Grafana 7.4, and Sysdig plugin 0.11, the table panel must be created with the "Fetch as table" flag turned on. 152 | This flag can be used also with other Grafana panel types that requires data in a table format, like `Bar chart` and `Bar gauge`. 153 | 154 | > **Note**: no migration is required for the existing panels. 155 | 156 | ### Filters 157 | 158 | A panel can be configured with an optional filter to fetch data for a subset of the infrastructure or only for a given label. 159 | 160 | The filter is a string, and should follow the Sysdig filtering language syntax: 161 | 162 | * The syntax of an expression is `label_name operator "label_value"` (double-quotes are mandatory) 163 | * Expressions can be combined with the boolean operators and/or (`expression and expression or expression`) 164 | * The following operators are supported: 165 | * `=` and `!=` (e.g. `name = "value"` or `name != "value"`) 166 | * `contains` and `not ... contains` (e.g. `name contains "value"` or `not name contains "value"`) 167 | * `in` and `not... in` (e.g. `name in ("value-1", "value-2")` or `not name in ("value-1", "value-2")`) 168 | * Valid label names are essentially the ones used for the segmentation (use the *Segment by* dropdown to review what is needed). 169 | 170 | Some examples: 171 | 172 | * `host.hostName = "ip-1-2-3-4"` 173 | * `cloudProvider.availabilityZone = "us-east-2b" and container.name = "boring_sinoussi"` (where `cloudProvider.*` are labels coming from AWS) 174 | * `kubernetes.namespace.name = "java-app" and kubernetes.deployment.name in ("cassandra", "redis")` 175 | 176 | 177 | ### Aliases 178 | 179 | The Sysdig datasource tries to name panel graphical element (eg. graph line, or table column) so that the legend can clearly identify what eacy element refers to (eg. which process, or host, or container). 180 | 181 | In some cases, the default configuration is not enough. 182 | 183 | You can use the _alias_ field to configure how elements of a query should be named in the panel. Here's what you can do: 184 | 185 | * Any literal text will be used as is (eg. `host:`) 186 | * The following patterns are available 187 | * `{{metric}}` will be replaced with the metric name (eg. `cpu.used.percent`) 188 | * `{{segment_name}}` will be replaced with the segment label name (eg. `proc.name`) 189 | * `{{segment_value}}` will be replaced with the segment value name (eg. 
`cassandra`) 190 | * Each pattern can also use the following modifiers: 191 | * `{{pattern:X:Y}}` where `X` is the number of characters to be used at the beginning, and `Y` is the number of characters to be used from the end. Example: `{{segment_value:4:6}}` for `sysdig-34e2a10cc` would be replaced with `sysd..2a10cc` 192 | * `{{pattern /regular expression/}}` will be replaced with the result of the regular expression, where only capturing groups will be used. Example: `{{segment_value /(\w+)$/}}` for `sysdig-34e2a10cc` would be replaced with `34e2a10cc`. Please refer to [Regular Expressions guide on MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions) for additional information about regular expressions 193 | 194 | Here's an example: 195 | 196 |
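In text form, the substitution works roughly like this (the metric and segment values are purely illustrative, reusing the sample value `sysdig-34e2a10cc` from above):

```
Metric: cpu.used.percent, segmented by container.name

Alias:   {{metric}} on {{segment_value /(\w+)$/}}
Legend:  cpu.used.percent on 34e2a10cc

Alias:   container: {{segment_value:4:6}}
Legend:  container: sysd..2a10cc
```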

197 | [Image: Aliases] 198 |
199 | 200 | 201 | 202 | ## Variables 203 | 204 | The Sysdig datasource plugin supports variables, allowing for dynamic and interactive dashboards to be created. 205 | 206 | > **Note:** Sysdig recommends reviewing the [Grafana Variables documentation](http://docs.grafana.org/reference/templating/) for use cases, examples, and more. 207 | 208 | Variables can be used to configure any property of a dashboard panel: 209 | 210 | * **Metric**: Select the **metric name** to use for the panel query 211 | * **Time and group aggregations**: Select the aggregation (basic aggregations are `timeAvg` for rate, `avg`, `sum`, `min`, `max`) 212 | * **Segmentation** (*Segment by* fields): Select the **label name** to segment data 213 | * **Filter**: Use either **label names** or **label values** to define a data filter 214 | * **Display direction**: Select to show top or bottom values (valid values are `desc` and `top` for "top values", or `asc` and `bottom` for "bottom values") 215 | * **Display paging**: Select how many elements to show 216 | 217 | The following list shows how variables can be configured: 218 | 219 | * *Query*, *custom*, and *constant* variable types are supported 220 | * The query for a **metric name** can use the function `metrics(pattern)` that returns a list of metrics matching the specific `pattern` regex 221 | * The query for a **label name** can use the function `label_names(pattern)` that returns a list of label names matching the specific `pattern` regex 222 | * The query for a **label value** can use the function `label_values(label_name)` that returns a list of label values for the specified label name 223 | * A **label value** can be configured with *multi-value* and/or *include all option* properties enabled **only** with `in` and `not ... in` operators 224 | 225 | Please note that **metric name** and **label name** variables cannot have *multi-value* or *include all option* properties enabled 226 | 227 | 228 | ### Metric names 229 | 230 | Variables can be created to identify a metric name, and then use it to configure a panel with a dynamic metric. 231 | 232 | A couple of notes about variables for metric names: 233 | 234 | * *Query*, *Custom*, or *Constant* variables can be used. 235 | > **Note:** Please note that the *Multi-value* and *Include All* options must be disabled. 236 | * *Query* variables can use the `metrics(pattern)` function, that returns a list of metrics matching the specific `pattern` regex. 237 | 238 |
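For instance, a *Query* variable (say, named `metric`) could use a query like the following to list all CPU-related metrics (the regex is only an example):

```
metrics(cpu\..*)
```

The chosen value can then be referenced as `$metric` in the panel's metric field.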

239 | [Image: Metric variable configuration] 240 | 241 | 242 | [Image: Metric variable] 243 |
244 | 245 | 246 | ### Label names 247 | 248 | Label names are used for panel segmentations (*Segment by* field) and filters. 249 | 250 | A couple of notes about variables for label names: 251 | 252 | * *Query*, *Custom*, or *Constant* variables can be used. 253 | > **Note:** Please note that the *Multi-value* and *Include All* options must be disabled. 254 | * *Query* variables can use the `label_names(pattern)` function, that returns a list of label names matching the specific `pattern` regex. 255 | 256 |
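For example, a *Query* variable (say, named `segment`) could list all Kubernetes-related labels with a query like this (the regex is illustrative):

```
label_names(kubernetes\..*)
```

The resulting variable (e.g. `$segment`) can then be selected in the *Segment by* field or used as the label name in a filter.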

257 | [Image: Segmentation variable configuration] 258 | 259 | 260 | [Image: Segmentation variable] 261 |
262 | 263 | 264 | ### Label values 265 | 266 | Label values are used in filters to identify a subset of the infrastructure or data in general, allowing users to create a row per service, or use a single dashboard to analyze all available applications. 267 | 268 | Some notes about variables for label values: 269 | 270 | * You can use a *Query*, *Custom*, or *Constant* variables. 271 | * *Query* variables can use the `label_values(label_name)` function, that returns a list of label values for the specified label name. 272 | * The query accepts the following optional parameters: 273 | 1. `filter` to limit the list of values according to the specified filter. Example: `label_values(kubernetes.namespace.name, filter='kubernetes.deployment.name = "foo"')` to return a list of Kubernetes namespaces within the Kubernetes deployment named `foo`. You can also refer to other variables in the filter for an additional level of customization in dashboards 274 | 2. `from`, `to`, `limit` to control the subset of values to show in the menu in the dashboard (by default, `from=0, to=99` to return the first 100 entries) 275 | * *Multi-value* variables, or variables with the *Include All* option enabled can **only** be used with `in` and `not ... in` operators. 276 | * Variables must not be enclosed by quotes. 277 | > **Note:** The final string will contain quotes when needed (e.g. `$name = $value` will be resolved to `metric = "foo"`). 278 | 279 |
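As a sketch of how these pieces fit together (the variable names are illustrative), two chained variables and a panel filter could be defined as follows:

```
# Variable "namespace"
label_values(kubernetes.namespace.name)

# Variable "deployment" (Multi-value enabled), restricted to the selected namespace
label_values(kubernetes.deployment.name, filter='kubernetes.namespace.name = $namespace')

# Panel filter using both variables
kubernetes.namespace.name = $namespace and kubernetes.deployment.name in ($deployment)
```

As noted above, the variables are written without quotes; quotes are added when the final filter string is resolved.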

280 | [Image: Filter variable configuration] 281 | 282 | 283 | [Image: Filter variable] 284 |
285 | 286 | 287 | The complete example below contains dynamic rows and panels: 288 | 289 | 290 | [Image: Final dashboard with variables] 291 |
292 | 293 | --- 294 | 295 | ## Support / Community 296 | 297 | We'd love to hear from you! Join our [Public Slack](https://slack.sysdig.com) channel ([#grafana](https://sysdig.slack.com/messages/CA7RSQXK9)) for announcements and discussions. 298 | -------------------------------------------------------------------------------- /VERSION: -------------------------------------------------------------------------------- 1 | 0.11 2 | 3 | -------------------------------------------------------------------------------- /VERSION_GRAFANA: -------------------------------------------------------------------------------- 1 | 8.5.5 2 | -------------------------------------------------------------------------------- /build/Dockerfile: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2018 Draios Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | FROM debian:stable-slim 17 | 18 | 19 | 20 | ############################################################################### 21 | # # 22 | # Install basic tools/utilities # 23 | # # 24 | ############################################################################### 25 | RUN apt-get update -y && \ 26 | apt-get install -y --no-install-recommends \ 27 | curl \ 28 | zip \ 29 | git \ 30 | awscli \ 31 | && \ 32 | apt-get install -f 33 | 34 | # Install the latest Docker CE binaries 35 | # From https://github.com/getintodevops/jenkins-withdocker/blob/master/Dockerfile 36 | RUN apt-get update && \ 37 | apt-get -y install apt-transport-https \ 38 | ca-certificates \ 39 | curl \ 40 | gnupg2 \ 41 | software-properties-common && \ 42 | curl -fsSL https://download.docker.com/linux/$(. /etc/os-release; echo "$ID")/gpg > /tmp/dkey; apt-key add /tmp/dkey && \ 43 | add-apt-repository \ 44 | "deb [arch=amd64] https://download.docker.com/linux/$(. /etc/os-release; echo "$ID") \ 45 | $(lsb_release -cs) \ 46 | stable" && \ 47 | apt-get update && \ 48 | apt-get -y install docker-ce 49 | 50 | # Install Node.js v10 51 | # (ref. 
https://github.com/nodesource/distributions/blob/master/README.md#installation-instructions) 52 | RUN curl -sL https://deb.nodesource.com/setup_10.x | bash - && \ 53 | apt-get install -y nodejs 54 | 55 | # Cleanup 56 | RUN apt-get clean autoclean && \ 57 | rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* 58 | 59 | 60 | 61 | ############################################################################### 62 | # # 63 | # Prepare environment # 64 | # # 65 | ############################################################################### 66 | 67 | WORKDIR /usr/bin/grafana-sysdig-datasource 68 | 69 | 70 | 71 | ############################################################################### 72 | # # 73 | # Run the build # 74 | # # 75 | ############################################################################### 76 | 77 | CMD ["./build/build.sh"] 78 | -------------------------------------------------------------------------------- /build/Jenkinsfile: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | // 16 | pipeline { 17 | agent { 18 | label 'amazon-linux2' 19 | } 20 | 21 | options { 22 | disableConcurrentBuilds() 23 | } 24 | 25 | environment { 26 | BUILDER = "sysdiglabs/grafana-sysdig-datasource-builder:0.1" 27 | VERSION = readFile "VERSION" 28 | } 29 | 30 | stages { 31 | stage('Prepare') { 32 | steps { 33 | // At this point, Jenkins checked out the repository already. 34 | // Also, the workspace is persistent. 
35 | // Since the previous build might have left some artifacts then now we're going to 36 | // remove everything and checkout the repository again 37 | step([$class: 'WsCleanup']) 38 | 39 | sh "docker rm grafana-sysdig-datasource-builder || echo \\\"Builder image not found\\\"" 40 | 41 | // NOTE: 'checkout scm' is equivalent to 42 | // git url: "https://github.com/:account/:repository", branch: "${env.BRANCH_NAME}", credentialsId: ${env.MY_CREDENTAILS_NAME}" 43 | checkout scm 44 | 45 | // Define build number 46 | script { 47 | def now = new Date() 48 | 49 | env.VERSION_BUILD_NUMBER=now.format("yyyyMMdd.HHmmss", TimeZone.getTimeZone('UTC')) 50 | } 51 | } 52 | } 53 | 54 | stage('Build') { 55 | environment { 56 | CWD = sh(script: 'pwd', , returnStdout: true).trim() 57 | } 58 | steps { 59 | sh "docker run --name grafana-sysdig-datasource-builder -v ${CWD}:/usr/bin/grafana-sysdig-datasource -v /var/run/docker.sock:/var/run/docker.sock -e 'GIT_BRANCH=${env.BRANCH_NAME}' -e 'BUILD_NUMBER=${env.VERSION_BUILD_NUMBER}' ${env.BUILDER}" 60 | } 61 | } 62 | 63 | stage('Publish Docker image') { 64 | steps { 65 | withCredentials([usernamePassword(credentialsId: "dockerhub-robot-account", passwordVariable: "DOCKER_PASSWORD", usernameVariable: "DOCKER_USERNAME")]) { 66 | sh "GIT_BRANCH=${env.BRANCH_NAME} BUILD_NUMBER=${env.VERSION_BUILD_NUMBER} DOCKER_USERNAME=${DOCKER_USERNAME} DOCKER_PASSWORD=${DOCKER_PASSWORD} ./build/publish.sh" 67 | } 68 | } 69 | } 70 | 71 | stage('Tag') { 72 | when { 73 | branch 'master' 74 | } 75 | steps { 76 | echo "Tagging repository v${VERSION}" 77 | 78 | withCredentials([usernamePassword(credentialsId: "github-jenkins-user-token", passwordVariable: "GIT_PASSWORD", usernameVariable: "GIT_USERNAME")]) { 79 | sh("git tag -a v${VERSION} -m 'Release v${VERSION}'") 80 | sh("git push https://${GIT_USERNAME}:${GIT_PASSWORD}@github.com/draios/grafana-sysdig-datasource --tags") 81 | } 82 | } 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /build/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Env parameters 4 | # - BUILD_CONTAINER (default: true) 5 | # - CLEANUP (default: true) 6 | # - ENVIRONMENT (default: development) 7 | # - BUILD_NUMBER 8 | # - GIT_BRANCH (default: dev) 9 | 10 | setup_env() { 11 | echo "Prepare environment..." 
12 | 13 | set +u 14 | 15 | # 16 | # Set default variables 17 | # 18 | if [ -z ${BUILD_CONTAINER} ]; then 19 | BUILD_CONTAINER=true 20 | fi 21 | if [ -z ${CLEANUP} ]; then 22 | CLEANUP=true 23 | fi 24 | if [ -z ${ENVIRONMENT} ]; then 25 | ENVIRONMENT=development 26 | fi 27 | if [ -z ${GIT_BRANCH} ]; then 28 | GIT_BRANCH=dev 29 | fi 30 | if [ -z ${BUILD_NUMBER} ]; then 31 | BUILD_NUMBER=42 32 | fi 33 | 34 | set -u 35 | 36 | GIT_BRANCH_NAME=$(echo ${GIT_BRANCH} | cut -d"/" -f2) 37 | 38 | if [ "${GIT_BRANCH_NAME}" = "master" ]; then 39 | ENVIRONMENT=production 40 | fi 41 | 42 | USER_VERSION=`cat VERSION` 43 | if [ "${ENVIRONMENT}" = "production" ]; then 44 | VERSION=${USER_VERSION} 45 | else 46 | VERSION=${USER_VERSION}.${BUILD_NUMBER} 47 | fi 48 | GRAFANA_VERSION=`cat VERSION_GRAFANA` 49 | 50 | FILE_NAME_PREFIX="grafana-sysdig-datasource" 51 | BUILD_FILE_NAME="${FILE_NAME_PREFIX}-v${USER_VERSION}.${BUILD_NUMBER}" 52 | BUILD_FILE_NAME_LATEST="${FILE_NAME_PREFIX}-v${USER_VERSION}" 53 | 54 | DOCKER_IMAGE_TAG=sysdiglabs/grafana 55 | if [ "${ENVIRONMENT}" = "production" ]; then 56 | DOCKER_IMAGE_VERSION=${GRAFANA_VERSION}-sysdig-${USER_VERSION} 57 | DOCKER_IMAGE_VERSION_LATEST="latest" 58 | else 59 | DOCKER_IMAGE_VERSION=${GRAFANA_VERSION}-sysdig-${USER_VERSION}.${BUILD_NUMBER}-${GIT_BRANCH_NAME} 60 | DOCKER_IMAGE_VERSION_LATEST="dev" 61 | fi 62 | 63 | DIST_PATH="dist" 64 | 65 | # Disabling interactive progress bar, and spinners gains 2x performances 66 | # as stated on https://twitter.com/gavinjoyce/status/691773956144119808 67 | npm config set progress false 68 | npm config set spin false 69 | } 70 | 71 | build() { 72 | echo "Building..." 73 | 74 | npm ci 75 | 76 | npm run build 77 | 78 | echo "Cleaning up artifacts...." 79 | cp -R ${DIST_PATH} sysdig 80 | rm -rf sysdig/test 81 | 82 | mkdir out 83 | zip -ry out/${BUILD_FILE_NAME}.zip sysdig 84 | tar zcvf out/${BUILD_FILE_NAME}.tgz sysdig 85 | 86 | if [ "${BUILD_CONTAINER}" = "true" ]; then 87 | # 88 | # create temporary folder with image content 89 | # 90 | rm -rf dist-image 91 | mkdir dist-image 92 | cp -r sysdig dist-image 93 | cp deployment/Dockerfile dist-image 94 | 95 | # 96 | # build Docker image 97 | # 98 | docker build dist-image \ 99 | --build-arg GRAFANA_VERSION=${GRAFANA_VERSION} \ 100 | -t ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION} \ 101 | -t ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION_LATEST} 102 | fi 103 | } 104 | 105 | cleanup() { 106 | if [ "${CLEANUP}" = "true" ]; then 107 | echo "Cleaning up..." 108 | 109 | rm -rf out 110 | rm -rf dist-image 111 | 112 | npm run clean 113 | 114 | docker rmi ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION} || echo "Image ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION} not found!" 115 | docker rmi ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION_LATEST} || echo "Image ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION_LATEST} not found!" 116 | fi 117 | } 118 | 119 | set -ex 120 | setup_env 121 | cleanup 122 | build 123 | set +ex 124 | 125 | echo "Done!" 126 | -------------------------------------------------------------------------------- /build/notify-slack.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 
6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | // 16 | /* global require */ 17 | /* global process */ 18 | /* global Buffer */ 19 | 20 | const https = require('https'); 21 | const url = require('url'); 22 | 23 | const slackUrl = process.argv[2]; 24 | const version = process.argv[3]; 25 | const buildNumber = process.argv[4]; 26 | const branchName = process.argv[5]; 27 | const buildUrl = process.argv[6]; 28 | const previousResult = process.argv[7]; 29 | const result = process.argv[8]; 30 | const startTime = process.argv[9]; 31 | const duration = process.argv[10]; 32 | const gitCommitHash = process.argv[11]; 33 | 34 | const colors = { 35 | bad: '#FF871E', 36 | veryBad: '#EB3250', 37 | warning: '#FAFA3C', 38 | good: '#55EB5A', 39 | unknown: '#B3C3C6' 40 | }; 41 | 42 | const changeAnalysis = analyzeChange(previousResult, result); 43 | 44 | if ( 45 | branchName !== 'master' && 46 | changeAnalysis.isFirstBuild === false && 47 | changeAnalysis.isSuccessful && 48 | changeAnalysis.isFirstSuccess === false 49 | ) { 50 | // No need to post the successful message again 51 | process.exit(0); 52 | } 53 | 54 | let title; 55 | let text; 56 | let color; 57 | switch (result) { 58 | case 'SUCCESS': 59 | title = 'Build succeeded'; 60 | text = `The build #${buildNumber} succeeded in ${duration / 1000} seconds.`; 61 | color = colors.good; 62 | break; 63 | 64 | case 'FAILURE': 65 | title = 'Build failed'; 66 | text = `The build #${buildNumber} failed in ${duration / 1000} seconds.`; 67 | color = colors.bad; 68 | break; 69 | case 'ABORTED': 70 | title = 'Build aborted'; 71 | text = `The build #${buildNumber} has been aborted in ${duration / 1000} seconds.`; 72 | color = colors.warning; 73 | break; 74 | case 'UNSTABLE': 75 | title = 'Build unstable'; 76 | text = `The build #${buildNumber} is unstable in ${duration / 1000} seconds.`; 77 | color = colors.bad; 78 | break; 79 | 80 | case 'FIXED': 81 | title = 'Build fixed'; 82 | text = `The build #${buildNumber} got fixed in ${duration / 1000} seconds.`; 83 | color = colors.good; 84 | break; 85 | 86 | default: 87 | title = 'Build terminated'; 88 | text = `The build #${buildNumber} terminated with result ${result} in ${duration / 89 | 1000} seconds.`; 90 | break; 91 | } 92 | 93 | const json = { 94 | channel: '#grafana-ds-activity', 95 | username: 'jenkins', 96 | attachments: [ 97 | { 98 | color, 99 | title, 100 | title_link: buildUrl, 101 | text, 102 | fallback: text, 103 | fields: [ 104 | { 105 | title: 'Build number', 106 | value: buildNumber, 107 | short: true 108 | }, 109 | { 110 | title: 'Version', 111 | value: version, 112 | short: true 113 | }, 114 | { 115 | title: 'Repository and branch', 116 | value: `https://github.com/draios/grafana-sysdig-datasource/tree/${branchName}`, 117 | short: false 118 | }, 119 | { 120 | title: 'Latest commit', 121 | value: `https://github.com/draios/grafana-sysdig-datasource/commit/${gitCommitHash}`, 122 | short: false 123 | } 124 | ], 125 | footer: 'Built by Jenkins', 126 | ts: startTime / 1000 127 | } 128 | ] 129 | }; 130 | 131 | const postData = JSON.stringify(json); 132 | const urlObj = url.parse(slackUrl); 133 | const 
options = { 134 | hostname: urlObj.hostname, 135 | path: urlObj.pathname, 136 | method: 'POST', 137 | headers: { 138 | 'Content-Type': 'application/json', 139 | 'Content-Length': Buffer.byteLength(postData) 140 | } 141 | }; 142 | const req = https.request(options, (res) => { 143 | res.on('end', () => { 144 | process.exit(0); 145 | }); 146 | }); 147 | 148 | req.on('error', (e) => { 149 | console.error(`Slack notification failed: ${e.message}`); 150 | process.exit(1); 151 | }); 152 | 153 | req.write(postData); 154 | req.end(); 155 | 156 | function analyzeChange(previousResult, result) { 157 | switch (result) { 158 | case 'SUCCESS': 159 | case 'FIXED': 160 | switch (previousResult) { 161 | case 'SUCCESS': 162 | case 'FIXED': 163 | return { 164 | isFirstBuild: false, 165 | isFirstFailure: false, 166 | isFirstSuccess: false, 167 | isSuccessful: true 168 | }; 169 | case 'FAILURE': 170 | case 'ABORTED': 171 | case 'UNSTABLE': 172 | return { 173 | isFirstBuild: false, 174 | isFirstFailure: false, 175 | isFirstSuccess: true, 176 | isSuccessful: true 177 | }; 178 | 179 | case 'NONE': 180 | return { 181 | isFirstBuild: true, 182 | isFirstFailure: false, 183 | isFirstSuccess: true, 184 | isSuccessful: true 185 | }; 186 | 187 | default: 188 | return { 189 | isFirstBuild: false, 190 | isFirstFailure: false, 191 | isFirstSuccess: false, 192 | isSuccessful: true 193 | }; 194 | } 195 | 196 | case 'FAILURE': 197 | case 'ABORTED': 198 | case 'UNSTABLE': 199 | switch (previousResult) { 200 | case 'SUCCESS': 201 | case 'FIXED': 202 | return { 203 | isFirstBuild: false, 204 | isFirstFailure: true, 205 | isFirstSuccess: false, 206 | isSuccessful: false 207 | }; 208 | case 'FAILURE': 209 | case 'ABORTED': 210 | case 'UNSTABLE': 211 | return { 212 | isFirstBuild: false, 213 | isFirstFailure: false, 214 | isFirstSuccess: false, 215 | isSuccessful: false 216 | }; 217 | 218 | case 'NONE': 219 | return { 220 | isFirstBuild: true, 221 | isFirstFailure: false, 222 | isFirstSuccess: true, 223 | isSuccessful: true 224 | }; 225 | 226 | default: 227 | return { 228 | isFirstBuild: false, 229 | isFirstFailure: false, 230 | isFirstSuccess: false, 231 | isSuccessful: false 232 | }; 233 | } 234 | 235 | default: 236 | return { 237 | isFirstBuild: false, 238 | isFirstFailure: false, 239 | isFirstSuccess: false, 240 | isSuccessful: false 241 | }; 242 | } 243 | } 244 | -------------------------------------------------------------------------------- /build/publish.sh: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2018 Draios Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | #!/bin/bash 17 | 18 | # Env parameters 19 | # - BUILD_CONTAINER (default: true) 20 | # - ENVIRONMENT (default: development) 21 | # - GIT_BRANCH (default: dev) 22 | # - BUILD_NUMBER (default: 42) 23 | 24 | setup_env() { 25 | echo "Prepare environment..." 
26 | 27 | set +u 28 | 29 | # 30 | # Set default variables 31 | # 32 | if [ -z ${BUILD_CONTAINER} ]; then 33 | BUILD_CONTAINER=true 34 | fi 35 | if [ -z ${CLEANUP} ]; then 36 | CLEANUP=true 37 | fi 38 | if [ -z ${ENVIRONMENT} ]; then 39 | ENVIRONMENT=development 40 | fi 41 | if [ -z ${GIT_BRANCH} ]; then 42 | GIT_BRANCH=dev 43 | fi 44 | if [ -z ${BUILD_NUMBER} ]; then 45 | BUILD_NUMBER=42 46 | fi 47 | 48 | set -u 49 | 50 | GIT_BRANCH_NAME=$(echo ${GIT_BRANCH} | cut -d"/" -f2) 51 | 52 | if [ "${GIT_BRANCH_NAME}" = "master" ]; then 53 | ENVIRONMENT=production 54 | fi 55 | 56 | USER_VERSION=`cat VERSION` 57 | if [ "${ENVIRONMENT}" = "production" ]; then 58 | VERSION=${USER_VERSION} 59 | else 60 | VERSION=${USER_VERSION}.${BUILD_NUMBER} 61 | fi 62 | GRAFANA_VERSION=`cat VERSION_GRAFANA` 63 | 64 | FILE_NAME_PREFIX="grafana-sysdig-datasource" 65 | BUILD_FILE_NAME="${FILE_NAME_PREFIX}-v${USER_VERSION}.${BUILD_NUMBER}" 66 | BUILD_FILE_NAME_LATEST="${FILE_NAME_PREFIX}-v${USER_VERSION}" 67 | 68 | DOCKER_IMAGE_TAG=sysdiglabs/grafana 69 | if [ "${ENVIRONMENT}" = "production" ]; then 70 | DOCKER_IMAGE_VERSION=${GRAFANA_VERSION}-sysdig-${USER_VERSION} 71 | DOCKER_IMAGE_VERSION_LATEST="latest" 72 | else 73 | DOCKER_IMAGE_VERSION=${GRAFANA_VERSION}-sysdig-${USER_VERSION}.${BUILD_NUMBER}-${GIT_BRANCH_NAME} 74 | DOCKER_IMAGE_VERSION_LATEST="dev" 75 | fi 76 | 77 | S3_BUCKET="s3://download.draios.com" 78 | 79 | if [ "${ENVIRONMENT}" = "production" ]; then 80 | S3_DEST="stable/grafana-sysdig-datasource" 81 | elif [[ "$GIT_BRANCH_NAME" == 'dev' ]]; then 82 | S3_DEST="dev/grafana-sysdig-datasource" 83 | else 84 | S3_DEST="dev/grafana-sysdig-datasource/${GIT_BRANCH_NAME}" 85 | fi 86 | } 87 | 88 | publish_artifacts() { 89 | echo "Uploading artifacts to S3..." 90 | 91 | aws s3 cp out/${BUILD_FILE_NAME}.zip ${S3_BUCKET}/${S3_DEST}/${BUILD_FILE_NAME}.zip --acl public-read 92 | aws s3 cp out/${BUILD_FILE_NAME}.tgz ${S3_BUCKET}/${S3_DEST}/${BUILD_FILE_NAME}.tgz --acl public-read 93 | 94 | aws s3 cp out/${BUILD_FILE_NAME}.zip ${S3_BUCKET}/${S3_DEST}/${BUILD_FILE_NAME_LATEST}.zip --acl public-read 95 | aws s3 cp out/${BUILD_FILE_NAME}.tgz ${S3_BUCKET}/${S3_DEST}/${BUILD_FILE_NAME_LATEST}.tgz --acl public-read 96 | 97 | if [ "${BUILD_CONTAINER}" = "true" ]; then 98 | if [ "${ENVIRONMENT}" = "production" ] || [ "${GIT_BRANCH_NAME}" = "dev" ]; then 99 | echo "Publishing image to Docker hub..." 100 | 101 | docker login -u=${DOCKER_USERNAME} -p=${DOCKER_PASSWORD} 102 | docker push ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION} 103 | docker push ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION_LATEST} 104 | fi 105 | fi 106 | } 107 | 108 | cleanup() { 109 | echo "Cleaning up..." 110 | 111 | docker rmi ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION} || echo "Image ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION} not found!" 112 | docker rmi ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION_LATEST} || echo "Image ${DOCKER_IMAGE_TAG}:${DOCKER_IMAGE_VERSION_LATEST} not found!" 113 | } 114 | 115 | set -ex 116 | setup_env 117 | publish_artifacts 118 | cleanup 119 | set +ex 120 | 121 | echo "Done!" 122 | -------------------------------------------------------------------------------- /build/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | setup_env() { 4 | echo "Prepare environment..." 
5 | 6 | GRAFANA_VERSION=`cat VERSION_GRAFANA` 7 | 8 | DIST_PATH="dist" 9 | 10 | PLUGIN_DIR="grafana-data-${GRAFANA_VERSION}/plugins" 11 | 12 | USER_ID=$(id -u) 13 | 14 | # Disabling interactive progress bar, and spinners gains 2x performances 15 | # as stated on https://twitter.com/gavinjoyce/status/691773956144119808 16 | npm config set progress false 17 | npm config set spin false 18 | } 19 | 20 | start() { 21 | echo "Cleaning up..." 22 | 23 | rm -rf ${PLUGIN_DIR} 24 | docker stop grafana-dev > /dev/null || true 25 | docker rm grafana-dev > /dev/null || true 26 | 27 | echo "Building..." 28 | 29 | npm run build 30 | 31 | mkdir -p ${PLUGIN_DIR}/sysdig 32 | 33 | cp -R ${DIST_PATH}/. ${PLUGIN_DIR}/sysdig 34 | rm -rf ${PLUGIN_DIR}/sysdig/test 35 | 36 | echo "Build complete." 37 | 38 | echo "Starting Grafana ${GRAFANA_VERSION} docker container with Sysdig plugin..." 39 | 40 | mkdir -p grafana-data-${GRAFANA_VERSION} 41 | 42 | docker run -p 3000:3000 --user $USER_ID -v "${PWD}/grafana-data-${GRAFANA_VERSION}:/var/lib/grafana" \ 43 | -e GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS=sysdig \ 44 | --rm grafana/grafana:${GRAFANA_VERSION} 45 | 46 | } 47 | 48 | set -ex 49 | setup_env 50 | start 51 | -------------------------------------------------------------------------------- /datasource-settings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/draios/grafana-sysdig-datasource/c81c2812446c8a86bf3da62712eaa0df55b45142/datasource-settings.png -------------------------------------------------------------------------------- /deployment/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG GRAFANA_VERSION="latest" 2 | 3 | FROM grafana/grafana:${GRAFANA_VERSION} 4 | 5 | ADD sysdig /var/lib/grafana/plugins/sysdig 6 | -------------------------------------------------------------------------------- /deployment/build-image.sh: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2018 Draios Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
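#
# Expects four positional arguments: BUILD_BASE_URL VERSION TAG GRAFANA_VERSION.
# The script downloads grafana-sysdig-datasource-v${VERSION}.tgz from
# ${BUILD_BASE_URL}, builds a Grafana image tagged
# ${TAG}:${GRAFANA_VERSION}-sysdig-${VERSION}, and then removes the extracted
# plugin artifacts.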
15 | # 16 | #!/bin/bash 17 | 18 | set -x 19 | 20 | BUILD_BASE_URL=$1 21 | VERSION=$2 22 | TAG=$3 23 | GRAFANA_VERSION=$4 24 | 25 | BUILD_URL=${BUILD_BASE_URL}/grafana-sysdig-datasource-v${VERSION}.tgz 26 | 27 | # 28 | # Download and extract plugin 29 | # 30 | curl ${BUILD_URL} -o plugin.tgz 31 | tar zxf plugin.tgz -C deployment 32 | 33 | # 34 | # Build image 35 | # 36 | docker build --build-arg GRAFANA_VERSION=${GRAFANA_VERSION} -t ${TAG}:${GRAFANA_VERSION}-sysdig-${VERSION} deployment 37 | 38 | # 39 | # Cleanup artifacts extracted from plugin.tgz file 40 | # 41 | rm -rf deployment/sysdig 42 | rm plugin.tgz 43 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sysdig", 3 | "private": true, 4 | "description": "", 5 | "scripts": { 6 | "build": "./node_modules/grunt-cli/bin/grunt", 7 | "test": "./node_modules/grunt-cli/bin/grunt mochaTest", 8 | "clean": "rm -rf dist", 9 | "start": "./build/start.sh" 10 | }, 11 | "repository": { 12 | "type": "git", 13 | "url": "git+https://github.com/draios/grafana-sysdig-data-source.git" 14 | }, 15 | "author": "", 16 | "license": "", 17 | "bugs": { 18 | "url": "https://github.com/draios/grafana-sysdig-data-source/issues" 19 | }, 20 | "engines": { 21 | "node": ">=6.10.0" 22 | }, 23 | "devDependencies": { 24 | "babel": "~6.23.0", 25 | "chai": "~4.2.0", 26 | "eslint": "~4.19.1", 27 | "grunt": "^1.0.4", 28 | "grunt-babel": "~7.0.0", 29 | "grunt-cli": "~1.3.2", 30 | "grunt-contrib-clean": "~2.0.0", 31 | "grunt-contrib-copy": "~1.0.0", 32 | "grunt-contrib-uglify": "~4.0.1", 33 | "grunt-contrib-watch": "~1.1.0", 34 | "grunt-execute": "~0.2.2", 35 | "grunt-mocha-test": "~0.13.3", 36 | "grunt-replace": "~1.0.1", 37 | "grunt-systemjs-builder": "~1.0.0", 38 | "jsdom": "~14.0.0", 39 | "load-grunt-tasks": "~4.0.0", 40 | "prunk": "~1.3.1", 41 | "q": "~1.5.1", 42 | "yarn": "~1.17.3" 43 | }, 44 | "dependencies": { 45 | "babel-plugin-transform-es2015-for-of": "~6.23.0", 46 | "babel-plugin-transform-es2015-modules-systemjs": "~6.24.1", 47 | "babel-preset-es2015": "~6.24.1", 48 | "lodash": "~4.17.13", 49 | "mocha": "~6.1.2" 50 | }, 51 | "homepage": "https://github.com/draios/grafana-sysdig-data-source#readme" 52 | } 53 | -------------------------------------------------------------------------------- /spec/.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "globals": { 3 | "beforeEach": true, 4 | "describe": true, 5 | "expect": true, 6 | "global": true, 7 | "it": true 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /spec/cache_spec.js: -------------------------------------------------------------------------------- 1 | import Cache from '../cache'; 2 | 3 | describe('Cache', () => { 4 | it('should store items', async () => { 5 | const cache = new Cache(); 6 | 7 | await cache.get('test-1', () => 'res-1'); 8 | const res1 = await cache.get('test-1'); 9 | expect(res1).to.be.equal('res-1'); 10 | 11 | await cache.get('test-2', () => 'res-2'); 12 | const res2 = await cache.get('test-2'); 13 | expect(res2).to.be.equal('res-2'); 14 | }); 15 | 16 | it('should store up to MAX items', async () => { 17 | const cache = new Cache(2); 18 | 19 | await cache.get('test-1', () => 'res-1'); 20 | await cache.get('test-2', () => 'res-2'); 21 | await cache.get('test-3', () => 'res-3'); 22 | await cache.get('test-4', () => 'res-4'); 23 | 24 | const array = 
cache.toArray(); 25 | expect(array).to.have.length(2); 26 | expect(array[0]).to.be.equal('res-3'); 27 | expect(array[1]).to.be.equal('res-4'); 28 | }); 29 | 30 | it('should store up to MAX recently read items', async () => { 31 | const cache = new Cache(2); 32 | 33 | await cache.get('test-1', () => 'res-1'); 34 | await cache.get('test-2', () => 'res-2'); 35 | await cache.get('test-1'); 36 | await cache.get('test-4', () => 'res-4'); 37 | 38 | const array = cache.toArray(); 39 | expect(array).to.have.length(2); 40 | expect(array[0]).to.be.equal('res-1'); 41 | expect(array[1]).to.be.equal('res-4'); 42 | }); 43 | 44 | it('should evict expired items', async () => { 45 | const cache = new Cache(10, 10); 46 | 47 | cache.now = () => 0; 48 | await cache.get('test-1', () => 'res-1'); 49 | 50 | cache.now = () => 10; 51 | await cache.get('test-2', () => 'res-2'); 52 | 53 | cache.now = () => 20; 54 | await cache.get('test-3', () => 'res-3'); 55 | 56 | const array = cache.toArray(); 57 | expect(array).to.have.length(2); 58 | expect(array[0]).to.be.equal('res-2'); 59 | expect(array[1]).to.be.equal('res-3'); 60 | }); 61 | 62 | it('should evict expired items', async () => { 63 | const cache = new Cache(10, 10); 64 | 65 | cache.now = () => 0; 66 | await cache.get('test-1', () => 'res-1'); 67 | 68 | cache.now = () => 10; 69 | await cache.get('test-2', () => 'res-2'); 70 | 71 | cache.now = () => 20; 72 | await cache.get('test-3', () => 'res-3'); 73 | 74 | const array = cache.toArray(); 75 | expect(array).to.have.length(2); 76 | expect(array[0]).to.be.equal('res-2'); 77 | expect(array[1]).to.be.equal('res-3'); 78 | }); 79 | 80 | it('should evict expired items based on creation time', async () => { 81 | const cache = new Cache(10, 10); 82 | 83 | cache.now = () => 0; 84 | await cache.get('test-1', () => 'res-1'); 85 | 86 | cache.now = () => 10; 87 | await cache.get('test-1'); 88 | 89 | cache.now = () => 20; 90 | await cache.get('test-3', () => 'res-3'); 91 | 92 | const array = cache.toArray(); 93 | expect(array).to.have.length(1); 94 | expect(array[0]).to.be.equal('res-3'); 95 | }); 96 | }); 97 | -------------------------------------------------------------------------------- /spec/datasource_spec.js: -------------------------------------------------------------------------------- 1 | import { SysdigDatasource } from '../datasource'; 2 | 3 | describe('SysdigDatasource', () => { 4 | var ctx = {}; 5 | 6 | beforeEach(() => { 7 | ctx.backendSrv = {}; 8 | ctx.templateSrv = {}; 9 | ctx.ds = new SysdigDatasource({}, ctx.backendSrv, ctx.templateSrv); 10 | }); 11 | 12 | it('should return an empty array when no targets are set', async () => { 13 | const result = await ctx.ds.query({ targets: [] }); 14 | expect(result.data).to.have.length(0); 15 | }); 16 | }); 17 | -------------------------------------------------------------------------------- /spec/formatter_service_spec.js: -------------------------------------------------------------------------------- 1 | import FormatterService from '../formatter_service'; 2 | 3 | describe('FormatterService', () => { 4 | it('format non-empty value', () => { 5 | expect(FormatterService.formatLabelValue('test')).to.be.equal('test'); 6 | }); 7 | 8 | it('format empty value', () => { 9 | expect(FormatterService.formatLabelValue(null)).to.be.equal('n/a'); 10 | expect(FormatterService.formatLabelValue(undefined)).to.be.equal('n/a'); 11 | }); 12 | 13 | it('should get series name for non-segmented query', () => { 14 | expect( 15 | FormatterService.getSeriesName({}, { target: 'metric', 
segmentBy: [] }, false, true, []) 16 | ).to.be.equal('metric'); 17 | }); 18 | 19 | it('should get series name for segmented query (single-target)', () => { 20 | expect( 21 | FormatterService.getSeriesName( 22 | { k: 'value' }, 23 | { target: 'metric', segmentBy: ['segment'] }, 24 | false, 25 | true, 26 | ['k'] 27 | ) 28 | ).to.be.equal('value'); 29 | }); 30 | 31 | it('should get series name for segmented query (multi-target)', () => { 32 | expect( 33 | FormatterService.getSeriesName( 34 | { k: 'value' }, 35 | { target: 'metric', segmentBy: ['segment'] }, 36 | false, 37 | false, 38 | ['k'] 39 | ) 40 | ).to.be.equal('metric (value)'); 41 | }); 42 | 43 | it('should get series name for non-segmented table query', () => { 44 | expect( 45 | FormatterService.getSeriesName({}, { target: 'metric', segmentBy: [] }, true, true, []) 46 | ).to.be.equal('metric'); 47 | }); 48 | 49 | it('should get series name for segmented table query', () => { 50 | expect( 51 | FormatterService.getSeriesName( 52 | { k: 'value' }, 53 | { target: 'metric', segmentBy: ['segment'] }, 54 | true, 55 | true, 56 | ['k'] 57 | ) 58 | ).to.be.equal('segment'); 59 | }); 60 | 61 | it('should get series name with {{metric}} alias', () => { 62 | expect( 63 | FormatterService.getSeriesName( 64 | { k: 'value' }, 65 | { alias: '{{metric}}', target: 'metric', segmentBy: ['segment'] }, 66 | false, 67 | true, 68 | ['k'] 69 | ) 70 | ).to.be.equal('metric'); 71 | expect( 72 | FormatterService.getSeriesName( 73 | {}, 74 | { alias: '{{metric}}', target: 'metric', segmentBy: [] }, 75 | false, 76 | true, 77 | [] 78 | ) 79 | ).to.be.equal('metric'); 80 | expect( 81 | FormatterService.getSeriesName( 82 | { k: 'value' }, 83 | { alias: '{{metric}}', target: 'metric', segmentBy: ['segment'] }, 84 | true, 85 | true, 86 | ['k'] 87 | ) 88 | ).to.be.equal('metric'); 89 | }); 90 | 91 | it('should get series name with {{segment_name}} alias', () => { 92 | expect( 93 | FormatterService.getSeriesName( 94 | { k: 'value' }, 95 | { alias: '{{segment_name}}', target: 'metric', segmentBy: ['segment'] }, 96 | false, 97 | true, 98 | ['k'] 99 | ) 100 | ).to.be.equal('segment'); 101 | expect( 102 | FormatterService.getSeriesName( 103 | { k: 'value' }, 104 | { alias: '{{segment_name}}', target: 'metric', segmentBy: ['segment'] }, 105 | true, 106 | true, 107 | ['k'] 108 | ) 109 | ).to.be.equal('segment'); 110 | 111 | // fallback 112 | expect( 113 | FormatterService.getSeriesName( 114 | {}, 115 | { alias: '{{segment_name}}', target: 'metric', segmentBy: [] }, 116 | false, 117 | true, 118 | [] 119 | ) 120 | ).to.be.equal('[all]'); 121 | }); 122 | 123 | it('should get series name with {{segment_value}} alias', () => { 124 | expect( 125 | FormatterService.getSeriesName( 126 | { k: 'value' }, 127 | { alias: '{{segment_value}}', target: 'metric', segmentBy: ['segment'] }, 128 | false, 129 | true, 130 | ['k'] 131 | ) 132 | ).to.be.equal('value'); 133 | expect( 134 | FormatterService.getSeriesName( 135 | { k: 'value' }, 136 | { alias: '{{segment_value}}', target: 'metric', segmentBy: ['segment'] }, 137 | true, 138 | true, 139 | ['k'] 140 | ) 141 | ).to.be.equal('value'); 142 | 143 | // fallback 144 | expect( 145 | FormatterService.getSeriesName( 146 | {}, 147 | { alias: '{{segment_value}}', target: 'metric', segmentBy: [] }, 148 | false, 149 | true, 150 | [] 151 | ) 152 | ).to.be.equal('[all]'); 153 | }); 154 | 155 | it('should get series name with {{segment_value:x:y}} alias', () => { 156 | expect( 157 | FormatterService.getSeriesName( 158 | { k: 'value' }, 159 | { alias: 
'{{segment_value:2}}', target: 'metric', segmentBy: ['segment'] }, 160 | false, 161 | true, 162 | ['k'] 163 | ) 164 | ).to.be.equal('va..'); 165 | expect( 166 | FormatterService.getSeriesName( 167 | { k: 'value' }, 168 | { alias: '{{segment_value::2}}', target: 'metric', segmentBy: ['segment'] }, 169 | false, 170 | true, 171 | ['k'] 172 | ) 173 | ).to.be.equal('..ue'); 174 | expect( 175 | FormatterService.getSeriesName( 176 | { k: 'value' }, 177 | { alias: '{{segment_value:2:2}}', target: 'metric', segmentBy: ['segment'] }, 178 | false, 179 | true, 180 | ['k'] 181 | ) 182 | ).to.be.equal('va..ue'); 183 | }); 184 | 185 | it('should get series name with invalid regular expression', () => { 186 | expect( 187 | FormatterService.getSeriesName( 188 | { k: 'value-123' }, 189 | { 190 | alias: '{{segment_value /(\\d+$/}}', 191 | target: 'metric', 192 | segmentBy: ['segment'] 193 | }, 194 | false, 195 | true, 196 | ['k'] 197 | ) 198 | ).to.be.equal('value-123'); 199 | expect( 200 | FormatterService.getSeriesName( 201 | { k: 'value-123' }, 202 | { 203 | alias: '{{segment_value /\\d+/}}', 204 | target: 'metric', 205 | segmentBy: ['segment'] 206 | }, 207 | false, 208 | true, 209 | ['k'] 210 | ) 211 | ).to.be.equal('value-123'); 212 | }); 213 | 214 | it('should get series name with invalid syntax', () => { 215 | expect( 216 | FormatterService.getSeriesName( 217 | { k: 'value-123' }, 218 | { 219 | alias: '{{segment_value //}}', 220 | target: 'metric', 221 | segmentBy: ['segment'] 222 | }, 223 | false, 224 | true, 225 | ['k'] 226 | ) 227 | ).to.be.equal('{{segment_value //}}'); 228 | expect( 229 | FormatterService.getSeriesName( 230 | { k: 'value-123' }, 231 | { 232 | alias: '{{segment_value /}}', 233 | target: 'metric', 234 | segmentBy: ['segment'] 235 | }, 236 | false, 237 | true, 238 | ['k'] 239 | ) 240 | ).to.be.equal('{{segment_value /}}'); 241 | }); 242 | }); 243 | -------------------------------------------------------------------------------- /spec/metrics_service_spec.js: -------------------------------------------------------------------------------- 1 | import MetricsService from '../metrics_service'; 2 | 3 | describe('MetricsService', () => { 4 | let backendMock; 5 | let backendRequestArgs; 6 | let backendResponseStubs; 7 | 8 | beforeEach(() => { 9 | MetricsService.reset(); 10 | 11 | backendRequestArgs = []; 12 | backendResponseStubs = []; 13 | 14 | backendMock = { 15 | url: 'dummy://localhost', 16 | apiToken: '42', 17 | backendSrv: { 18 | datasourceRequest(options) { 19 | backendRequestArgs.push(options); 20 | 21 | return new Promise((resolve) => 22 | resolve(backendResponseStubs[backendRequestArgs.length - 1]) 23 | ); 24 | } 25 | } 26 | }; 27 | }); 28 | 29 | it('should set proper default GET parameters', async () => { 30 | backendResponseStubs = [ 31 | { 32 | data: { 33 | metricDescriptors: [] 34 | } 35 | } 36 | ]; 37 | 38 | const metrics = await MetricsService.findMetrics(backendMock, {}); 39 | expect(metrics).to.be.an('array'); 40 | expect(metrics).to.have.length(0); 41 | 42 | expect(backendRequestArgs).to.have.length(1); 43 | expect(backendRequestArgs[0].url).to.match(/filter=&/); 44 | }); 45 | 46 | it('should not make the same request twice', async () => { 47 | backendResponseStubs = [ 48 | { 49 | data: { 50 | metricDescriptors: [] 51 | } 52 | } 53 | ]; 54 | 55 | await MetricsService.findMetrics(backendMock, {}); 56 | await MetricsService.findMetrics(backendMock, {}); 57 | 58 | expect(backendRequestArgs).to.have.length(1); 59 | }); 60 | 61 | it('should set filter when match is 
specified', async () => { 62 | backendResponseStubs = [ 63 | { 64 | data: { 65 | metricDescriptors: [] 66 | } 67 | } 68 | ]; 69 | 70 | await MetricsService.findMetrics(backendMock, { match: 'test' }); 71 | 72 | expect(backendRequestArgs[0].url).to.match(/filter=test&/); 73 | }); 74 | 75 | it('should use separate caches for different backends', async () => { 76 | backendResponseStubs = [ 77 | { 78 | data: { 79 | metricDescriptors: [] 80 | } 81 | }, 82 | { 83 | data: { 84 | metricDescriptors: [] 85 | } 86 | } 87 | ]; 88 | 89 | await Promise.all([ 90 | MetricsService.findMetrics(backendMock, { match: 'test' }), 91 | MetricsService.findMetrics( 92 | Object.assign({}, backendMock, { url: 'dummy://localhost-2' }), 93 | { match: 'test' } 94 | ) 95 | ]); 96 | 97 | expect(backendRequestArgs).to.have.length(2); 98 | expect(backendRequestArgs[0].url).not.to.be.equal(backendRequestArgs[1].url); 99 | }); 100 | }); 101 | -------------------------------------------------------------------------------- /spec/test-main.js: -------------------------------------------------------------------------------- 1 | import prunk from 'prunk'; 2 | import { JSDOM } from 'jsdom'; 3 | import chai from 'chai'; 4 | 5 | // Mock Grafana modules that are not available outside of the core project 6 | // Required for loading module.js 7 | prunk.mock('./css/query-editor.css!', 'no css, dude.'); 8 | prunk.mock('./css/config-editor.css!', 'no css, dude.'); 9 | prunk.mock('app/plugins/sdk', { 10 | QueryCtrl: null 11 | }); 12 | 13 | // Setup jsdom 14 | // Required for loading angularjs 15 | global.document = new JSDOM(''); 16 | global.window = global.document.parentWindow; 17 | 18 | // Setup Chai 19 | chai.should(); 20 | global.assert = chai.assert; 21 | global.expect = chai.expect; 22 | -------------------------------------------------------------------------------- /src/api_service.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 
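//
// ApiService wraps Grafana's backendSrv.datasourceRequest: send() adds the
// Sysdig authentication headers and datasource base URL to every request, and
// the fetch* helpers call the v2 REST endpoints first, falling back to the
// legacy endpoints when the newer API is not available on the backend.
//
// Minimal usage sketch (the `backend` object is the configuration returned by
// SysdigDatasource.getBackendConfiguration()):
//
//     const login = await ApiService.send(backend, { url: 'api/login' });
//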
15 | // 16 | export default class ApiService { 17 | static send(backend, options) { 18 | const headers = { 19 | 'Content-Type': 'application/json', 20 | 'X-Sysdig-Product': 'SDC', 21 | Authorization: `Bearer ${backend.apiToken}` 22 | }; 23 | 24 | return backend.backendSrv.datasourceRequest( 25 | Object.assign({}, options, { 26 | headers, 27 | url: `${backend.url}/${options.url}`, 28 | method: options.method || 'GET' 29 | }) 30 | ); 31 | } 32 | 33 | static async fetchDefaultDashboards(backend) { 34 | try { 35 | // 36 | // Try latest version 37 | // 38 | const result = await this.send(backend, { 39 | url: 'api/v2/defaultDashboards?excludeMissing=true' 40 | }); 41 | 42 | if (result.data.defaultDashboards) { 43 | return { 44 | defaultDashboards: result.data.defaultDashboards, 45 | version: 'v2' 46 | }; 47 | } else { 48 | // 49 | // dev version of v2 detected, fallback to v1 50 | // (api/v2/defaultDashboards returns an array and not and object with defaultDashboards array) 51 | // NOTE: This is useful until onprem version X and SaaS version Y need to be supported 52 | // 53 | throw { 54 | status: 404 55 | }; 56 | } 57 | } catch (ex) { 58 | // 59 | // Check that latest version is not supported 60 | // 61 | if (ex.status === 404) { 62 | // 63 | // Try previous version 64 | // (supported from v1245 released on 5/11/2018) 65 | // 66 | const result = await this.send(backend, { 67 | url: 'api/defaultDashboards?excludeMissing=true' 68 | }); 69 | 70 | return { 71 | defaultDashboards: result.data.defaultDashboards, 72 | version: 'v1' 73 | }; 74 | } 75 | } 76 | } 77 | 78 | static async fetchDashboards(backend) { 79 | try { 80 | // 81 | // Try latest version 82 | // 83 | const result = await ApiService.send(backend, { 84 | url: 'api/v2/dashboards' 85 | }); 86 | 87 | if (Array.isArray(result.data.dashboards) && result.data.dashboards.length > 0) { 88 | return { 89 | dashboards: result.data.dashboards, 90 | version: 'v2' 91 | }; 92 | } else { 93 | // 94 | // probable dev version of v2 detected, fallback to v1 95 | // (api/v2/dashboards was not documented or used, it's supposed to be empty -- NOTE: could lead to false positive in case there are no dashboards to import) 96 | // NOTE: This is useful until onprem version X and SaaS version Y need to be supported 97 | // 98 | throw { 99 | status: 404 100 | }; 101 | } 102 | } catch (ex) { 103 | // 104 | // Check that latest version is not supported 105 | // 106 | if (ex.status === 404) { 107 | // 108 | // Try previous version 109 | // (supported from v1245 released on 5/11/2018) 110 | // 111 | const result = await ApiService.send(backend, { 112 | url: 'ui/dashboards' 113 | }); 114 | 115 | return { 116 | dashboards: result.data.dashboards, 117 | version: 'v1' 118 | }; 119 | } 120 | } 121 | } 122 | 123 | static async fetchMetricsDescriptors(backend, options) { 124 | const metricTypes = options.areLabelsIncluded ? [] : ['counter', 'gauge', 'histogram']; 125 | 126 | try { 127 | // 128 | // Try latest version 129 | // 130 | const typesFilter = options.areLabelsIncluded ? 
[] : options.plottableMetricTypes; 131 | 132 | const response = await ApiService.send(backend, { 133 | url: `api/v2/metrics/descriptors?offset=0&limit=100&filter=${options.match || 134 | ''}&types=${encodeURIComponent( 135 | typesFilter.join(',') 136 | )}&metricTypes=${encodeURIComponent(metricTypes.join(','))}` 137 | }); 138 | 139 | return response.data.metricDescriptors; 140 | } catch (ex) { 141 | // 142 | // Check that latest version is not supported 143 | // 144 | if (ex.status === 500) { 145 | // 146 | // Try previous version 147 | // 148 | const response = await ApiService.send(backend, { 149 | url: 'api/data/metrics?light=true' 150 | }); 151 | 152 | return Object.values(response.data).map((d) => 153 | Object.assign({}, d, { 154 | timeAggregations: d.aggregations, 155 | groupAggregations: getGroupAggregations(d, metricTypes) 156 | }) 157 | ); 158 | } else { 159 | throw ex; 160 | } 161 | } 162 | } 163 | 164 | static async fetchLabelDescriptors(backend, options) { 165 | try { 166 | // 167 | // Try latest version 168 | // 169 | const result = await this.send(backend, { 170 | url: `api/v2/labels/descriptors?offset=0&limit=100&filter=${options.match || 171 | ''}&pids=${options.metric || ''}&scope=` 172 | }); 173 | 174 | return result.data.labelDescriptors; 175 | } catch (ex) { 176 | // 177 | // Check that latest version is not supported 178 | // 179 | if (ex.status === 404) { 180 | // 181 | // Try previous version 182 | // 183 | if (options.metric) { 184 | try { 185 | const result = await this.send(backend, { 186 | url: `api/data/metrics/${options.metric}/segmentationMetrics` 187 | }); 188 | 189 | if (result.data.segmentationMetrics) { 190 | return result.data.segmentationMetrics.map((d) => ({ id: d })); 191 | } else { 192 | return []; 193 | } 194 | } catch (ex) { 195 | // 196 | // Previous versions no longer supported 197 | // 198 | } 199 | } else { 200 | return []; 201 | } 202 | } else { 203 | throw ex; 204 | } 205 | } 206 | } 207 | } 208 | 209 | function getGroupAggregations(metric, metricTypes) { 210 | if (metric.groupAggregations && metric.groupAggregations.length > 0) { 211 | return metric.groupAggregations; 212 | } else if (metricTypes.indexOf(metric.metricType) >= 0) { 213 | return ['avg', 'sum', 'min', 'max']; 214 | } else { 215 | return []; 216 | } 217 | } 218 | -------------------------------------------------------------------------------- /src/cache.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 
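//
// Cache is a small promise-aware LRU cache. get(id, loader) returns the cached
// value when present; otherwise it stores the loader's pending promise (so
// concurrent callers share a single in-flight request) and replaces it with the
// resolved value. At most `maxCount` entries are kept, the least recently read
// entries are evicted first, and entries older than `expiration` milliseconds
// (measured from creation time) are dropped on access.
//
// Usage sketch, mirroring spec/cache_spec.js (loadValue() stands in for any
// async loader):
//
//     const cache = new Cache(10, 60000);
//     const value = await cache.get('some-key', () => loadValue());
//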
15 | // 16 | export default class Cache { 17 | constructor(maxCount = 10, expiration = Number.MAX_VALUE) { 18 | this.map = {}; 19 | this.list = []; 20 | 21 | this.maxCount = maxCount; 22 | this.expiration = expiration; 23 | } 24 | 25 | getItemId(id) { 26 | return id; 27 | } 28 | 29 | async get(id, loader) { 30 | const data = this.read(id); 31 | if (data !== undefined) { 32 | return data; 33 | } else { 34 | const promise = this.fetch(loader); 35 | 36 | // store promise as data first... 37 | this.write(id, promise); 38 | 39 | try { 40 | // wait for data... 41 | const data = await promise; 42 | 43 | // and finally store data in the cache 44 | this.write(id, data); 45 | 46 | return data; 47 | } catch (ex) { 48 | // delete pending item 49 | const itemId = this.getItemId(id); 50 | const item = this.map[itemId]; 51 | if (item !== undefined) { 52 | this.list = [...this.list.filter((d) => d !== item)]; 53 | delete this.map[itemId]; 54 | } 55 | 56 | throw ex; 57 | } 58 | } 59 | } 60 | 61 | read(id) { 62 | const itemId = this.getItemId(id); 63 | 64 | this.expireItems(); 65 | const item = this.map[itemId]; 66 | 67 | if (item) { 68 | item.lastAccess = this.now(); 69 | 70 | // keep list sorted by creation time 71 | this.list = [...this.list.filter((d) => d !== item), item]; 72 | 73 | return item.data; 74 | } else { 75 | return undefined; 76 | } 77 | } 78 | 79 | fetch(loader) { 80 | return loader(); 81 | } 82 | 83 | write(id, data) { 84 | const itemId = this.getItemId(id); 85 | 86 | const item = this.createItem(itemId, data); 87 | 88 | // replace existing items (used to replace promise with data) 89 | const previousItem = this.map[itemId]; 90 | if (previousItem !== undefined) { 91 | this.list = [...this.list.filter((d) => d !== previousItem)]; 92 | delete this.map[itemId]; 93 | } 94 | 95 | this.list.push(item); 96 | this.map[itemId] = item; 97 | 98 | this.evictItems(); 99 | 100 | return item.data; 101 | } 102 | 103 | createItem(itemId, data) { 104 | const now = this.now(); 105 | 106 | return { 107 | id: itemId, 108 | data, 109 | createdOn: now, 110 | lastAccess: now 111 | }; 112 | } 113 | 114 | expireItems() { 115 | if (this.expiration !== Number.MAX_VALUE) { 116 | const limit = this.now() - this.expiration; 117 | const removed = []; 118 | for (let i = this.list.length - 1; i >= 0; i--) { 119 | const item = this.list[i]; 120 | 121 | if (item.createdOn < limit) { 122 | delete this.map[item.id]; 123 | removed.push(i); 124 | } 125 | } 126 | 127 | this.list = this.list.filter((d, i) => removed.indexOf(i) === -1); 128 | } 129 | } 130 | 131 | evictItems() { 132 | if (this.list.length > this.maxCount) { 133 | const removed = []; 134 | for (let i = this.list.length - 1 - this.maxCount; i >= 0; i--) { 135 | const item = this.list[i]; 136 | 137 | delete this.map[item.id]; 138 | removed.push(i); 139 | } 140 | 141 | this.list = this.list.filter((d, i) => removed.indexOf(i) === -1); 142 | } 143 | } 144 | 145 | toArray() { 146 | return this.list.map((item) => item.data); 147 | } 148 | 149 | now() { 150 | return Date.now(); 151 | } 152 | } 153 | -------------------------------------------------------------------------------- /src/config_ctrl.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 
6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | // 16 | import DashboardsService from './dashboards_service'; 17 | import './css/config-editor.css!'; 18 | 19 | const CLOUD_URL = 'https://app.sysdigcloud.com'; 20 | const DEFAULT_ONPREM_URL = 'https://your-sysdig.local'; 21 | 22 | export class SysdigConfigCtrl { 23 | /** @ngInject */ 24 | constructor($q, backendSrv) { 25 | this.planOptions = [ 26 | { id: 'cloud', text: 'Basic/Pro Cloud' }, 27 | { id: 'onprem', text: 'Pro Software' } 28 | ]; 29 | 30 | this.dashboardSets = [ 31 | { 32 | id: 'DEFAULT', 33 | title: 'Default dashboards', 34 | importStatus: 'none', 35 | importMessage: null 36 | }, 37 | { id: 'PRIVATE', title: 'My dashboards', importStatus: 'none', importMessage: null }, 38 | { id: 'SHARED', title: 'Shared dashboards', importStatus: 'none', importMessage: null } 39 | ]; 40 | 41 | this.current.access = 'proxy'; 42 | 43 | const isUrlNotEmpty = this.current.url && /^\s*$/.test(this.current.url) === false; 44 | this.current.url = isUrlNotEmpty ? this.current.url : CLOUD_URL; 45 | this.isOnprem = this.current.url !== CLOUD_URL; 46 | this.plan = this.isOnprem ? this.planOptions[1] : this.planOptions[0]; 47 | 48 | this.$q = $q; 49 | this.backendSrv = backendSrv; 50 | } 51 | 52 | getBackendConfiguration() { 53 | return { 54 | backendSrv: this.backendSrv, 55 | withCredentials: this.current.withCredentials, 56 | headers: { 57 | 'Content-Type': 'application/json', 58 | 'X-Sysdig-Product': 'SDC', 59 | Authorization: `Bearer ${this.current.jsonData.apiToken}` 60 | }, 61 | apiToken: this.current.jsonData.apiToken, 62 | url: `/api/datasources/proxy/${this.current.id}` 63 | }; 64 | } 65 | 66 | changePlan() { 67 | this.isOnprem = this.plan.id === 'onprem'; 68 | 69 | if (this.isOnprem && this.current.url === CLOUD_URL) { 70 | this.current.url = DEFAULT_ONPREM_URL; 71 | } 72 | } 73 | 74 | isDashboardsImportDisabled() { 75 | return this.current.id === undefined || this.current.jsonData.apiToken === undefined; 76 | } 77 | 78 | importDashboards(dashboardSetId) { 79 | this.testing = null; 80 | 81 | const dashboardSet = this.dashboardSets.filter((set) => set.id === dashboardSetId)[0]; 82 | dashboardSet.importStatus = 'executing'; 83 | dashboardSet.importMessage = null; 84 | 85 | this.$q 86 | .when( 87 | DashboardsService.importFromSysdig( 88 | this.getBackendConfiguration(), 89 | this.current.name, 90 | dashboardSetId 91 | ) 92 | ) 93 | .then(() => { 94 | dashboardSet.importStatus = 'success'; 95 | }) 96 | .catch((error) => { 97 | dashboardSet.importStatus = 'error'; 98 | dashboardSet.importMessage = error; 99 | }); 100 | } 101 | 102 | deleteDashboards() { 103 | DashboardsService.delete(this.backendSrv); 104 | } 105 | } 106 | 107 | SysdigConfigCtrl.templateUrl = 'partials/config.html'; 108 | -------------------------------------------------------------------------------- /src/css/config-editor.css: -------------------------------------------------------------------------------- 1 | /* 2 | 3 | Copyright 2018 Draios Inc. 
4 | 5 | Licensed under the Apache License, Version 2.0 (the "License"); 6 | you may not use this file except in compliance with the License. 7 | You may obtain a copy of the License at 8 | 9 | http://www.apache.org/licenses/LICENSE-2.0 10 | 11 | Unless required by applicable law or agreed to in writing, software 12 | distributed under the License is distributed on an "AS IS" BASIS, 13 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | See the License for the specific language governing permissions and 15 | limitations under the License. 16 | 17 | */ 18 | .sysdig-config-editor-info { 19 | margin-top: 1.5rem; 20 | margin-bottom: 1rem; 21 | } 22 | .sysdig-config-editor-info + .sysdig-config-editor-info { 23 | margin-top: 0.5rem; 24 | } 25 | 26 | h3 + .sysdig-config-editor-info, 27 | h4 + .sysdig-config-editor-info { 28 | margin-top: 0; 29 | } 30 | -------------------------------------------------------------------------------- /src/css/query-editor.css: -------------------------------------------------------------------------------- 1 | /* 2 | 3 | Copyright 2018 Draios Inc. 4 | 5 | Licensed under the Apache License, Version 2.0 (the "License"); 6 | you may not use this file except in compliance with the License. 7 | You may obtain a copy of the License at 8 | 9 | http://www.apache.org/licenses/LICENSE-2.0 10 | 11 | Unless required by applicable law or agreed to in writing, software 12 | distributed under the License is distributed on an "AS IS" BASIS, 13 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | See the License for the specific language governing permissions and 15 | limitations under the License. 16 | 17 | */ 18 | .sysdig-query-editor-dropdown--grow { 19 | flex: 1; 20 | } 21 | 22 | .sysdig-query-editor-form--aggregation { 23 | min-width: 90px; /* set to accomodate longest aggreation label */ 24 | } 25 | -------------------------------------------------------------------------------- /src/dashboards_service.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 
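//
// DashboardsService drives the "import dashboards" feature of the datasource
// configuration page: importFromSysdig() fetches the default, private or shared
// dashboard definitions from the Sysdig API, converts each of them to a Grafana
// dashboard via SysdigDashboardHelper, and saves them through backendSrv one at
// a time; delete() removes previously imported dashboards by looking up the
// "Sysdig"/"sysdig" tags.
//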
15 | // 16 | import ApiService from './api_service'; 17 | import SysdigDashboardHelper from './sysdig_dashboard_helper'; 18 | 19 | export default class DashboardsService { 20 | static async importFromSysdig(backend, datasourceName, dashboardSetId) { 21 | console.info('Sysdig dashboards import: Starting...'); 22 | 23 | if (dashboardSetId === 'DEFAULT') { 24 | const tags = ['Sysdig', 'Default dashboard']; 25 | const fetchResults = await Promise.all([ 26 | ApiService.fetchDefaultDashboards(backend), 27 | ApiService.send(backend, { 28 | url: 'data/drilldownViewsCategories.json' 29 | }) 30 | ]); 31 | 32 | const applicableDashboards = fetchResults[0].defaultDashboards; 33 | 34 | const usedCategories = fetchResults[1].data.drilldownViewsCategories.filter( 35 | (category) => { 36 | return ( 37 | applicableDashboards.find( 38 | (dashboard) => dashboard.category === category.id 39 | ) !== undefined 40 | ); 41 | } 42 | ); 43 | 44 | const categories = usedCategories; 45 | const defaultDashboards = applicableDashboards; 46 | const version = fetchResults[0].version; 47 | 48 | const convertedDashboards = defaultDashboards 49 | .map(convertDashboard.bind(null, datasourceName, version, categories, tags)) 50 | .filter((dashboard) => dashboard !== null); 51 | 52 | const options = { 53 | overwrite: true 54 | }; 55 | 56 | try { 57 | const result = await saveDashboards( 58 | backend.backendSrv, 59 | convertedDashboards, 60 | options 61 | ); 62 | 63 | console.info('Sysdig dashboards import: Completed'); 64 | 65 | return result; 66 | } catch (error) { 67 | console.info('Sysdig dashboards import: Failed', error); 68 | 69 | throw error; 70 | } 71 | } else { 72 | let tags; 73 | switch (dashboardSetId) { 74 | case 'PRIVATE': 75 | tags = ['Sysdig', 'Private dashboard']; 76 | break; 77 | case 'SHARED': 78 | tags = ['Sysdig', 'Shared dashboard']; 79 | break; 80 | default: 81 | throw { 82 | name: 'Invalid argument', 83 | message: `Invalid dashboard set ID ('${dashboardSetId}')` 84 | }; 85 | } 86 | 87 | const fetchResult = await ApiService.fetchDashboards(backend); 88 | 89 | const convertedDashboards = fetchResult.dashboards 90 | .filter( 91 | SysdigDashboardHelper.filterDashboardBySetId.bind( 92 | null, 93 | fetchResult.version, 94 | dashboardSetId 95 | ) 96 | ) 97 | .map(convertDashboard.bind(null, datasourceName, fetchResult.version, [], tags)) 98 | .filter((dashboard) => dashboard !== null); 99 | 100 | const options = { 101 | overwrite: true 102 | }; 103 | 104 | try { 105 | const saveResult = await saveDashboards( 106 | backend.backendSrv, 107 | convertedDashboards, 108 | options 109 | ); 110 | 111 | console.info('Sysdig dashboards import: Completed'); 112 | 113 | return saveResult; 114 | } catch (error) { 115 | console.info('Sysdig dashboards import: Failed', error); 116 | 117 | throw error; 118 | } 119 | } 120 | 121 | function convertDashboard(datasourceName, version, categories, tags, dashboard) { 122 | try { 123 | return SysdigDashboardHelper.convertToGrafana(version, dashboard, { 124 | datasourceName, 125 | categories, 126 | tags 127 | }); 128 | } catch (error) { 129 | console.error( 130 | 'An error occurred during the dashboard conversion', 131 | error, 132 | arguments 133 | ); 134 | return null; 135 | } 136 | } 137 | 138 | async function saveDashboards(backendSrv, dashboards, options) { 139 | if (dashboards.length > 0) { 140 | const dashboard = dashboards[0]; 141 | 142 | await backendSrv.saveDashboard(dashboard, options); 143 | 144 | console.log(`Sysdig dashboards import: Imported '${dashboard.title}'`); 145 | 
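                // Recurse on the remaining dashboards so that imports are performed
                // strictly one at a time rather than firing all saves in parallel.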
146 | return saveDashboards(backendSrv, dashboards.slice(1), options); 147 | } else { 148 | return {}; 149 | } 150 | } 151 | } 152 | 153 | static async delete(backendSrv) { 154 | return backendSrv 155 | .search({ 156 | type: 'dash-db', 157 | tags: ['Sysdig', 'sysdig'] 158 | }) 159 | .then(filterSysdigDashboards) 160 | .then((dashboards) => { 161 | console.log(`Sysdig dashboards: Delete ${dashboards.length} dashboards...`); 162 | 163 | return removeDashboards(backendSrv, dashboards); 164 | }); 165 | 166 | function filterSysdigDashboards(dashboards) { 167 | // NOTE: Up to Grafana v6.1, search over 2 tags doesn't work, the list will include dashboards without tags as well 168 | // Current workaround is to filter based on tags returned by each dashboard configuration 169 | return dashboards.filter( 170 | (dashboard) => 171 | dashboard.tags && 172 | (dashboard.tags.indexOf('sysdig') >= 0 || dashboard.tags.indexOf('Sysdig') >= 0) 173 | ); 174 | } 175 | } 176 | } 177 | 178 | async function removeDashboards(backendSrv, dashboards) { 179 | if (dashboards.length > 0) { 180 | return removeNextDashboard(backendSrv, dashboards[0], dashboards.slice(1)); 181 | } else { 182 | return; 183 | } 184 | } 185 | 186 | async function removeNextDashboard(backendSrv, dashboard, nextDashboards) { 187 | await backendSrv.deleteDashboard(dashboard.uid); 188 | 189 | try { 190 | await removeDashboards(backendSrv, nextDashboards); 191 | } catch (error) { 192 | console.error('Error deleting dashboard', dashboard.uid, error); 193 | await removeDashboards(backendSrv, nextDashboards); 194 | } 195 | } 196 | -------------------------------------------------------------------------------- /src/data_service.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 
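//
// DataService turns Grafana panel queries into Sysdig batch data requests:
// fetch() queues queries that share the same API token and time window, a
// debounced scheduler groups them into a single batch, the batch is split into
// chunks of at most four requests against `api/data/batch`, and the responses
// are parsed back into Grafana time series (or a table, for tabular targets).
//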
15 | // 16 | import _ from 'lodash'; 17 | import ApiService from './api_service'; 18 | import TimeService from './time_service'; 19 | import FormatterService from './formatter_service'; 20 | 21 | let fetchQueue; 22 | 23 | export default class DataService { 24 | static async fetch(backend, query, userTime) { 25 | const queue = this.setupTokenRequestQueue(backend.apiToken); 26 | const batch = this.setupDataBatchQueue(queue, backend, userTime); 27 | 28 | const promise = new Promise((resolve, reject) => { 29 | batch.requests.push({ 30 | query, 31 | promise: { resolve, reject } 32 | }); 33 | 34 | // 35 | // Debounce fetch so that all panels' requests can be batched together 36 | // Note that this function will be called synchronously once per panel 37 | // 38 | const scheduleFetchFn = _.debounce(this.scheduleFetch.bind(this), 0); 39 | scheduleFetchFn(); 40 | }); 41 | 42 | return promise; 43 | } 44 | 45 | static setupDataBatchQueue(queue, backend, userTime) { 46 | const batchId = getBatchId(userTime); 47 | 48 | if (queue[batchId] === undefined) { 49 | queue[batchId] = { 50 | backend, 51 | userTime, 52 | requests: [] 53 | }; 54 | } 55 | 56 | return queue[batchId]; 57 | } 58 | 59 | static async scheduleFetch() { 60 | const queues = Object.values(fetchQueue); 61 | 62 | // clear queue, requests will be now processed 63 | fetchQueue = {}; 64 | 65 | queues.forEach((queue) => { 66 | Object.values(queue).forEach((batch) => this.fetchBatch(batch)); 67 | }); 68 | } 69 | 70 | static async fetchBatch(batch) { 71 | TimeService.validateTimeWindow(batch.backend, batch.userTime) 72 | .then((requestTime) => { 73 | // 74 | // get list of data requests to batch 75 | // 76 | const apiRequests = batch.requests.reduce((acc, item) => { 77 | return [...acc, ...getRequests(item.query, requestTime)]; 78 | }, []); 79 | 80 | // 81 | // break list into 4-request chunks 82 | // 83 | const maxRequestCountPerChunk = 4; 84 | const chunks = apiRequests.reduce((acc, request) => { 85 | if ( 86 | acc.length === 0 || 87 | acc[acc.length - 1].length === maxRequestCountPerChunk 88 | ) { 89 | acc.push([request]); 90 | } else { 91 | acc[acc.length - 1].push(request); 92 | } 93 | 94 | return acc; 95 | }, []); 96 | 97 | if (requestTime) { 98 | // 99 | // send all batch requests 100 | // 101 | return Promise.all( 102 | chunks.map((chunk) => 103 | ApiService.send(batch.backend, { 104 | url: `api/data/batch`, 105 | data: { requests: chunk }, 106 | method: 'POST' 107 | }) 108 | ) 109 | ); 110 | } else { 111 | // 112 | // pretend the backend returned all empty datasets 113 | // 114 | return chunks.map((chunk) => ({ 115 | data: { 116 | responses: chunk.map(() => ({ data: [] })) 117 | } 118 | })); 119 | } 120 | }) 121 | .then( 122 | (chunks) => { 123 | // 124 | // flatten responses 125 | // 126 | let responses = chunks.reduce( 127 | (acc, chunk) => [...acc, ...chunk.data.responses], 128 | [] 129 | ); 130 | 131 | // 132 | // process and resolve each query with its response(s) 133 | // 134 | batch.requests.forEach((item) => { 135 | const targetResponseCount = item.query.targets.length; 136 | const targetResponses = responses.slice(0, targetResponseCount); 137 | 138 | const parseResult = parseResponses(item.query, targetResponses); 139 | const failedResults = parseResult.data.filter((d) => d.error); 140 | if ( 141 | parseResult.data.length > 0 && 142 | failedResults.length === parseResult.data.length 143 | ) { 144 | const error = failedResults[0].error; 145 | item.promise.reject({ 146 | message: `${error.reason} (${error.message})` 147 | }); 148 | 
} else { 149 | item.promise.resolve(parseResult); 150 | } 151 | 152 | responses = responses.slice(targetResponseCount); 153 | }); 154 | }, 155 | (error) => { 156 | // time window not available 157 | batch.requests.forEach((request) => { 158 | request.promise.reject(error); 159 | }); 160 | } 161 | ); 162 | 163 | // 164 | // TODO 165 | // 166 | // 1. Handle 200 OK with error response 167 | // { 168 | // "responses" : [ { 169 | // "errors" : [ { 170 | // "reason" : "Metric not found", 171 | // "message" : "'sysdigcloud-backend.events_dropped_total' is not a Sysdig Cloud metric", 172 | // "field" : "metrics", 173 | // "rejectedValue" : [ { 174 | // "groupAggregation" : null, 175 | // "alias" : "k0", 176 | // "aggregations" : { 177 | // "time" : null, 178 | // "group" : null 179 | // }, 180 | // "timeAggregation" : null, 181 | // "metric" : "timestamp" 182 | // }, { 183 | // "groupAggregation" : "concat", 184 | // "alias" : "v0", 185 | // "aggregations" : { 186 | // "time" : "concat", 187 | // "group" : "concat" 188 | // }, 189 | // "timeAggregation" : "concat", 190 | // "metric" : "sysdigcloud-backend.events_dropped_total" 191 | // } ] 192 | // } ] 193 | // } ] 194 | // } 195 | // 196 | // 2. Handle error like 500 Internal Server Error 197 | // 198 | } 199 | 200 | static setupTokenRequestQueue(apiToken) { 201 | if (fetchQueue === undefined) { 202 | fetchQueue = {}; 203 | } 204 | 205 | if (fetchQueue[apiToken] === undefined) { 206 | fetchQueue[apiToken] = {}; 207 | } 208 | 209 | return fetchQueue[apiToken]; 210 | } 211 | } 212 | 213 | function getBatchId(userTime) { 214 | return `${userTime.from} - ${userTime.to} - ${userTime.sampling}`; 215 | } 216 | 217 | function getRequests(options, requestTime) { 218 | const isTabularFormat = options.targets[0].isTabularFormat; 219 | return options.targets.map((target) => getRequest(target, requestTime, isTabularFormat)); 220 | } 221 | 222 | function getRequest(target, requestTime, isTabularFormat) { 223 | if (requestTime) { 224 | return { 225 | format: { 226 | type: 'data' 227 | }, 228 | time: getTime(), 229 | metrics: getMetrics(), 230 | sort: getSort(isTabularFormat), 231 | paging: getPaging(), 232 | scope: target.filter, 233 | group: { 234 | aggregations: { 235 | v0: target.timeAggregation 236 | }, 237 | groupAggregations: { 238 | v0: target.groupAggregation 239 | }, 240 | by: getGroupBy(), 241 | configuration: { 242 | groups: [] 243 | } 244 | } 245 | }; 246 | } else { 247 | return null; 248 | } 249 | 250 | function getTime() { 251 | return { 252 | from: requestTime.from * 1000000, 253 | to: requestTime.to * 1000000, 254 | sampling: 255 | (target.isSingleDataPoint 256 | ? 
requestTime.to - requestTime.from 257 | : requestTime.sampling) * 1000000 258 | }; 259 | } 260 | function getMetrics() { 261 | if (target.isSingleDataPoint) { 262 | const metrics = { 263 | v0: target.target 264 | }; 265 | 266 | target.segmentBy.forEach((segmentBy, i) => { 267 | metrics[`k${i}`] = segmentBy; 268 | }); 269 | 270 | return metrics; 271 | } else { 272 | const metrics = { 273 | k0: 'timestamp', 274 | v0: target.target 275 | }; 276 | 277 | target.segmentBy.forEach((segmentBy, i) => { 278 | metrics[`k${i + 1}`] = segmentBy; 279 | }); 280 | 281 | return metrics; 282 | } 283 | } 284 | 285 | function getSort(isTabularFormat) { 286 | const sortDirection = target.sortDirection || 'desc'; 287 | 288 | let sort; 289 | 290 | if (isTabularFormat === false) { 291 | sort = [{ v0: sortDirection }, { k0: sortDirection }]; 292 | 293 | if (target.segmentBy.length > 0) { 294 | sort.push({ k1: sortDirection }); 295 | } 296 | } else { 297 | // sort table by first label, let Grafana to sort the final table then 298 | sort = [{ k0: sortDirection }]; 299 | } 300 | 301 | return sort; 302 | } 303 | 304 | function getPaging() { 305 | return { 306 | from: 0, 307 | to: target.pageLimit - 1 308 | }; 309 | } 310 | 311 | function getGroupBy() { 312 | if (target.isSingleDataPoint) { 313 | const groupBy = []; 314 | 315 | target.segmentBy.forEach((segmentBy, i) => { 316 | groupBy.push({ 317 | metric: `k${i}` 318 | }); 319 | }); 320 | 321 | return groupBy; 322 | } else { 323 | const groupBy = [ 324 | { 325 | metric: 'k0', 326 | value: requestTime.sampling * 1000000 327 | } 328 | ]; 329 | 330 | target.segmentBy.forEach((segmentBy, i) => { 331 | groupBy.push({ 332 | metric: `k${i + 1}` 333 | }); 334 | }); 335 | 336 | return groupBy; 337 | } 338 | } 339 | } 340 | 341 | function parseResponses(options, response) { 342 | const isTabularFormat = options.targets[0].isTabularFormat; 343 | const isSingleTarget = options.targets.length === 1; 344 | const data = options.targets.map((target, i) => { 345 | const isSingleDataPoint = target.isSingleDataPoint; 346 | 347 | if (response[i].data) { 348 | const map = response[i].data.reduce((acc, d) => { 349 | const keys = response[i].group.by 350 | .map((group) => group['metric']) 351 | // assume timestamp is always the first one, ie. k0 352 | .slice(isSingleDataPoint ? 
0 : 1); 353 | 354 | let t; 355 | if (target.segmentBy.length > 0) { 356 | const segmentNames = keys 357 | .map((segment) => FormatterService.formatLabelValue(d[segment])) 358 | .join(' - '); 359 | 360 | if (isTabularFormat || isSingleTarget) { 361 | t = segmentNames; 362 | } else { 363 | t = `${FormatterService.formatLabelValue(target.target)} (${segmentNames})`; 364 | } 365 | } else { 366 | t = FormatterService.formatLabelValue(target.target); 367 | } 368 | 369 | if (acc[t] === undefined) { 370 | acc[t] = { 371 | target: FormatterService.getSeriesName( 372 | d, 373 | target, 374 | isTabularFormat, 375 | isSingleTarget, 376 | keys 377 | ), 378 | datapoints: [] 379 | }; 380 | } 381 | 382 | if (isTabularFormat) { 383 | acc[t].datapoints.push([ 384 | ...keys.map((key) => d[key]), 385 | d.v0, 386 | response[i].time.from 387 | ]); 388 | } else if (isSingleDataPoint) { 389 | acc[t].datapoints.push([d.v0, response[i].time.from]); 390 | } else { 391 | acc[t].datapoints.push([d.v0, d.k0 / 1000]); 392 | } 393 | 394 | return acc; 395 | }, {}); 396 | 397 | if (isSingleDataPoint) { 398 | return Object.values(map).sort((a, b) => { 399 | if (a.datapoints[0][0] === b.datapoints[0][0]) { 400 | return a.target.localeCompare(b.target); 401 | } else { 402 | if (target.sortDirection === 'desc') { 403 | return b.datapoints[0][0] - a.datapoints[0][0]; 404 | } else { 405 | return a.datapoints[0][0] - b.datapoints[0][0]; 406 | } 407 | } 408 | }); 409 | } else { 410 | return Object.values(map).sort((a, b) => a.target.localeCompare(b.target)); 411 | } 412 | } else { 413 | return { 414 | target: target.target, 415 | error: response[i].errors[0] 416 | }; 417 | } 418 | }); 419 | 420 | if (isTabularFormat && data.length > 0) { 421 | const failures = data.filter((d) => d.error); 422 | if (failures.length > 0) { 423 | return { data: failures }; 424 | } 425 | 426 | const targetsDataset = data[0]; 427 | const segments = options.targets[0].segmentBy; 428 | const metrics = options.targets.map((target) => target.target); 429 | 430 | const tabularDataset = Object.assign({}, targetsDataset, { 431 | type: 'table', 432 | columns: [ 433 | ...segments.map((segmentBy) => ({ text: segmentBy })), 434 | ...metrics.map((metric) => ({ text: metric })) 435 | ], 436 | rows: targetsDataset.map((referenceRow, i) => { 437 | const referenceData = referenceRow.datapoints[0]; 438 | 439 | return [ 440 | ...referenceData.slice(0, segments.length), 441 | referenceData[segments.length], 442 | ...data.slice(1).map((d) => { 443 | if (d[i].target === referenceRow.target) { 444 | return d[i].datapoints[0][segments.length]; 445 | } else { 446 | // datasets could have different sets of segments; currently, no merge is performed 447 | return null; 448 | } 449 | }) 450 | ]; 451 | }) 452 | }); 453 | 454 | return { 455 | data: [Object.assign({}, data[0], tabularDataset)] 456 | }; 457 | } else { 458 | return { 459 | data: _.flatten(data) 460 | }; 461 | } 462 | } 463 | -------------------------------------------------------------------------------- /src/datasource.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 
6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | // 16 | import _ from 'lodash'; 17 | import DataService from './data_service'; 18 | import ApiService from './api_service'; 19 | import MetricsService from './metrics_service'; 20 | import TemplatingService from './templating_service'; 21 | import FormatterService from './formatter_service'; 22 | 23 | export const DEFAULT_PAGE_LIMIT = 20; 24 | 25 | const SORT_OPTIONS = { 26 | asc: 'asc', 27 | bottom: 'asc', 28 | desc: 'desc', 29 | top: 'desc' 30 | }; 31 | 32 | export class SysdigDatasource { 33 | constructor(instanceSettings, backendSrv, templateSrv) { 34 | this.name = instanceSettings.name; 35 | this.backendSrv = backendSrv; 36 | this.templateSrv = templateSrv; 37 | this.url = instanceSettings.url; 38 | this.access = 'proxy'; 39 | 40 | this.apiToken = instanceSettings.jsonData ? instanceSettings.jsonData.apiToken : ''; 41 | this.headers = { 42 | 'Content-Type': 'application/json', 43 | 'X-Sysdig-Product': 'SDC', 44 | Authorization: `Bearer ${this.apiToken}` 45 | }; 46 | } 47 | 48 | getBackendConfiguration() { 49 | return { 50 | backendSrv: this.backendSrv, 51 | withCredentials: this.withCredentials, 52 | headers: this.headers, 53 | apiToken: this.apiToken, 54 | url: this.url 55 | }; 56 | } 57 | 58 | async testDatasource() { 59 | const response = await ApiService.send(this.getBackendConfiguration(), { 60 | url: 'api/login' 61 | }); 62 | 63 | if (response.status === 200) { 64 | return { 65 | status: 'success', 66 | message: 'Data source is working', 67 | title: 'Success' 68 | }; 69 | } 70 | } 71 | 72 | async query(options) { 73 | const query = this.buildQueryParameters(options); 74 | query.targets = query.targets.filter((t) => !t.hide); 75 | 76 | if (query.targets.length <= 0) { 77 | return { data: [] }; 78 | } 79 | 80 | return DataService.fetch( 81 | this.getBackendConfiguration(), 82 | query, 83 | convertRangeToUserTime(options.range, query.intervalMs) 84 | ); 85 | } 86 | 87 | buildQueryParameters(options) { 88 | //remove placeholder targets 89 | options.targets = _.filter(options.targets, (target) => { 90 | return target.target !== 'select metric'; 91 | }); 92 | 93 | const targets = _.map(options.targets, (target, i, targets) => { 94 | if (target.target === undefined) { 95 | // here's the query control panel sending the first request with empty configuration 96 | return Object.assign({}, target, { 97 | target: 'cpu.used.percent', 98 | timeAggregation: 'timeAvg', 99 | groupAggregation: 'avg', 100 | filter: undefined, 101 | pageLimit: DEFAULT_PAGE_LIMIT, 102 | segmentBy: [] 103 | }); 104 | } else { 105 | const isTabularFormat = targets[0].isTabularFormat; 106 | const targetOptions = { 107 | segmentBy: isTabularFormat === false ? target.segmentBy : targets[0].segmentBy, 108 | filter: isTabularFormat === false ? 
target.filter : targets[0].filter, 109 | 110 | // pagination configuration is set for first target only 111 | pageLimit: targets[0].pageLimit, 112 | sortDirection: targets[0].sortDirection, 113 | 114 | // "single data point" configuration is set for first target only 115 | isSingleDataPoint: isTabularFormat || targets[0].isSingleDataPoint 116 | }; 117 | 118 | if (targetOptions.segmentBy && Array.isArray(targetOptions.segmentBy) === false) { 119 | // backwards compatibility: up to v0.3 one segmentation was supported only 120 | targetOptions.segmentBy = [targetOptions.segmentBy]; 121 | } 122 | 123 | return Object.assign({}, target, targetOptions, { 124 | segmentBy: targetOptions.segmentBy 125 | ? targetOptions.segmentBy.map((segmentBy) => 126 | this.resolveTemplate(segmentBy, true, options) 127 | ) 128 | : [], 129 | filter: this.resolveTemplate(targetOptions.filter, true, options), 130 | 131 | pageLimit: this.resolveTemplate( 132 | targetOptions.pageLimit, 133 | true, 134 | options, 135 | (d) => Number.parseInt(d) || DEFAULT_PAGE_LIMIT 136 | ), 137 | sortDirection: this.resolveTemplate( 138 | targetOptions.sortDirection, 139 | true, 140 | options, 141 | (d) => SORT_OPTIONS[(d || 'top').toLowerCase()] || SORT_OPTIONS['top'] 142 | ), 143 | 144 | target: this.resolveTemplate(target.target, true, options), 145 | timeAggregation: this.resolveTemplate(target.timeAggregation, true, options), 146 | groupAggregation: this.resolveTemplate(target.groupAggregation, true, options), 147 | 148 | alias: this.resolveTemplate(target.alias, true, options) 149 | }); 150 | } 151 | }); 152 | 153 | options.targets = targets; 154 | 155 | return options; 156 | } 157 | 158 | resolveTemplate(input, isSingleMatch, options, parser) { 159 | const normParser = parser || ((d) => d); 160 | 161 | if (typeof input === 'string') { 162 | return normParser( 163 | TemplatingService.replace(this.templateSrv, input, (options || {}).scopedVars) 164 | ); 165 | } else { 166 | return normParser(input); 167 | } 168 | } 169 | 170 | async metricFindQuery(query, options) { 171 | const normOptions = Object.assign( 172 | { areLabelsIncluded: false, range: null, variable: null, match: '' }, 173 | options 174 | ); 175 | 176 | if (query) { 177 | // 178 | // variable query 179 | // 180 | const result = await MetricsService.queryMetrics( 181 | this.getBackendConfiguration(), 182 | this.templateSrv, 183 | query, 184 | { userTime: convertRangeToUserTime(normOptions.range) } 185 | ); 186 | 187 | return result 188 | .sort(this.getLabelValuesSorter(normOptions.variable.sort)) 189 | .map((labelValue) => ({ 190 | text: FormatterService.formatLabelValue(labelValue) 191 | })); 192 | } else { 193 | // 194 | // panel configuration query 195 | // 196 | const result = await MetricsService.findMetrics(this.getBackendConfiguration(), { 197 | areLabelsIncluded: normOptions.areLabelsIncluded, 198 | match: normOptions.match 199 | }); 200 | 201 | // filter out all tags/labels/other string metrics 202 | 203 | if (normOptions.areLabelsIncluded) { 204 | return result; 205 | } else { 206 | return result.filter((metric) => metric.isNumeric); 207 | } 208 | } 209 | } 210 | 211 | async findSegmentBy(metric, query) { 212 | if (metric) { 213 | return MetricsService.findSegmentations(this.getBackendConfiguration(), { 214 | metric, 215 | match: this.resolveTemplate(query, true) 216 | }); 217 | } else { 218 | return []; 219 | } 220 | } 221 | 222 | getLabelValuesSorter(mode) { 223 | switch (mode) { 224 | case 0: // disabled 225 | case 1: // alphabetical (asc) 226 | return (a, b) 
=> { 227 | if (a === null) return -1; 228 | else if (b === null) return 1; 229 | else return a.localeCompare(b); 230 | }; 231 | 232 | case 3: // numerical (asc) 233 | return (a, b) => { 234 | if (a === null) return -1; 235 | else if (b === null) return 1; 236 | else return a - b; 237 | }; 238 | 239 | case 2: // alphabetical (desc) 240 | return (a, b) => { 241 | if (a === null) return -1; 242 | else if (b === null) return 1; 243 | else return a.localeCompare(b); 244 | }; 245 | 246 | case 4: // numerical (desc) 247 | return (a, b) => { 248 | if (a === null) return -1; 249 | else if (b === null) return 1; 250 | else return a - b; 251 | }; 252 | 253 | case 5: // alphabetical, case insensitive (asc) 254 | return (a, b) => { 255 | if (a === null) return -1; 256 | else if (b === null) return 1; 257 | else return a.localeCompare(b); 258 | }; 259 | 260 | case 6: // alphabetical, case insensitive (desc) 261 | return (a, b) => { 262 | if (a === null) return -1; 263 | else if (b === null) return 1; 264 | else return a.toLowerCase().localeCompare(b.toLowerCase()); 265 | }; 266 | } 267 | } 268 | 269 | async annotationQuery() { 270 | // const query = this.templateSrv.replace(options.annotation.query, {}, 'glob'); 271 | // const annotationQuery = { 272 | // range: options.range, 273 | // annotation: { 274 | // name: options.annotation.name, 275 | // datasource: options.annotation.datasource, 276 | // enable: options.annotation.enable, 277 | // iconColor: options.annotation.iconColor, 278 | // query: query 279 | // }, 280 | // rangeRaw: options.rangeRaw 281 | // }; 282 | 283 | // TODO Not supported yet 284 | return []; 285 | } 286 | } 287 | 288 | function convertRangeToUserTime(range, intervalMs) { 289 | if (range) { 290 | const userTime = { 291 | from: Math.trunc(range.from.valueOf() / 1000), 292 | to: Math.trunc(range.to.valueOf() / 1000) 293 | }; 294 | 295 | if (intervalMs) { 296 | userTime.sampling = Math.max(Math.trunc(intervalMs / 1000), 1); 297 | } 298 | 299 | return userTime; 300 | } else { 301 | return null; 302 | } 303 | } 304 | -------------------------------------------------------------------------------- /src/formatter_service.js: -------------------------------------------------------------------------------- 1 | export default class FormatterService { 2 | static formatLabelValue(labelValue) { 3 | return labelValue || FormatterService.NULL_TEXT; 4 | } 5 | 6 | static getSeriesName(dataPoint, target, isTabularFormat, isSingleTarget, keys) { 7 | let alias; 8 | if (target.alias) { 9 | alias = target.alias; 10 | } else { 11 | if (target.segmentBy.length === 0) { 12 | // single entity 13 | alias = '{{metric}}'; 14 | } else if (isTabularFormat === true) { 15 | alias = '{{segment_name}}'; 16 | } else if (isSingleTarget === true) { 17 | alias = '{{segment_value}}'; 18 | } else { 19 | alias = '{{metric}} ({{segment_value}})'; 20 | } 21 | } 22 | 23 | const pattern = /\{\{((?:metric|segment_name|segment_value))(?::(\d*))?(?::(\d*))?(?:\s\/([^/]+)\/)?\}\}/g; 24 | 25 | return alias.replace(pattern, (match, token, startTrim, endTrim, regexpString) => { 26 | const startTrimIndex = Number.parseInt(startTrim); 27 | const endTrimIndex = Number.parseInt(endTrim); 28 | 29 | let output; 30 | const trimmedGroup = token.trim(); 31 | if (trimmedGroup.startsWith('metric')) { 32 | output = target.target; 33 | } 34 | 35 | if (trimmedGroup.startsWith('segment_name')) { 36 | if (target.segmentBy.length > 0) { 37 | output = target.segmentBy.join(' - '); 38 | } else { 39 | return '[all]'; 40 | } 41 | } 42 | 43 | if 
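// Illustrative note (not part of the original source): the alias tokens substituted in this
// replace callback are '{{metric}}', '{{segment_name}}' and '{{segment_value}}'. Optional trim
// indexes and a regular expression may follow the token, e.g. an alias of '{{metric:10:5}}'
// keeps the first 10 and last 5 characters of the metric name, and '{{segment_value /prod-(.*)/}}'
// keeps only the captured group of the regular expression.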
(trimmedGroup.startsWith('segment_value')) { 44 | if (target.segmentBy.length > 0) { 45 | output = keys 46 | .map((segment) => FormatterService.formatLabelValue(dataPoint[segment])) 47 | .join(' - '); 48 | } else { 49 | return '[all]'; 50 | } 51 | } 52 | 53 | if (startTrimIndex) { 54 | if (endTrimIndex) { 55 | output = 56 | output.substring(0, startTrimIndex) + 57 | '..' + 58 | output.substring(output.length - endTrimIndex); 59 | } else { 60 | output = output.substring(0, startTrimIndex) + '..'; 61 | } 62 | } else if (endTrimIndex) { 63 | output = '..' + output.substring(output.length - endTrimIndex); 64 | } 65 | 66 | if (regexpString) { 67 | try { 68 | // 69 | // First, compile regular expression. Failures will invalidate the pattern entirely 70 | // 71 | const regexp = new RegExp(regexpString); 72 | 73 | // 74 | // Then, execute pattern against the current name 75 | // 76 | const matches = regexp.exec(output); 77 | 78 | if (matches && matches.length > 1) { 79 | // 80 | // And finally, joins all captured groups 81 | // 82 | output = matches.slice(1).join(''); 83 | } 84 | } catch (ex) { 85 | // noop 86 | } 87 | } 88 | 89 | return output; 90 | }); 91 | } 92 | } 93 | 94 | FormatterService.NULL_TEXT = 'n/a'; 95 | -------------------------------------------------------------------------------- /src/img/sysdig_logo.svg: -------------------------------------------------------------------------------- 1 | Artboard 1 -------------------------------------------------------------------------------- /src/metrics_service.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 
15 | // 16 | import _ from 'lodash'; 17 | import ApiService from './api_service'; 18 | import TimeService from './time_service'; 19 | import TemplatingService from './templating_service'; 20 | import Cache from './cache'; 21 | 22 | export default class MetricsService { 23 | static async findMetrics(backend, options) { 24 | const normOptions = Object.assign( 25 | { 26 | areLabelsIncluded: false, 27 | match: null, 28 | plottableMetricTypes: ['%', 'byte', 'int', 'double', 'number', 'relativeTime'] 29 | }, 30 | options 31 | ); 32 | 33 | if (normOptions.match && normOptions.match.trim() === '') { 34 | normOptions.match = null; 35 | } 36 | 37 | return getMetricsCache(backend).values.get(normOptions, async () => { 38 | const response = await ApiService.fetchMetricsDescriptors(backend, normOptions); 39 | 40 | return response 41 | .map((metric) => 42 | _.assign(metric, { 43 | isNumeric: normOptions.plottableMetricTypes.indexOf(metric.type) >= 0 44 | }) 45 | ) 46 | .sort((a, b) => a.id.localeCompare(b.id)); 47 | }); 48 | } 49 | 50 | static async findSegmentations(backend, options) { 51 | const normOptions = Object.assign({ metric: false, match: null }, options); 52 | 53 | if (normOptions.match && normOptions.match.trim() === '') { 54 | normOptions.match = null; 55 | } 56 | 57 | return getMetricsCache(backend).labels.get(normOptions, async () => { 58 | const result = await ApiService.fetchLabelDescriptors(backend, normOptions); 59 | 60 | return result.sort((a, b) => a.id.localeCompare(b.id)); 61 | }); 62 | } 63 | 64 | static async findSegmentValues(backend, filter, queryOptions, userTime) { 65 | let evaluateUserTime; 66 | if (userTime === null) { 67 | const { timelines } = await TimeService.queryTimelines(backend); 68 | if (timelines.agents.filter((t) => t.from !== null && t.to !== null).length > 0) { 69 | evaluateUserTime = { 70 | from: (timelines.agents[0].to - timelines.agents[0].sampling) / 1000000, 71 | to: timelines.agents[0].to / 1000000, 72 | sampling: timelines.agents[0].sampling / 1000000 73 | }; 74 | } else { 75 | throw 'Unable to query metrics (data not available)'; 76 | } 77 | } else { 78 | evaluateUserTime = userTime; 79 | } 80 | 81 | return TimeService.validateTimeWindow(backend, evaluateUserTime).then((requestTime) => { 82 | return ApiService.send(backend, { 83 | method: 'POST', 84 | url: 'api/data/entity/metadata', 85 | data: { 86 | time: { 87 | from: requestTime.from * 1000000, 88 | to: requestTime.to * 1000000 89 | }, 90 | metrics: [queryOptions.labelName], 91 | filter, 92 | paging: { from: queryOptions.from, to: queryOptions.to } 93 | } 94 | }); 95 | }); 96 | } 97 | 98 | static async queryMetrics(backend, templateSrv, query, options) { 99 | let queryOptions; 100 | if ((queryOptions = TemplatingService.validateLabelValuesQuery(query)) !== null) { 101 | // 102 | // return list of label values 103 | // 104 | return this.findSegmentValues( 105 | backend, 106 | TemplatingService.resolveQueryVariables(queryOptions.filter, templateSrv), 107 | queryOptions, 108 | options.userTime 109 | ).then((result) => result.data.data.map((d) => d[queryOptions.labelName])); 110 | } else if ((queryOptions = TemplatingService.validateLabelNamesQuery(query)) !== null) { 111 | // 112 | // return list of label names 113 | // 114 | return this.findSegmentations(backend, { match: queryOptions.pattern }).then((result) => 115 | result.map((metric) => metric.id) 116 | ); 117 | } else if ((queryOptions = TemplatingService.validateMetricsQuery(query)) !== null) { 118 | // 119 | // return list of metric names 120 | 
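// Illustrative examples (not in the original source) of the variable query forms this method
// dispatches on, as they would be typed into a Grafana template variable; the exact option
// syntax is parsed by TemplatingService (see templating_service.js further below):
//
//   metrics(net\.http\..*)            -> metric IDs matching the pattern
//   label_names(kubernetes\..*)       -> label/segmentation names matching the pattern
//   label_values(kubernetes.namespace.name, filter='kubernetes.cluster.name = "prod"', limit=50)
//                                     -> values of one label, optionally filtered and paged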
// 121 | return this.findMetrics(backend, { match: queryOptions.pattern }).then((result) => 122 | result.map((metric) => metric.id) 123 | ); 124 | } else { 125 | return []; 126 | } 127 | } 128 | 129 | static reset() { 130 | resetMetricsCache(); 131 | } 132 | } 133 | 134 | class MetricsCache extends Cache { 135 | constructor() { 136 | super(10, 60000); 137 | } 138 | 139 | getItemId(id) { 140 | return Object.keys(id) 141 | .map((k) => id[k]) 142 | .join(','); 143 | } 144 | } 145 | 146 | let metricsCaches; 147 | 148 | function getMetricsCache(backend) { 149 | if (metricsCaches === undefined) { 150 | metricsCaches = {}; 151 | } 152 | 153 | if (metricsCaches[backend.url] === undefined) { 154 | metricsCaches[backend.url] = { 155 | values: new MetricsCache(), 156 | labels: new MetricsCache() 157 | }; 158 | } 159 | 160 | return metricsCaches[backend.url]; 161 | } 162 | 163 | function resetMetricsCache() { 164 | metricsCaches = undefined; 165 | } 166 | -------------------------------------------------------------------------------- /src/module.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | // 16 | import { SysdigDatasource } from './datasource'; 17 | import { SysdigDatasourceQueryCtrl } from './query_ctrl'; 18 | import { SysdigConfigCtrl } from './config_ctrl'; 19 | 20 | class GenericQueryOptionsCtrl {} 21 | GenericQueryOptionsCtrl.templateUrl = 'partials/query.options.html'; 22 | 23 | class GenericAnnotationsQueryCtrl {} 24 | GenericAnnotationsQueryCtrl.templateUrl = 'partials/annotations.editor.html'; 25 | 26 | export { 27 | SysdigDatasource as Datasource, 28 | SysdigDatasourceQueryCtrl as QueryCtrl, 29 | SysdigConfigCtrl as ConfigCtrl, 30 | GenericQueryOptionsCtrl as QueryOptionsCtrl, 31 | GenericAnnotationsQueryCtrl as AnnotationsQueryCtrl 32 | }; 33 | -------------------------------------------------------------------------------- /src/partials/annotations.editor.html: -------------------------------------------------------------------------------- 1 | 18 |
Query
24 | 25 | 26 | -------------------------------------------------------------------------------- /src/partials/config.html: -------------------------------------------------------------------------------- 1 | 18 | 19 | 20 | 21 |

Sysdig API Settings

Sysdig API Server URL

Note: All requests will be made from the browser to the Grafana backend/server, which in turn forwards them to the Sysdig backend/server, thereby avoiding possible Cross-Origin Resource Sharing (CORS) restrictions.

Make sure the Sysdig backend/server is accessible from the Grafana backend/server.

Auth

Enter the API token. You can find your API token on the Settings > User Profile > Sysdig Monitor API page.

Other HTTP settings for the Pro Software deployment of Sysdig.

TLS Auth Details
TLS Certs are encrypted and stored in the Grafana database.
158 | 159 | -------------------------------------------------------------------------------- /src/partials/query.editor.html: -------------------------------------------------------------------------------- 1 | 18 | 19 | 20 |
136 | -------------------------------------------------------------------------------- /src/partials/query.options.html: -------------------------------------------------------------------------------- 1 | 18 |
22 | -------------------------------------------------------------------------------- /src/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "datasource", 3 | "id": "sysdig", 4 | "name": "Sysdig", 5 | 6 | "metrics": true, 7 | "alerting": false, 8 | "annotations": false, 9 | 10 | "info": { 11 | "description": "Sysdig Datasource for Grafana", 12 | "author": { 13 | "name": "Sysdig", 14 | "url": "https://sysdig.com" 15 | }, 16 | "logos": { 17 | "small": "img/sysdig_logo.svg", 18 | "large": "img/sysdig_logo.svg" 19 | }, 20 | "links": [ 21 | { "name": "GitHub", "url": "https://github.com/draios/grafana-sysdig-datasource" }, 22 | { 23 | "name": "Apache 2.0", 24 | "url": "https://www.apache.org/licenses/LICENSE-2.0" 25 | } 26 | ], 27 | "version": "@@version", 28 | "updated": "2018-05-03" 29 | }, 30 | 31 | "dependencies": { 32 | "grafanaVersion": "4.6.x", 33 | "plugins": [] 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/query_ctrl.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 
15 | // 16 | import { QueryCtrl } from 'app/plugins/sdk'; 17 | import './css/query-editor.css!'; 18 | import { DEFAULT_PAGE_LIMIT } from './datasource'; 19 | 20 | const DEFAULT_TIME_AGGREGATIONS = [ 21 | { value: 'timeAvg', text: 'Time average' }, 22 | { value: 'rateOfChange', text: 'Rate of change' }, 23 | { value: 'avg', text: 'Average' }, 24 | { value: 'sum', text: 'Sum' }, 25 | { value: 'min', text: 'Min' }, 26 | { value: 'max', text: 'Max' }, 27 | { value: 'count', text: 'Count' }, 28 | { value: 'concat', text: 'Concat' }, 29 | { value: 'distinct', text: 'Distinct' } 30 | ]; 31 | const DEFAULT_TIME_AGGREGATIONS_MAP = DEFAULT_TIME_AGGREGATIONS.reduce((acc, d) => { 32 | acc[d.value] = d; 33 | return acc; 34 | }, {}); 35 | 36 | const DEFAULT_GROUP_AGGREGATIONS = [ 37 | { value: 'avg', text: 'Average' }, 38 | { value: 'sum', text: 'Sum' }, 39 | { value: 'min', text: 'Min' }, 40 | { value: 'max', text: 'Max' }, 41 | { value: 'count', text: 'Count' }, 42 | { value: 'concat', text: 'Concat' }, 43 | { value: 'distinct', text: 'Distinct' } 44 | ]; 45 | const DEFAULT_GROUP_AGGREGATIONS_MAP = DEFAULT_GROUP_AGGREGATIONS.reduce((acc, d) => { 46 | acc[d.value] = d; 47 | return acc; 48 | }, {}); 49 | 50 | export class SysdigDatasourceQueryCtrl extends QueryCtrl { 51 | constructor($scope, $injector) { 52 | super($scope, $injector); 53 | 54 | this.scope = $scope; 55 | this.target.target = this.target.target || 'cpu.used.percent'; 56 | this.target.timeAggregation = this.target.timeAggregation || 'timeAvg'; 57 | this.target.groupAggregation = this.target.groupAggregation || 'avg'; 58 | 59 | if (this.target.segmentBy) { 60 | if (Array.isArray(this.target.segmentBy) === false) { 61 | this.target.segmentBy = [this.target.segmentBy]; 62 | } 63 | } else { 64 | this.target.segmentBy = []; 65 | } 66 | 67 | this.target.sortDirection = this.target.sortDirection || 'desc'; 68 | this.target.isSingleDataPoint = this.target.isTabularFormat; 69 | 70 | this.segmentByItems = this.calculateSegmentByItems(); 71 | } 72 | 73 | isFirstTarget() { 74 | return this.panel.targets.indexOf(this.target) === 0; 75 | } 76 | 77 | isTabularFormat() { 78 | return this.panel.targets[0].isTabularFormat; 79 | } 80 | 81 | getLimitPlaceholder() { 82 | return `${DEFAULT_PAGE_LIMIT} (element count)`; 83 | } 84 | 85 | getVariableItems() { 86 | return this.datasource.templateSrv.variables.map((variable) => { 87 | const text = `\${${variable.name}}`; 88 | return { text, value: text }; 89 | }); 90 | } 91 | 92 | getMetricOptions(query) { 93 | let parseMetric; 94 | let options = { 95 | areLabelsIncluded: this.isTabularFormat(), 96 | match: query 97 | }; 98 | 99 | if (!this.isTabularFormat()) { 100 | parseMetric = (m) => ({ text: m.id, value: m.id }); 101 | } else { 102 | parseMetric = (m) => { 103 | if (m.isNumeric) { 104 | return { text: `(#) ${m.id}`, value: m.id }; 105 | } else { 106 | return { text: `(A) ${m.id}`, value: m.id }; 107 | } 108 | }; 109 | } 110 | 111 | return this.datasource.metricFindQuery(null, options).then((data) => { 112 | return [...this.getVariableItems(), ...data.map(parseMetric)]; 113 | }); 114 | } 115 | 116 | getAggregationOptions() { 117 | return this.datasource.metricFindQuery(null, { match: this.target.target }).then((data) => { 118 | return data.length > 0 ? 
data[0] : null; 119 | }); 120 | } 121 | 122 | getTimeAggregationOptions() { 123 | return this.getAggregationOptions().then((m) => { 124 | if (m) { 125 | return getAggregationList( 126 | m.timeAggregations, 127 | DEFAULT_TIME_AGGREGATIONS, 128 | DEFAULT_TIME_AGGREGATIONS_MAP 129 | ); 130 | } else { 131 | return []; 132 | } 133 | }); 134 | } 135 | 136 | getGroupAggregationOptions() { 137 | return this.getAggregationOptions().then((m) => { 138 | if (m) { 139 | return getAggregationList( 140 | m.groupAggregations, 141 | DEFAULT_GROUP_AGGREGATIONS, 142 | DEFAULT_GROUP_AGGREGATIONS_MAP 143 | ); 144 | } else { 145 | return []; 146 | } 147 | }); 148 | } 149 | 150 | getSortDirectionOptions() { 151 | return [ 152 | { value: 'desc', text: 'Top' }, 153 | { value: 'asc', text: 'Bottom' } 154 | ]; 155 | } 156 | 157 | getSegmentByOptions(item, query) { 158 | return this.datasource 159 | .findSegmentBy( 160 | this.target.target, 161 | query !== 'select metric' && query !== '' ? query : null 162 | ) 163 | .then((data) => { 164 | return [ 165 | { text: 'no segmentation', value: null }, 166 | ...this.getVariableItems(), 167 | ...data.map((m) => ({ text: m.id, value: m.id })) 168 | ]; 169 | }); 170 | } 171 | 172 | removeSegmentBy(item) { 173 | const index = this.segmentByItems.indexOf(item); 174 | 175 | // remove segmentation from list 176 | this.target.segmentBy = [ 177 | ...this.target.segmentBy.slice(0, index), 178 | ...this.target.segmentBy.slice(index + 1) 179 | ]; 180 | 181 | // update UI list 182 | this.segmentByItems = this.calculateSegmentByItems(); 183 | 184 | // update data 185 | this.panelCtrl.refresh(); 186 | } 187 | 188 | addSegmentBy(item) { 189 | const index = this.segmentByItems.indexOf(item); 190 | 191 | // add new item after the one where + has been clicked 192 | this.segmentByItems = [ 193 | ...this.segmentByItems.slice(0, index + 1), 194 | { 195 | isFirst: false, 196 | canAdd: true, 197 | segmentBy: null 198 | }, 199 | ...this.segmentByItems.slice(index + 1) 200 | ]; 201 | 202 | // don't update the UI: the change is temporary until the user picks a segmentation 203 | } 204 | 205 | onChangeParameter() { 206 | this.panelCtrl.refresh(); 207 | 208 | this.target.segmentBy = this.segmentByItems 209 | .filter((item) => item.segmentBy !== null) 210 | .map((item) => item.segmentBy); 211 | 212 | this.segmentByItems = this.calculateSegmentByItems(); 213 | } 214 | 215 | calculateSegmentByItems() { 216 | if (this.target.segmentBy.length === 0) { 217 | return [ 218 | { 219 | isFirst: true, 220 | canAdd: false, 221 | segmentBy: null 222 | } 223 | ]; 224 | } else { 225 | return this.target.segmentBy.map((segmentBy, i) => ({ 226 | isFirst: i === 0, 227 | canAdd: i === this.target.segmentBy.length - 1, 228 | segmentBy 229 | })); 230 | } 231 | } 232 | 233 | toggleEditorMode() { 234 | // noop 235 | } 236 | 237 | onChangeTabularFormat() { 238 | this.target.isSingleDataPoint = this.target.isTabularFormat; 239 | this.refresh(); 240 | } 241 | } 242 | 243 | SysdigDatasourceQueryCtrl.templateUrl = 'partials/query.editor.html'; 244 | 245 | function getAggregationList(aggregations, knownList, knownMap) { 246 | return aggregations 247 | .map((d) => { 248 | const descr = knownMap[d]; 249 | 250 | return descr || { text: d, value: d }; 251 | }) 252 | .sort((a, b) => { 253 | const indexA = knownList.indexOf(a); 254 | const indexB = knownList.indexOf(b); 255 | 256 | if (indexA !== indexB) { 257 | return indexA - indexB; 258 | } else { 259 | return a.text.localeCompare(b.text); 260 | } 261 | }); 262 | } 263 | 
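For orientation, here is a minimal sketch (not part of the repository) of the target object that the query controller above assembles with its default settings; the property names follow the controller and datasource code, while the concrete values are only an illustration:

```js
// Hypothetical target produced by SysdigDatasourceQueryCtrl with its defaults.
const exampleTarget = {
    refId: 'A',
    target: 'cpu.used.percent',   // default metric set in the constructor
    timeAggregation: 'timeAvg',   // default time aggregation
    groupAggregation: 'avg',      // default group aggregation
    segmentBy: [],                // no segmentation selected yet
    sortDirection: 'desc',        // "Top" ordering
    pageLimit: null,              // datasource.js falls back to DEFAULT_PAGE_LIMIT
    isTabularFormat: false,
    isSingleDataPoint: false
};
```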
-------------------------------------------------------------------------------- /src/sysdig_dashboard_helper.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | // 16 | /* global grafanaBootData */ 17 | 18 | export default class SysdigDashboardHelper { 19 | static convertToGrafana(version, sysdigDashboard, options) { 20 | return SysdigDashboardHelper.getHelper(version).convertToGrafana(sysdigDashboard, options); 21 | } 22 | 23 | static filterDashboardBySetId(version, setId, dashboard) { 24 | return SysdigDashboardHelper.getHelper(version).filterDashboardBySetId(setId, dashboard); 25 | } 26 | 27 | static getHelper(version) { 28 | if (version === 'v1') { 29 | return SysdigDashboardHelperV1; 30 | } else if (version === 'v2') { 31 | return SysdigDashboardHelperV2; 32 | } else { 33 | throw { 34 | name: 'Invalid parameter', 35 | message: `Invalid dashboard version ${version}` 36 | }; 37 | } 38 | } 39 | } 40 | 41 | class SysdigDashboardHelperV1 { 42 | static convertToGrafana(sysdigDashboard, options) { 43 | const panels = (sysdigDashboard.items || sysdigDashboard.widgets) 44 | .map((panel, index) => { 45 | const builder = this.getPanelBuilder(panel); 46 | return builder.build(this.getParsers(), sysdigDashboard, options, panel, index); 47 | }) 48 | .filter((r) => r !== null); 49 | 50 | const isRowMandatory = getGrafanaVersion().indexOf('4.') === 0; 51 | let dashboardPanelsConfiguration; 52 | if (isRowMandatory) { 53 | // convert grid layout to row spans 54 | panels.forEach((panel) => { 55 | panel.span = panel.gridPos.w / 2; 56 | }); 57 | 58 | // define rows 59 | dashboardPanelsConfiguration = { 60 | rows: panels.reduce((acc, panel) => { 61 | if (acc.length === 0) { 62 | return [ 63 | { 64 | panels: [panel] 65 | } 66 | ]; 67 | } else if (acc[acc.length - 1].panels[0].gridPos.x < panel.gridPos.x) { 68 | acc[acc.length - 1].panels.push(panel); 69 | } else { 70 | acc.push({ 71 | panels: [panel] 72 | }); 73 | } 74 | 75 | return acc; 76 | }, []) 77 | }; 78 | 79 | // remove grid layout 80 | panels.forEach((panel) => { 81 | delete panel.gridPos; 82 | }); 83 | } else { 84 | dashboardPanelsConfiguration = { panels }; 85 | } 86 | 87 | let categoryTags; 88 | if (sysdigDashboard.category) { 89 | categoryTags = sysdigDashboard.category 90 | .split('.') 91 | .reduce((acc, part) => { 92 | if (acc === null) { 93 | return [part]; 94 | } else { 95 | return [...acc, `${acc[acc.length - 1]}.${part}`]; 96 | } 97 | }, null) 98 | .map((categoryId) => { 99 | const category = options.categories.find( 100 | (category) => category.id === categoryId 101 | ); 102 | 103 | if (category) { 104 | return category.name; 105 | } else { 106 | return null; 107 | } 108 | }) 109 | .filter((category) => category !== null); 110 | } else { 111 | categoryTags = []; 112 | } 113 | 114 | return Object.assign( 115 | { 116 | schemaVersion: 6, 117 | version: 0, 118 | title: 
sysdigDashboard.name, 119 | tags: [...(options.tags || []), ...categoryTags], 120 | timezone: 'browser', 121 | time: { 122 | // default Sysdig: last 1 hour 123 | from: 'now-1h', 124 | to: 'now' 125 | }, 126 | graphTooltip: 1 // shared crosshair 127 | }, 128 | dashboardPanelsConfiguration 129 | ); 130 | } 131 | 132 | static getPanelBuilder(panel) { 133 | switch (panel.showAs) { 134 | case 'timeSeries': 135 | return TimeSeriesBuilder; 136 | 137 | case 'timeSeriesArea': 138 | return TimeSeriesAreaBuilder; 139 | 140 | case 'histogram': 141 | return HistogramBuilder; 142 | 143 | case 'top': 144 | return BarChartBuilder; 145 | 146 | case 'summary': 147 | return NumberBuilder; 148 | 149 | case 'table': 150 | return TableBuilder; 151 | 152 | case 'text': 153 | return TextBuilder; 154 | 155 | default: 156 | console.warn(`${panel.showAs} panels cannot be exported to Grafana`); 157 | return DefaultBuilder; 158 | } 159 | } 160 | 161 | static filterDashboardBySetId(setId, dashboard) { 162 | switch (setId) { 163 | case 'PRIVATE': 164 | return dashboard.isShared === false; 165 | case 'SHARED': 166 | return dashboard.isShared === true; 167 | } 168 | } 169 | 170 | static getParsers() { 171 | return { 172 | parseMetric: this.parseMetric 173 | }; 174 | } 175 | 176 | static parseMetric(metric) { 177 | return Object.assign({}, metric, { 178 | id: metric.metricId.replace(/%25/g, '.'), 179 | timeAggregation: metric.timeAggregation || metric.aggregation 180 | }); 181 | } 182 | } 183 | 184 | class SysdigDashboardHelperV2 extends SysdigDashboardHelperV1 { 185 | static filterDashboardBySetId(setId, dashboard) { 186 | switch (setId) { 187 | case 'PRIVATE': 188 | return dashboard.shared === false; 189 | case 'SHARED': 190 | return dashboard.shared === true; 191 | } 192 | } 193 | 194 | static parseMetric(metric) { 195 | return Object.assign({}, metric, { 196 | id: metric.id.replace(/%25/g, '.'), 197 | timeAggregation: metric.timeAggregation || metric.aggregation 198 | }); 199 | } 200 | } 201 | 202 | function getGrafanaVersion() { 203 | return grafanaBootData && 204 | grafanaBootData.settings && 205 | grafanaBootData.settings.buildInfo && 206 | grafanaBootData.settings.buildInfo.version 207 | ? 
grafanaBootData.settings.buildInfo.version 208 | : 'n.a.'; 209 | } 210 | 211 | const GRAFANA_COLUMN_COUNT = 24; 212 | const SYSDIG_COLUMN_COUNT = 12; 213 | 214 | class BaseBuilder { 215 | static getPanelType() { 216 | return null; 217 | } 218 | 219 | static isSingleDataPoint() { 220 | return false; 221 | } 222 | 223 | static isTabularFormat() { 224 | return false; 225 | } 226 | 227 | static getTargetGridLayout(sysdigDashboard, sysdigPanel) { 228 | let layout; 229 | if (sysdigDashboard.items) { 230 | const index = (sysdigDashboard.items || sysdigDashboard.widgets).indexOf(sysdigPanel); 231 | layout = sysdigDashboard.layout[index]; 232 | } else { 233 | layout = sysdigPanel.gridConfiguration; 234 | } 235 | 236 | // keep w/h ratio similar to Sysdig by reducing height by 80% 237 | return { 238 | h: Math.ceil((layout.size_y / SYSDIG_COLUMN_COUNT) * GRAFANA_COLUMN_COUNT * 0.8), 239 | w: (layout.size_x / SYSDIG_COLUMN_COUNT) * GRAFANA_COLUMN_COUNT, 240 | x: ((layout.col - 1) / SYSDIG_COLUMN_COUNT) * GRAFANA_COLUMN_COUNT, 241 | y: Math.floor(((layout.row - 1) / SYSDIG_COLUMN_COUNT) * GRAFANA_COLUMN_COUNT * 0.8) 242 | }; 243 | } 244 | 245 | static getTargetFilter(sysdigDashboard, sysdigPanel) { 246 | return sysdigPanel.scope || sysdigDashboard.filterExpression; 247 | } 248 | 249 | static getBasePanelConfiguration(sysdigDashboard, options, sysdigPanel, index) { 250 | return { 251 | type: this.getPanelType(), 252 | datasource: options.datasourceName, 253 | id: index, 254 | title: sysdigPanel.name, 255 | gridPos: this.getTargetGridLayout(sysdigDashboard, sysdigPanel) 256 | }; 257 | } 258 | 259 | static getValueFormat(valueMetric, metrics) { 260 | const metricConfiguration = _.find(metrics, (m) => m.id === valueMetric.id); 261 | 262 | if (metricConfiguration === undefined) { 263 | // metric not found, return default format 264 | return 'short'; 265 | } else { 266 | // NOTE: For unit mapping, refer to public/app/core/utils/kbn.ts 267 | const isRate = valueMetric.aggregation === 'timeAvg'; 268 | switch (metricConfiguration.type) { 269 | case 'string': 270 | case 'providerServiceEnum': 271 | case 'bool': 272 | return 'none'; 273 | 274 | case 'int': 275 | case 'number': 276 | case 'double': 277 | return 'short'; 278 | 279 | case 'byte': 280 | if (isRate) { 281 | return 'Bps'; 282 | } else { 283 | return 'bytes'; 284 | } 285 | 286 | case 'relativeTime': 287 | return 'ns'; 288 | 289 | case '%': 290 | case 'ratio': 291 | return 'percent'; 292 | 293 | case 'date': 294 | case 'dateTime': 295 | case 'absoluteTime': 296 | return 'dateTimeAsIso'; 297 | 298 | default: 299 | return 'short'; 300 | } 301 | } 302 | } 303 | } 304 | 305 | class TimeSeriesBuilder extends BaseBuilder { 306 | static getPanelType() { 307 | return 'graph'; 308 | } 309 | 310 | static build(parsers, sysdigDashboard, options, sysdigPanel, index) { 311 | return Object.assign( 312 | {}, 313 | this.getBasePanelConfiguration(sysdigDashboard, options, sysdigPanel, index), 314 | { 315 | targets: this.buildTargets(parsers, sysdigDashboard, sysdigPanel), 316 | legend: { 317 | show: false // retain Sysdig layout 318 | }, 319 | yaxes: this.buildPanelYAxes(parsers, sysdigDashboard, sysdigPanel, options) 320 | } 321 | ); 322 | } 323 | 324 | static getValues(parsers, sysdigDashboard, sysdigPanel) { 325 | const values = sysdigPanel.metrics.map(parsers.parseMetric).filter((metric) => { 326 | return metric.id !== 'timestamp' && metric.timeAggregation !== undefined; 327 | }); 328 | if (values.length === 0) { 329 | console.warn('Expected at least one value 
metric'); 330 | } 331 | 332 | return values; 333 | } 334 | 335 | static getKeys(parsers, sysdigDashboard, sysdigPanel) { 336 | const keys = sysdigPanel.metrics.map(parsers.parseMetric).filter((metric) => { 337 | return metric.id !== 'timestamp' && metric.timeAggregation === undefined; 338 | }); 339 | if (keys.length > 1) { 340 | console.warn('Expected at most one key metric'); 341 | } 342 | 343 | return keys; 344 | } 345 | 346 | static buildTargets(parsers, sysdigDashboard, sysdigPanel) { 347 | const values = this.getValues(parsers, sysdigDashboard, sysdigPanel); 348 | const keys = this.getKeys(parsers, sysdigDashboard, sysdigPanel); 349 | 350 | return values.map((value, i) => { 351 | return { 352 | refId: i.toString(), 353 | isSingleDataPoint: this.isSingleDataPoint(), 354 | isTabularFormat: this.isTabularFormat(), 355 | target: value.id, 356 | timeAggregation: value.timeAggregation, 357 | groupAggregation: value.groupAggregation, 358 | segmentBy: keys.length > 0 ? keys.map((key) => key.id) : null, 359 | filter: this.getTargetFilter(sysdigDashboard, sysdigPanel), 360 | sortDirection: this.getTargetSortDirection(sysdigPanel), 361 | pageLimit: this.getTargetPageLimit(sysdigPanel) 362 | }; 363 | }); 364 | } 365 | 366 | static getTargetSortDirection(sysdigPanel) { 367 | const normalizedDisplayOptions = Object.assign( 368 | { 369 | valueLimit: { 370 | direction: null, 371 | count: null 372 | } 373 | }, 374 | sysdigPanel.customDisplayOptions 375 | ); 376 | 377 | return normalizedDisplayOptions.valueLimit.direction || null; 378 | } 379 | 380 | static parseValueLimitCount(sysdigPanel) { 381 | return sysdigPanel.customDisplayOptions && 382 | sysdigPanel.customDisplayOptions.valueLimit && 383 | Number.parseInt(sysdigPanel.customDisplayOptions.valueLimit.count, 10) 384 | ? Number.parseInt(sysdigPanel.customDisplayOptions.valueLimit.count, 10) 385 | : 10; 386 | } 387 | 388 | static getTargetPageLimit(sysdigPanel) { 389 | return this.parseValueLimitCount(sysdigPanel); 390 | } 391 | 392 | static buildPanelYAxes(parsers, sysdigDashboard, sysdigPanel, options) { 393 | const normalizedDisplayOptions = Object.assign({}, sysdigPanel.customDisplayOptions); 394 | 395 | let yAxisLogBase; 396 | if (normalizedDisplayOptions.yAxisScale) { 397 | switch (normalizedDisplayOptions.yAxisScale) { 398 | case 'logarithmic2': 399 | yAxisLogBase = 2; 400 | break; 401 | case 'logarithmic10': 402 | yAxisLogBase = 10; 403 | break; 404 | case 'logarithmic32': 405 | yAxisLogBase = 32; 406 | break; 407 | case 'logarithmic1024': 408 | yAxisLogBase = 1024; 409 | break; 410 | default: 411 | yAxisLogBase = 1; 412 | break; 413 | } 414 | } else { 415 | yAxisLogBase = 1; 416 | } 417 | 418 | const baseAxisConfig = { 419 | label: null, 420 | logBase: 1, 421 | min: null, 422 | max: null, 423 | show: false 424 | }; 425 | 426 | const values = this.getValues(parsers, sysdigDashboard, sysdigPanel); 427 | 428 | return [ 429 | // left axis 430 | _.assign({}, baseAxisConfig, { 431 | format: this.getValueFormat(values[0], options.metrics), 432 | show: true, 433 | min: normalizedDisplayOptions.yAxisLeftDomain 434 | ? normalizedDisplayOptions.yAxisLeftDomain.from 435 | : null, 436 | max: normalizedDisplayOptions.yAxisLeftDomain 437 | ? 
normalizedDisplayOptions.yAxisLeftDomain.to 438 | : null, 439 | logBase: yAxisLogBase 440 | }), 441 | // right axis 442 | _.assign({}, baseAxisConfig) 443 | ]; 444 | } 445 | } 446 | 447 | class TimeSeriesAreaBuilder extends TimeSeriesBuilder { 448 | static build(...args) { 449 | return Object.assign({}, super.build(...args), { 450 | stack: true, 451 | fill: 7 // similar opacity used by Sysdig Monitor 452 | }); 453 | } 454 | } 455 | 456 | class HistogramBuilder extends TimeSeriesBuilder { 457 | static isSingleDataPoint() { 458 | return true; 459 | } 460 | 461 | static getValueFormat() { 462 | // the axis will count items in each bucket 463 | return 'short'; 464 | } 465 | 466 | static build(parsers, sysdigDashboard, options, sysdigPanel, index) { 467 | return Object.assign( 468 | {}, 469 | super.build(parsers, sysdigDashboard, options, sysdigPanel, index), 470 | { 471 | bars: true, 472 | lines: false, 473 | xaxis: { 474 | buckets: sysdigPanel.customDisplayOptions 475 | ? sysdigPanel.customDisplayOptions.histogram.numberOfBuckets 476 | : 10, 477 | mode: 'histogram' 478 | } 479 | } 480 | ); 481 | } 482 | 483 | static getTargetPageLimit(sysdigPanel) { 484 | // apply a "premium" x10 to limit the effect of data pagination to bucket values 485 | // Grafana will get all the entities and will define buckets on top of that 486 | // However, if pagination limits the number of entries exported via API, bucket values 487 | // will not be correct. 488 | return this.parseValueLimitCount(sysdigPanel) * 10; 489 | } 490 | } 491 | 492 | class BarChartBuilder extends TimeSeriesBuilder { 493 | static isSingleDataPoint() { 494 | return true; 495 | } 496 | 497 | static build(parsers, sysdigDashboard, options, sysdigPanel, index) { 498 | return Object.assign( 499 | {}, 500 | super.build(parsers, sysdigDashboard, options, sysdigPanel, index), 501 | { 502 | bars: true, 503 | lines: false, 504 | xaxis: { 505 | mode: 'series', 506 | values: ['total'] 507 | } 508 | } 509 | ); 510 | } 511 | } 512 | 513 | class NumberBuilder extends BaseBuilder { 514 | static getPanelType() { 515 | return 'singlestat'; 516 | } 517 | 518 | static isSingleDataPoint() { 519 | return true; 520 | } 521 | 522 | static build(parsers, sysdigDashboard, options, sysdigPanel, index) { 523 | const value = this.getValue(parsers, sysdigDashboard, sysdigPanel); 524 | 525 | if (value) { 526 | // TODO set proper format 527 | const format = this.getValueFormat(value, options.metrics); 528 | 529 | return Object.assign( 530 | {}, 531 | this.getBasePanelConfiguration(sysdigDashboard, options, sysdigPanel, index), 532 | { 533 | targets: this.buildTargets(parsers, sysdigDashboard, sysdigPanel), 534 | format 535 | } 536 | ); 537 | } else { 538 | console.warn('number panel configuration not valid (missing value)'); 539 | return this.getBasePanelConfiguration( 540 | sysdigDashboard, 541 | options, 542 | sysdigPanel, 543 | index, 544 | 'singlestat' 545 | ); 546 | } 547 | } 548 | 549 | static getValue(parsers, sysdigDashboard, sysdigPanel) { 550 | const values = sysdigPanel.metrics 551 | .map(parsers.parseMetric) 552 | .filter((metric) => { 553 | return metric.id !== 'timestamp' && metric.timeAggregation !== undefined; 554 | }) 555 | .map(parsers.parseMetric); 556 | if (values.length !== 1) { 557 | console.warn('Expected exactly one value metric'); 558 | } 559 | 560 | return values[0]; 561 | } 562 | 563 | static buildTargets(parsers, sysdigDashboard, sysdigPanel) { 564 | const value = this.getValue(parsers, sysdigDashboard, sysdigPanel); 565 | 566 | return [ 567 | 
{ 568 | refId: '0', 569 | isSingleDataPoint: this.isSingleDataPoint(), 570 | isTabularFormat: this.isTabularFormat(), 571 | segmentBy: null, 572 | filter: this.getTargetFilter(sysdigDashboard, sysdigPanel), 573 | target: value.id, 574 | timeAggregation: value.timeAggregation, 575 | groupAggregation: value.groupAggregation 576 | } 577 | ]; 578 | } 579 | } 580 | 581 | class TableBuilder extends TimeSeriesBuilder { 582 | static getPanelType() { 583 | return 'table'; 584 | } 585 | 586 | static isSingleDataPoint() { 587 | return true; 588 | } 589 | 590 | static isTabularFormat() { 591 | return true; 592 | } 593 | 594 | static build(parsers, sysdigDashboard, options, sysdigPanel, index) { 595 | return Object.assign( 596 | {}, 597 | super.build(parsers, sysdigDashboard, options, sysdigPanel, index), 598 | { 599 | transform: 'timeseries_aggregations', 600 | sort: { 601 | col: 1, 602 | desc: true 603 | }, 604 | styles: [ 605 | ...sysdigPanel.metrics.map(parsers.parseMetric).map((metric) => { 606 | const format = this.getValueFormat(metric, options.metrics); 607 | if (format === 'none') { 608 | return { 609 | pattern: metric.id, 610 | type: 'string' 611 | }; 612 | } else { 613 | return { 614 | pattern: metric.id, 615 | type: 'number', 616 | unit: format, 617 | decimals: 2 618 | }; 619 | } 620 | }), 621 | { 622 | pattern: '/.*/', 623 | type: 'string' 624 | } 625 | ] 626 | } 627 | ); 628 | } 629 | 630 | static buildTargets(parsers, sysdigDashboard, sysdigPanel) { 631 | const keys = this.getKeys(parsers, sysdigDashboard, sysdigPanel); 632 | const filterMetrics = (metric) => metric.timeAggregation !== undefined; 633 | 634 | return sysdigPanel.metrics 635 | .map(parsers.parseMetric) 636 | .filter(filterMetrics) 637 | .map((value, i) => { 638 | return { 639 | refId: i.toString(), 640 | isSingleDataPoint: this.isSingleDataPoint(), 641 | isTabularFormat: this.isTabularFormat(), 642 | target: value.id, 643 | timeAggregation: value.timeAggregation || 'concat', 644 | groupAggregation: value.groupAggregation || 'concat', 645 | segmentBy: keys.length > 0 ? 
keys.map((key) => key.id) : null, 646 | filter: this.getTargetFilter(sysdigDashboard, sysdigPanel), 647 | sortDirection: this.getTargetSortDirection(sysdigPanel), 648 | pageLimit: this.getTargetPageLimit(sysdigPanel) 649 | }; 650 | }); 651 | } 652 | 653 | static getKeys(parsers, sysdigDashboard, sysdigPanel) { 654 | return sysdigPanel.metrics.map(parsers.parseMetric).filter((metric) => { 655 | return metric.timeAggregation === undefined; 656 | }); 657 | } 658 | } 659 | 660 | class TextBuilder extends BaseBuilder { 661 | static getPanelType() { 662 | return 'text'; 663 | } 664 | 665 | static build(parsers, sysdigDashboard, options, sysdigPanel, index) { 666 | return Object.assign( 667 | {}, 668 | this.getBasePanelConfiguration(sysdigDashboard, options, sysdigPanel, index), 669 | { 670 | mode: 'markdown', 671 | content: this.getContent(sysdigPanel), 672 | transparent: sysdigPanel.hasTransparentBackground === true 673 | } 674 | ); 675 | } 676 | 677 | static getContent(sysdigPanel) { 678 | return sysdigPanel.markdownSource; 679 | } 680 | } 681 | 682 | class DefaultBuilder extends BaseBuilder { 683 | static build(parsers, sysdigDashboard, options, sysdigPanel, index) { 684 | return Object.assign( 685 | {}, 686 | this.getBasePanelConfiguration(sysdigDashboard, options, sysdigPanel, index), 687 | { 688 | mode: 'html', 689 | content: this.getContent(sysdigPanel) 690 | } 691 | ); 692 | } 693 | 694 | static getPanelType() { 695 | return 'text'; 696 | } 697 | 698 | static getContent(sysdigPanel) { 699 | let panelType; 700 | switch (sysdigPanel.showAs) { 701 | case 'timeSeriesArea': 702 | panelType = 'Area'; 703 | break; 704 | case 'top': 705 | panelType = 'Top list'; 706 | break; 707 | case 'histogram': 708 | panelType = 'Histogram'; 709 | break; 710 | case 'map': 711 | panelType = 'Topology'; 712 | break; 713 | case 'summary': 714 | panelType = 'Number'; 715 | break; 716 | case 'table': 717 | panelType = 'Table'; 718 | break; 719 | default: 720 | panelType = sysdigPanel.showAs; 721 | break; 722 | } 723 | 724 | return `
${panelType} cannot be exported from Sysdig Monitor to Grafana.
`; 725 | } 726 | } 727 | -------------------------------------------------------------------------------- /src/templating_service.js: -------------------------------------------------------------------------------- 1 | import FormatterService from './formatter_service'; 2 | 3 | export default class TemplatingService { 4 | static validateLabelValuesQuery(query) { 5 | const parsed = parseFunction(query, 'label_values'); 6 | if (parsed) { 7 | return parseOptions(parsed.options, 'label_values'); 8 | } else { 9 | return null; 10 | } 11 | } 12 | 13 | static validateLabelNamesQuery(query) { 14 | const parsed = parseFunction(query, 'label_names'); 15 | if (parsed) { 16 | return { pattern: parsed.options, regex: new RegExp(parsed.options) }; 17 | } else { 18 | return null; 19 | } 20 | } 21 | 22 | static validateMetricsQuery(query) { 23 | const parsed = parseFunction(query, 'metrics'); 24 | if (parsed) { 25 | return { pattern: parsed.options, regex: new RegExp(parsed.options) }; 26 | } else { 27 | return null; 28 | } 29 | } 30 | 31 | static resolveQueryVariables(query, templateSrv) { 32 | if (query) { 33 | return this.replace(templateSrv, query, null); 34 | } else { 35 | return null; 36 | } 37 | } 38 | 39 | static replace(templateSrv, input, scopedVars) { 40 | return templateSrv.replace(input, scopedVars, (...args) => 41 | this.formatTemplateValue(...args) 42 | ); 43 | } 44 | 45 | static formatTemplateValue(value, variable) { 46 | const format = this.validateLabelValuesQuery(variable.query) 47 | ? formatQuotedValue 48 | : formatValue; 49 | 50 | if (typeof value === 'string') { 51 | // 52 | // single selection 53 | // 54 | return format(value); 55 | } else { 56 | // 57 | // "all" 58 | // 59 | return value.map(format).join(', '); 60 | } 61 | } 62 | } 63 | 64 | function parseFunction(value, functionName) { 65 | if (value) { 66 | const functionPattern = `${functionName}\\((?:(.*))\\)`; 67 | const regex = value.match(`^${functionPattern}$`); 68 | if (regex) { 69 | const options = regex[1]; 70 | 71 | return { options }; 72 | } else { 73 | return null; 74 | } 75 | } else { 76 | return null; 77 | } 78 | } 79 | 80 | function parseOptions(value, functionName) { 81 | switch (functionName) { 82 | case 'label_values': { 83 | const parseConfiguration = { 84 | namelessOption: { 85 | name: 'labelName', 86 | pattern: '([A-Za-z][A-Za-z0-9]*(?:[\\._\\-:][a-zA-Z0-9]+)*)' 87 | }, 88 | namedOptions: [ 89 | { 90 | name: 'filter', 91 | patterns: [`"([^"]+)"`, `'([^']+)'`], 92 | validate: (value) => value.trim(), 93 | defaultValue: null 94 | }, 95 | { 96 | name: 'from', 97 | pattern: '(\\d+)', 98 | validate: (value) => Number(value), 99 | defaultValue: 0 100 | }, 101 | { 102 | name: 'to', 103 | pattern: '(\\d+)', 104 | validate: (value) => Number(value), 105 | defaultValue: undefined 106 | }, 107 | { 108 | name: 'limit', 109 | pattern: '(\\d+)', 110 | validate: (value) => Number(value), 111 | defaultValue: 99 112 | } 113 | ], 114 | validate: (options) => { 115 | // to overrides limit 116 | if (options.to !== undefined && options.limit !== undefined) { 117 | delete options.limit; 118 | } 119 | 120 | // to is always derived from from + limit 121 | if (options.limit !== undefined) { 122 | options.to = options.from + options.limit; 123 | delete options.limit; 124 | } 125 | 126 | // ensure both from+to are always set 127 | if (options.from !== undefined && options.to === undefined) { 128 | options.to = options.from + 99; 129 | } else if (options.to !== undefined && options.from === undefined) { 130 | options.from = options.to - 
99; 131 | } 132 | 133 | // don't let download too much data, but not even too few 134 | if (options.from !== undefined && options.to !== undefined) { 135 | options.from = Math.max(options.from, 0); 136 | 137 | options.to = Math.min(options.to, options.from + 1000); 138 | options.to = Math.max(options.to, options.from + 1); 139 | } 140 | 141 | return options; 142 | } 143 | }; 144 | 145 | const functionMatch = value.match( 146 | `^${parseConfiguration.namelessOption.pattern}(?:\\s*,\\s*(.+))?$` 147 | ); 148 | 149 | if (functionMatch) { 150 | const parsedOptions = {}; 151 | parsedOptions[parseConfiguration.namelessOption.name] = functionMatch[1]; 152 | 153 | const namedOptions = functionMatch[2]; 154 | const namedOptionsPattern = parseConfiguration.namedOptions 155 | .reduce((acc, option) => { 156 | if (option.patterns) { 157 | return [ 158 | ...acc, 159 | ...option.patterns.map((pattern) => ({ 160 | name: option.name, 161 | pattern 162 | })) 163 | ]; 164 | } else { 165 | return [...acc, option]; 166 | } 167 | }, []) 168 | .map((option) => { 169 | return `(?:(${option.name})=${option.pattern})`; 170 | }) 171 | .join('|'); 172 | const namedOptionsRegex = RegExp(namedOptionsPattern, 'g'); 173 | const namedOptionsValidators = parseConfiguration.namedOptions.reduce((acc, d) => { 174 | acc[d.name] = d.validate; 175 | return acc; 176 | }, {}); 177 | 178 | let matches; 179 | while ((matches = namedOptionsRegex.exec(namedOptions)) !== null) { 180 | for (let i = 1; i < matches.length; i = i + 2) { 181 | if (matches[i]) { 182 | parsedOptions[matches[i]] = namedOptionsValidators[matches[i]]( 183 | matches[i + 1] 184 | ); 185 | } 186 | } 187 | } 188 | 189 | parseConfiguration.namedOptions.forEach((option) => { 190 | if (parsedOptions[option.name] === undefined) { 191 | parsedOptions[option.name] = option.defaultValue; 192 | } 193 | }); 194 | 195 | const validatedOptions = parseConfiguration.validate(parsedOptions); 196 | 197 | return validatedOptions; 198 | } else { 199 | return null; 200 | } 201 | } 202 | 203 | default: 204 | console.assert( 205 | false, 206 | 'Options are not supported for any variable function other than "label_values"' 207 | ); 208 | return null; 209 | } 210 | } 211 | 212 | function formatValue(value) { 213 | return parseLabelValue(value); 214 | } 215 | 216 | function formatQuotedValue(value) { 217 | const parsed = parseLabelValue(value); 218 | 219 | // encapsulate value within double-quotes to make the output valid with both strings and null values 220 | // also, null values must be returned as "null" strings 221 | return parsed ? `"${parsed}"` : `${parsed}`; 222 | } 223 | 224 | function parseLabelValue(labelValue) { 225 | return labelValue === FormatterService.NULL_TEXT ? null : labelValue; 226 | } 227 | -------------------------------------------------------------------------------- /src/time_service.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright 2018 Draios Inc. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | // 16 | import ApiService from './api_service'; 17 | 18 | export default class DataService { 19 | static async validateTimeWindow(backend, userTime) { 20 | return Promise.all([ 21 | ApiService.send(backend, { 22 | url: `api/history/timelines` 23 | }), 24 | ApiService.send(backend, { 25 | url: `api/v2/history/timelines/alignments` 26 | }) 27 | ]).then((responses) => { 28 | const requestTime = getRequestTime(responses[0].data, responses[1].data, userTime); 29 | 30 | if (requestTime) { 31 | return requestTime; 32 | } else { 33 | throw 'Unable to validate request time'; 34 | } 35 | }); 36 | } 37 | 38 | static async queryTimelines(backend) { 39 | return Promise.all([ 40 | ApiService.send(backend, { 41 | url: `api/history/timelines` 42 | }), 43 | ApiService.send(backend, { 44 | url: `api/v2/history/timelines/alignments` 45 | }) 46 | ]).then((responses) => { 47 | return { 48 | timelines: responses[0].data, 49 | alignments: responses[1].data 50 | }; 51 | }); 52 | } 53 | } 54 | 55 | function getRequestTime(timelines, alignments, userTime) { 56 | console.assert(userTime && userTime.from && userTime.to, 'Argument userTime is missing'); 57 | if (!(userTime && userTime.from && userTime.to)) { 58 | return null; 59 | } 60 | 61 | const fromUs = userTime.from * 1000000; 62 | const toUs = userTime.to * 1000000; 63 | const timespan = toUs - fromUs; 64 | 65 | // 66 | // Find aligment to use given timespan ONLY 67 | // 68 | let validAlignments = alignments.filter((a) => { 69 | return timespan <= a.max * 1000000; 70 | }); 71 | 72 | if (validAlignments.length === 0) { 73 | return null; 74 | } 75 | 76 | // 77 | // Set min sampling 78 | // 79 | const minSampling = validAlignments[0].sampling * 1000000; 80 | 81 | // 82 | // Filter timelines so that sampling is valid, and the requested time window is partially or 83 | // entirely overlapping with a given timeline 84 | // 85 | const validTimelines = timelines.agents.filter((t) => { 86 | return ( 87 | t.from !== null && 88 | t.to !== null && 89 | minSampling <= t.sampling && 90 | ((fromUs <= t.from && toUs >= t.from) || 91 | (fromUs >= t.from && toUs <= t.to) || 92 | (fromUs <= t.to && toUs >= t.to)) 93 | ); 94 | }); 95 | 96 | if (validTimelines.length === 0) { 97 | return null; 98 | } 99 | 100 | // 101 | // Find minimum sampling (ie. 
highest resolution) available given the timespan and alignments 102 | // 103 | const sampling = validTimelines[0].sampling / 1000000; 104 | 105 | // 106 | // Find alignments to use given timespan AND sampling 107 | // 108 | validAlignments = validAlignments.filter((a) => { 109 | return a.sampling >= sampling; 110 | }); 111 | 112 | if (validAlignments.length === 0) { 113 | return null; 114 | } 115 | 116 | // 117 | // Align time window with required alignment 118 | // 119 | const alignTo = validAlignments[0].alignTo * 1000000; 120 | const alignedFrom = Math.trunc((Math.trunc(fromUs / alignTo) * alignTo) / 1000000); 121 | const alignedTo = Math.trunc((Math.trunc(toUs / alignTo) * alignTo) / 1000000); 122 | 123 | // 124 | // Adjust time window according to timeline (might miss first or last portion) 125 | // 126 | const requestTime = { 127 | from: Math.max(alignedFrom, validTimelines[0].from / 1000000), 128 | to: Math.min(alignedTo, validTimelines[0].to / 1000000) 129 | }; 130 | 131 | if (userTime.sampling) { 132 | // use the highest data resolution available to display data 133 | // this comes from the valid timeline with lowest sampling time 134 | // (NOTE: timelines.agents is assumed to be sorted by `sampling` property in ascending mode) 135 | requestTime.sampling = Math.trunc(sampling); 136 | } 137 | 138 | return requestTime; 139 | } 140 | --------------------------------------------------------------------------------
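As a rough illustration of the alignment arithmetic in getRequestTime above (the numbers are made up, not taken from the repository): the raw window is truncated to the alignment boundary in microseconds and then converted back to seconds before being clamped to the available timeline.

```js
// Hedged sketch of the truncation performed by getRequestTime; all values are hypothetical.
const alignTo = 10 * 1000000;            // a 10-second alignment, expressed in microseconds
const fromUs = 1525000003 * 1000000;     // raw "from" timestamp, microseconds
const toUs = 1525003607 * 1000000;       // raw "to" timestamp, microseconds

const alignedFrom = Math.trunc((Math.trunc(fromUs / alignTo) * alignTo) / 1000000); // 1525000000 s
const alignedTo = Math.trunc((Math.trunc(toUs / alignTo) * alignTo) / 1000000);     // 1525003600 s
```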