├── .dockerignore ├── .drone.star ├── .drone.yml ├── .eslintrc ├── .github ├── ISSUE_TEMPLATE │ ├── 1-bug_report.md │ ├── 2-enhancement.md │ └── config.yml └── workflows │ └── snyk.yml ├── .gitignore ├── .nvmrc ├── .prettierignore ├── .prettierrc.js ├── .prettierrc.json ├── CHANGELOG.md ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── debian.Dockerfile ├── default.json ├── dev.json ├── devenv ├── docker │ ├── custom-config │ │ ├── config.json │ │ └── docker-compose.yaml │ ├── drone │ │ └── docker-compose.yaml │ ├── ha │ │ ├── .env │ │ ├── README.md │ │ ├── config.json │ │ ├── docker-compose.yaml │ │ ├── grafana │ │ │ └── provisioning │ │ │ │ ├── dashboards │ │ │ │ ├── dashboards.yaml │ │ │ │ └── general │ │ │ │ │ ├── docker_monitoring.json │ │ │ │ │ ├── image_rendering_service.json │ │ │ │ │ ├── mysql_overview.json │ │ │ │ │ └── nodejs_application.json │ │ │ │ └── datasources │ │ │ │ └── datasources.yaml │ │ └── prometheus │ │ │ └── prometheus.yml │ ├── ratelimiter │ │ ├── config.json │ │ └── docker-compose.yaml │ ├── simple │ │ └── docker-compose.yaml │ ├── test │ │ ├── README.md │ │ ├── dashboards.yaml │ │ └── docker-compose.yaml │ └── tracing │ │ ├── config.json │ │ ├── docker-compose.yaml │ │ ├── grafana-datasources.yaml │ │ ├── prometheus.yaml │ │ └── tempo.yaml └── loadtest │ ├── README.md │ ├── fixtures │ └── graph_panel.json │ ├── modules │ ├── client.js │ └── util.js │ ├── render_test.js │ └── run.sh ├── docs ├── building_from_source.md ├── index.md ├── package_plugin_as_single_executable.md ├── release_new_version.md └── testing.md ├── img └── icon.png ├── jest.config.js ├── mixin ├── .gitignore ├── Makefile ├── README.md ├── alerts │ └── alerts.yaml ├── dashboards │ └── image-render-service.json ├── mixin.libsonnet ├── rules │ └── rules.yaml └── scripts │ ├── build.sh │ ├── common.sh │ ├── format.sh │ └── lint.sh ├── package.json ├── plugin.json ├── plugin_start_darwin_amd64 ├── plugin_start_linux_amd64 ├── proto ├── health.proto ├── pluginv2.proto ├── rendererv2.proto └── sanitizer.proto ├── scripts ├── archive_target.sh ├── build_push_docker.sh ├── clean_target.sh ├── createGcomPluginJson.ts ├── download_chrome.js ├── drone │ ├── common.star │ ├── grabpl.star │ ├── pipeline.star │ ├── promotion.star │ ├── provisioning │ │ ├── dashboards │ │ │ ├── all-panels.json │ │ │ ├── dashboards.yaml │ │ │ └── test-dashboard.json │ │ └── datasources │ │ │ └── datasources.yaml │ ├── utils.star │ └── vault.star ├── generate_md5sum.sh ├── get_gh_token.sh ├── package_target.sh ├── pkg.js ├── publish_github_release.sh ├── push-to-gcom.sh ├── rename_executable.js └── run_tests.sh ├── src ├── app.ts ├── browser │ ├── browser.test.ts │ ├── browser.ts │ ├── clustered.ts │ ├── error.ts │ ├── index.ts │ ├── pdf.ts │ └── reusable.ts ├── config │ ├── config.ts │ ├── rendering.ts │ └── security.ts ├── exit.ts ├── logger.ts ├── node-plugin │ ├── index.ts │ └── lib │ │ ├── server.ts │ │ └── types.ts ├── plugin │ └── v2 │ │ ├── config.ts │ │ ├── grpc_plugin.ts │ │ └── types.ts ├── sanitizer │ ├── Sanitizer.ts │ └── types.ts ├── service │ ├── config.ts │ ├── http-server.integration.test.ts │ ├── http-server.ts │ ├── metrics.ts │ ├── middlewares.ts │ └── ratelimiter.ts ├── tracing.ts └── types.ts ├── tests ├── reporter.js └── testdata │ ├── error.png │ ├── full-page-screenshot.png │ ├── graph.png │ └── table.png ├── tsconfig.json └── yarn.lock /.dockerignore: -------------------------------------------------------------------------------- 1 | .circleci 2 | .github 3 | artifacts 4 | build 5 | devenv 
6 | dist 7 | docs 8 | img 9 | node_modules 10 | scripts 11 | .prettierignore 12 | .prettierrc.json 13 | dev.json 14 | Dockerfile 15 | debian.Dockerfile 16 | Makefile 17 | plugin_start* 18 | tslint.json 19 | -------------------------------------------------------------------------------- /.drone.star: -------------------------------------------------------------------------------- 1 | # To generate the .drone.yml file: 2 | # 1. Modify the *.star definitions 3 | # 2. Login to drone and export the env variables (token and server) shown here: https://drone.grafana.net/account 4 | # 3. Run `make drone` 5 | # More information about this process here: https://github.com/grafana/deployment_tools/blob/master/docs/infrastructure/drone/signing.md 6 | 7 | load('scripts/drone/pipeline.star', 'prs_pipeline', 'master_pipeline', 'promotion_pipeline') 8 | load('scripts/drone/vault.star', 'secrets') 9 | 10 | 11 | def main(ctx): 12 | return ( 13 | prs_pipeline() 14 | + master_pipeline() 15 | + promotion_pipeline() 16 | + secrets() 17 | ) 18 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "root": true, 3 | "parser": "@typescript-eslint/parser", 4 | "plugins": [ 5 | "@typescript-eslint" 6 | ], 7 | "extends": ["@grafana/eslint-config"] 8 | } 9 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/1-bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Report a bug 4 | labels: 'type: bug' 5 | --- 6 | 7 | 12 | 13 | **What happened**: 14 | 15 | **What you expected to happen**: 16 | 17 | **How to reproduce it (as minimally and precisely as possible)**: 18 | 19 | **Anything else we need to know?**: 20 | 21 | **Environment**: 22 | - Grafana Image Renderer version: 23 | - Grafana version: 24 | - Installed plugin or remote renderer service: 25 | - OS Grafana Image Renderer is installed on: 26 | - User OS & Browser: 27 | - Others: 28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/2-enhancement.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Enhancement request 3 | about: Suggest an enhancement or new feature 4 | labels: 'type: feature request' 5 | --- 6 | 7 | 8 | 9 | **What would you like to be added**: 10 | 11 | **Why is this needed**: 12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Documentation 4 | url: https://grafana.com/docs/grafana/latest/administration/image_rendering 5 | about: For help, please read documentation and troubleshooting guide first. 6 | - name: Questions & Help 7 | url: https://community.grafana.com 8 | about: Please ask and answer questions here. 
9 | -------------------------------------------------------------------------------- /.github/workflows/snyk.yml: -------------------------------------------------------------------------------- 1 | name: Snyk Monitor Scanning 2 | on: 3 | release: 4 | types: [published] 5 | push: 6 | branches: 7 | - 'main' 8 | - 'master' 9 | workflow_dispatch: 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | snyk-scan-ci: 16 | uses: 'grafana/security-github-actions/.github/workflows/snyk_monitor.yml@main' 17 | secrets: 18 | SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | lib-cov 2 | *.seed 3 | *.log 4 | *.csv 5 | *.dat 6 | *.out 7 | *.pid 8 | *.gz 9 | *.swp 10 | 11 | pids 12 | logs 13 | results 14 | tmp 15 | 16 | # Build 17 | dist 18 | build 19 | artifacts 20 | 21 | # Dependency directory 22 | node_modules 23 | bower_components 24 | 25 | # Editors 26 | .idea 27 | *.iml 28 | 29 | # OS metadata 30 | .DS_Store 31 | Thumbs.db 32 | 33 | # Ignore built ts files 34 | dist/**/* 35 | 36 | # Ignore output from coverage report 37 | coverage 38 | 39 | scripts/tmp 40 | 41 | tests/testdata/diff_* 42 | 43 | cache 44 | 45 | /devenv/docker/tracing/tempo-data/ -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 20.19.0 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | .git 2 | .github 3 | artifacts 4 | build 5 | dist 6 | docker 7 | docs 8 | node_modules 9 | proto 10 | scripts 11 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | ...require('.prettierrc.json'), 3 | }; 4 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "es5", 3 | "singleQuote": true, 4 | "printWidth": 150 5 | } 6 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Base stage 2 | FROM node:20-alpine AS base 3 | 4 | ENV CHROME_BIN="/usr/bin/chromium-browser" 5 | ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD="true" 6 | 7 | # Folder used by puppeteer to write temporal files 8 | ENV XDG_CONFIG_HOME=/tmp/.chromium 9 | ENV XDG_CACHE_HOME=/tmp/.chromium 10 | 11 | WORKDIR /usr/src/app 12 | 13 | RUN apk --no-cache upgrade && \ 14 | apk add --no-cache udev ttf-opensans unifont chromium chromium-swiftshader ca-certificates dumb-init && \ 15 | # Remove NPM-related files and directories 16 | rm -rf /usr/local/lib/node_modules/npm && \ 17 | rm -rf /usr/local/bin/npm && \ 18 | rm -rf /usr/local/bin/npx && \ 19 | rm -rf /root/.npm && \ 20 | rm -rf /root/.node-gyp && \ 21 | # Clean up 22 | rm -rf /tmp/* 23 | 24 | # Build stage 25 | FROM base AS build 26 | 27 | COPY . 
./ 28 | 29 | RUN yarn install --pure-lockfile 30 | RUN yarn run build 31 | 32 | # Production dependencies stage 33 | FROM base AS prod-dependencies 34 | 35 | COPY package.json yarn.lock ./ 36 | RUN yarn install --pure-lockfile --production 37 | 38 | # Final stage 39 | FROM base 40 | 41 | LABEL maintainer="Grafana team " 42 | LABEL org.opencontainers.image.source="https://github.com/grafana/grafana-image-renderer" 43 | 44 | ARG GF_UID="472" 45 | ARG GF_GID="472" 46 | ENV GF_PATHS_HOME="/usr/src/app" 47 | 48 | WORKDIR $GF_PATHS_HOME 49 | 50 | RUN addgroup -S -g $GF_GID grafana && \ 51 | adduser -S -u $GF_UID -G grafana grafana && \ 52 | mkdir -p "$GF_PATHS_HOME" && \ 53 | chown -R grafana:grafana "$GF_PATHS_HOME" 54 | 55 | ENV NODE_ENV=production 56 | 57 | COPY --from=prod-dependencies /usr/src/app/node_modules node_modules 58 | COPY --from=build /usr/src/app/build build 59 | COPY --from=build /usr/src/app/proto proto 60 | COPY --from=build /usr/src/app/default.json config.json 61 | COPY --from=build /usr/src/app/plugin.json plugin.json 62 | 63 | EXPOSE 8081 64 | 65 | USER grafana 66 | 67 | ENTRYPOINT ["dumb-init", "--"] 68 | CMD ["node", "build/app.js", "server", "--config=config.json"] 69 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2017 Grafana Labs 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all clean deps build clean_package package archive build_package docker-alpine docker-debian 2 | 3 | ARCH = darwin-x64-unknown 4 | SKIP_CHROMIUM = 5 | OUT = 6 | DOCKER_TAG = dev 7 | 8 | all: clean build 9 | 10 | clean: 11 | rm -rf build 12 | 13 | deps: node_modules 14 | 15 | node_modules: package.json yarn.lock ## Install node modules. 
16 | @echo "install frontend dependencies" 17 | yarn install --pure-lockfile --no-progress 18 | 19 | build: 20 | yarn build 21 | 22 | clean_package: 23 | ./scripts/clean_target.sh ${ARCH} ${OUT} 24 | 25 | package: 26 | ./scripts/package_target.sh ${ARCH} ${SKIP_CHROMIUM} ${OUT} 27 | 28 | archive: 29 | ./scripts/archive_target.sh ${ARCH} ${OUT} 30 | 31 | build_package: clean clean_package build package archive 32 | 33 | docker-alpine: 34 | docker build -t grafana/grafana-image-renderer:${DOCKER_TAG} . 35 | 36 | docker-debian: 37 | docker build -t grafana/grafana-image-renderer:${DOCKER_TAG}-debian -f debian.Dockerfile . 38 | 39 | # This repository's configuration is protected (https://readme.drone.io/signature/). 40 | # Use this make target to regenerate the configuration YAML files when 41 | # you modify starlark files. 42 | drone: 43 | drone starlark --format 44 | drone lint .drone.yml --trusted 45 | drone --server https://drone.grafana.net sign --save grafana/grafana-image-renderer 46 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | A Grafana backend plugin that handles rendering panels and dashboards to PNGs using a headless browser (Chromium). 2 | 3 | ## Requirements 4 | 5 | ### Supported operating systems 6 | 7 | - Linux (x64) 8 | - Windows (x64) 9 | - Mac OS X (x64) 10 | 11 | For Mac ARM64, you need to [build the plugin from source](https://github.com/grafana/grafana-image-renderer/blob/master/docs/building_from_source.md) or use the [remote rendering installation](https://github.com/grafana/grafana-image-renderer?tab=readme-ov-file#remote-rendering-service-installation). 12 | 13 | ### Dependencies 14 | 15 | This plugin is packaged as a single executable that bundles the [Node.js](https://nodejs.org/) runtime and the [Chromium browser](https://www.chromium.org/Home). 16 | This means that you don't need to have Node.js and Chromium installed on your system for the plugin to function. 17 | 18 | However, the [Chromium browser](https://www.chromium.org/) depends on certain libraries. If you don't have all of those libraries installed on your 19 | system, you may see some errors when you try to render an image. For more information, including troubleshooting help, refer to 20 | [Grafana Image Rendering documentation](https://grafana.com/docs/grafana/latest/image-rendering/). 21 | 22 | ### Memory requirements 23 | 24 | Rendering images requires a lot of memory, mainly because Grafana creates browser instances in the background for the actual rendering. 25 | We recommend a minimum of 16GB of free memory on the system rendering images. 26 | 27 | Rendering multiple images in parallel requires an even bigger memory footprint. You can use the remote rendering service to render images on a remote system, so your local system resources are not affected. 28 | 29 | ## Plugin installation 30 | 31 | You can install the plugin using the Grafana CLI (recommended) or with the Grafana Docker image. 32 | 33 | ### Grafana CLI (recommended) 34 | 35 | ```bash 36 | grafana-cli plugins install grafana-image-renderer 37 | ``` 38 | 39 | ### Grafana Docker image 40 | 41 | This plugin is not compatible with the current Grafana Docker image and requires additional system-level dependencies. We recommend setting up another Docker container for rendering and using remote rendering instead. For instructions, refer to [Run in Docker](#run-in-docker). 
42 | 43 | If you still want to install the plugin with the Grafana Docker image, refer to the instructions on building a custom Grafana image in [Grafana Docker documentation](https://grafana.com/docs/grafana/latest/setup-grafana/configure-docker/#build-a-custom-grafana-docker-image). 44 | 45 | ## Remote rendering service installation 46 | 47 | > **Note:** Requires an internet connection. 48 | 49 | You can run this plugin as a remote HTTP rendering service. In this setup, Grafana renders an image by making an HTTP request to the remote rendering service, which in turn renders the image and returns it in the HTTP response to Grafana. 50 | 51 | You can run the remote HTTP rendering service using Docker or as a standalone Node.js application. 52 | 53 | ### Run in Docker 54 | 55 | Grafana image renderer Docker images are published on [Docker Hub](https://hub.docker.com/r/grafana/grafana-image-renderer). 56 | 57 | The following example shows how you can run Grafana and the remote HTTP rendering service in two separate Docker containers using Docker Compose. 58 | 59 | 1. Create a `docker-compose.yml` with the following content: 60 | 61 | ```yaml 62 | version: '2' 63 | 64 | services: 65 | grafana: 66 | image: grafana/grafana:latest 67 | ports: 68 | - '3000:3000' 69 | environment: 70 | GF_RENDERING_SERVER_URL: http://renderer:8081/render 71 | GF_RENDERING_CALLBACK_URL: http://grafana:3000/ 72 | GF_LOG_FILTERS: rendering:debug 73 | renderer: 74 | image: grafana/grafana-image-renderer:latest 75 | ports: 76 | - 8081 77 | ``` 78 | 79 | 1. Next, run Docker Compose: 80 | 81 | ```bash 82 | docker-compose up 83 | ``` 84 | 85 | ### Run as a standalone Node.js application 86 | 87 | The following example describes how to build and run the remote HTTP rendering service as a standalone Node.js application and configure Grafana appropriately. 88 | 89 | 1. Clone the [Grafana image renderer plugin](https://github.com/grafana/grafana-image-renderer/) Git repository. 90 | 1. Install dependencies and build: 91 | 92 | ```bash 93 | yarn install --pure-lockfile 94 | yarn run build 95 | ``` 96 | 97 | 1. Run the server: 98 | - Using default configuration 99 | 100 | ```bash 101 | node build/app.js server 102 | ``` 103 | - Using custom [configuration](https://grafana.com/docs/grafana/latest/image-rendering/#configuration) 104 | 105 | ```bash 106 | node build/app.js server --config=dev.json 107 | ``` 108 | - Using environment variables 109 | 110 | ```bash 111 | HTTP_PORT=8085 LOG_LEVEL=debug node build/app.js server 112 | ``` 113 | 114 | 1. Update Grafana configuration: 115 | 116 | ``` 117 | [rendering] 118 | server_url = http://localhost:8081/render 119 | callback_url = http://localhost:3000/ 120 | ``` 121 | 122 | 1. Restart Grafana. 123 | 124 | ## Security 125 | 126 | Access to the rendering endpoints is restricted to requests providing an auth token. This token should be configured in the Grafana configuration file and the renderer configuration file. This token is important when you run the plugin in remote rendering mode to avoid unauthorized file disclosure (see [CVE-2022-31176](https://github.com/grafana/grafana-image-renderer/security/advisories/GHSA-2cfh-233g-m4c5)). 127 | 128 | See [Grafana Image Rendering documentation](https://grafana.com/docs/grafana/latest/image-rendering/#security) to configure this secret token. The default value `-` is configured on both Grafana and the image renderer when you get started, but we strongly recommend that you update it to a more secure value. 
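For example, a minimal sketch of a matching token setup (the Grafana option name `renderer_token` below is an assumption; the renderer-side key `service.security.authToken` is the one shipped in this repository's `default.json`):

```
# grafana.ini (sketch)
[rendering]
server_url = http://localhost:8081/render
callback_url = http://localhost:3000/
renderer_token = <a long, random shared secret>
```

Then set `service.security.authToken` in the renderer's `config.json` (or the equivalent environment variable) to the same value so that both sides agree on the shared secret.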
129 | 130 | ## Configuration 131 | 132 | For available configuration settings, please refer to [Grafana Image Rendering documentation](https://grafana.com/docs/grafana/latest/image-rendering/#configuration). 133 | 134 | ## Troubleshooting 135 | 136 | For troubleshooting help, refer to 137 | [Grafana Image Rendering troubleshooting documentation](https://grafana.com/docs/grafana/latest/image-rendering/troubleshooting/). -------------------------------------------------------------------------------- /debian.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:20-slim AS base 2 | 3 | ENV CHROME_BIN="/usr/bin/google-chrome-stable" 4 | ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD="true" 5 | 6 | # Folder used by puppeteer to write temporal files 7 | ENV XDG_CONFIG_HOME=/tmp/.chromium 8 | ENV XDG_CACHE_HOME=/tmp/.chromium 9 | 10 | WORKDIR /usr/src/app 11 | 12 | RUN apt-get update 13 | RUN apt-get install -y wget gnupg \ 14 | && wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | gpg --dearmor -o /usr/share/keyrings/googlechrome-linux-keyring.gpg \ 15 | && sh -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/googlechrome-linux-keyring.gpg] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \ 16 | && apt-get update \ 17 | && apt-get install -y google-chrome-stable fonts-ipafont-gothic fonts-wqy-zenhei fonts-thai-tlwg fonts-khmeros fonts-kacst fonts-freefont-ttf libxss1 \ 18 | --no-install-recommends \ 19 | && rm -rf /var/lib/apt/lists/* 20 | 21 | ADD https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64 /usr/local/bin/dumb-init 22 | RUN chmod +x /usr/local/bin/dumb-init 23 | 24 | # Build stage 25 | FROM base AS build 26 | 27 | COPY . 
./ 28 | 29 | RUN yarn install --pure-lockfile 30 | RUN yarn run build 31 | 32 | # Production dependencies stage 33 | FROM base AS prod-dependencies 34 | 35 | COPY package.json yarn.lock ./ 36 | RUN yarn install --pure-lockfile --production 37 | 38 | # Final stage 39 | FROM base 40 | LABEL org.opencontainers.image.source="https://github.com/grafana/grafana-image-renderer" 41 | 42 | ENV NODE_ENV=production 43 | 44 | COPY --from=prod-dependencies /usr/src/app/node_modules node_modules 45 | COPY --from=build /usr/src/app/build build 46 | COPY --from=build /usr/src/app/proto proto 47 | COPY --from=build /usr/src/app/default.json config.json 48 | COPY --from=build /usr/src/app/plugin.json plugin.json 49 | 50 | EXPOSE 8081 51 | 52 | ENTRYPOINT ["dumb-init", "--"] 53 | 54 | CMD ["node", "build/app.js", "server", "--config=config.json"] 55 | -------------------------------------------------------------------------------- /default.json: -------------------------------------------------------------------------------- 1 | { 2 | "service": { 3 | "host": null, 4 | "port": 8081, 5 | "protocol": "http", 6 | "certFile": "", 7 | "certKey": "", 8 | 9 | "metrics": { 10 | "enabled": false, 11 | "collectDefaultMetrics": true, 12 | "requestDurationBuckets": [1, 5, 7, 9, 11, 13, 15, 20, 30] 13 | }, 14 | 15 | "logging": { 16 | "level": "info", 17 | "console": { 18 | "json": true, 19 | "colorize": false 20 | } 21 | }, 22 | 23 | "security": { 24 | "authToken": "-" 25 | }, 26 | 27 | "rateLimiter": { 28 | "enabled": false, 29 | "redisHost": null, 30 | "redisPort": null, 31 | "requestsPerSecond": 5 32 | } 33 | }, 34 | "rendering": { 35 | "chromeBin": null, 36 | "args": ["--no-sandbox", "--disable-gpu", "--use-gl=swiftshader"], 37 | "ignoresHttpsErrors": false, 38 | 39 | "timezone": null, 40 | "acceptLanguage": null, 41 | "width": 1000, 42 | "height": 500, 43 | "deviceScaleFactor": 1, 44 | "maxWidth": 3080, 45 | "maxHeight": 3000, 46 | "maxDeviceScaleFactor": 4, 47 | "pageZoomLevel": 1, 48 | "headed": false, 49 | 50 | "mode": "default", 51 | "emulateNetworkConditions": false, 52 | "clustering": { 53 | "monitor": false, 54 | "mode": "browser", 55 | "maxConcurrency": 5, 56 | "timeout": 30 57 | }, 58 | 59 | "verboseLogging": false, 60 | "dumpio": false, 61 | "timingMetrics": false, 62 | 63 | "tracing": { 64 | "url": "", 65 | "serviceName": "" 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /dev.json: -------------------------------------------------------------------------------- 1 | { 2 | "service": { 3 | "host": "localhost", 4 | "port": 8081, 5 | 6 | "metrics": { 7 | "enabled": true, 8 | "collectDefaultMetrics": true, 9 | "requestDurationBuckets": [1, 5, 7, 9, 11, 13, 15, 20, 30] 10 | }, 11 | 12 | "logging": { 13 | "level": "debug", 14 | "console": { 15 | "json": false, 16 | "colorize": true 17 | } 18 | }, 19 | 20 | "rateLimiter": { 21 | "enabled": false, 22 | "requestsPerSecond": 5 23 | } 24 | }, 25 | "rendering": { 26 | "chromeBin": null, 27 | "args": ["--no-sandbox", "--disable-setuid-sandbox", "--disable-gpu", "--use-gl=swiftshader"], 28 | "ignoresHttpsErrors": false, 29 | 30 | "timezone": null, 31 | "acceptLanguage": null, 32 | "width": 1000, 33 | "height": 500, 34 | "deviceScaleFactor": 2, 35 | "maxWidth": 1980, 36 | "maxHeight": 8000, 37 | "maxDeviceScaleFactor": 10, 38 | "headed": false, 39 | 40 | "mode": "default", 41 | "emulateNetworkConditions": false, 42 | "networkConditions": { 43 | "downloadThroughput": 4000000, 44 | "uploadThroughput": 4000000, 45 
| "latency": 100, 46 | "offline": false 47 | }, 48 | 49 | "clustering": { 50 | "monitor": true, 51 | "mode": "browser", 52 | "maxConcurrency": 5, 53 | "timeout": 30 54 | }, 55 | 56 | "verboseLogging": true, 57 | "dumpio": false, 58 | "timingMetrics": true, 59 | 60 | "tracing": { 61 | "url": "http://localhost:4318/v1/traces", 62 | "serviceName": "" 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /devenv/docker/custom-config/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "service": { 3 | "host": null, 4 | "port": 8081, 5 | 6 | "metrics": { 7 | "enabled": true, 8 | "collectDefaultMetrics": true, 9 | "requestDurationBuckets": [1, 5, 7, 9, 11, 13, 15, 20, 30] 10 | }, 11 | 12 | "logging": { 13 | "level": "debug", 14 | "console": { 15 | "json": true, 16 | "colorize": false 17 | } 18 | } 19 | }, 20 | "rendering": { 21 | "chromeBin": null, 22 | "args": [ 23 | "--no-sandbox", 24 | "--disable-setuid-sandbox", 25 | "--disable-gpu", 26 | "--use-gl=swiftshader" 27 | ], 28 | "ignoresHttpsErrors": false, 29 | 30 | "timezone": null, 31 | "acceptLanguage": null, 32 | "width": 1000, 33 | "height": 500, 34 | "deviceScaleFactor": 1, 35 | "maxWidth": 3080, 36 | "maxHeight": 3000, 37 | "maxDeviceScaleFactor": 4, 38 | 39 | "mode": "clustered", 40 | "clustering": { 41 | "mode": "context", 42 | "maxConcurrency": 5, 43 | "timeout": 30 44 | }, 45 | 46 | "verboseLogging": false, 47 | "dumpio": false, 48 | 49 | "tracing": { 50 | "url": "", 51 | "serviceName": "" 52 | } 53 | } 54 | } -------------------------------------------------------------------------------- /devenv/docker/custom-config/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | services: 4 | grafana: 5 | image: grafana/grafana:latest 6 | ports: 7 | - 3000 8 | environment: 9 | GF_RENDERING_SERVER_URL: http://renderer:8081/render 10 | GF_RENDERING_CALLBACK_URL: http://grafana:3000/ 11 | GF_LOG_FILTERS: rendering:debug 12 | renderer: 13 | image: grafana/grafana-image-renderer:latest 14 | ports: 15 | - 8081 16 | volumes: 17 | - ./config.json:/usr/src/app/config.json 18 | -------------------------------------------------------------------------------- /devenv/docker/drone/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | services: 4 | docker-puppeteer: 5 | image: grafana/docker-puppeteer:pre-node-20 6 | volumes: 7 | - "../../..:/drone/src" 8 | stdin_open: true # docker run -i 9 | tty: true # docker run -t 10 | entrypoint: bash 11 | grafana: 12 | image: grafana/grafana-enterprise:latest 13 | ports: 14 | - 3000:3000 15 | environment: 16 | GF_FEATURE_TOGGLES_ENABLE: renderAuthJWT 17 | volumes: 18 | - ../test/dashboards.yaml:/etc/grafana/provisioning/dashboards/dashboards.yaml 19 | - ../../../scripts/drone/provisioning/dashboards:/usr/share/grafana/dashboards 20 | - ../../../scripts/drone/provisioning/datasources/datasources.yaml:/etc/grafana/provisioning/datasources/datasources.yaml 21 | -------------------------------------------------------------------------------- /devenv/docker/ha/.env: -------------------------------------------------------------------------------- 1 | GRAFANA_VERSION=latest 2 | RENDERER_VERSION=dev 3 | RENDERING_MODE=default 4 | RENDERING_CLUSTERING_MODE=browser 5 | RENDERING_CLUSTERING_MAX_CONCURRENCY=5 
-------------------------------------------------------------------------------- /devenv/docker/ha/README.md: -------------------------------------------------------------------------------- 1 | # Grafana Image Rendering High Availability (HA) test setup 2 | 3 | A set of Docker Compose services that together create a Grafana Image Rendering HA test setup, with the ability to easily 4 | scale the number of Grafana image renderer instances up or down. 5 | 6 | Included services: 7 | 8 | * Grafana 9 | * Grafana image renderer service 10 | * MySQL - Grafana configuration database 11 | * Prometheus - Monitoring of Grafana, also used as the data source for provisioned alert rules 12 | * cAdvisor - Docker monitoring 13 | * Nginx - Reverse proxy for Grafana, Grafana image renderer and Prometheus. Enables browsing the Grafana/renderer/Prometheus UIs using a hostname 14 | 15 | ## Prerequisites 16 | 17 | ### Build the Grafana image renderer Docker container 18 | 19 | Build a Grafana image renderer Docker container and tag it as grafana/grafana-image-renderer:dev. 20 | 21 | ```bash 22 | $ cd <grafana-image-renderer repository> 23 | $ docker build -t grafana/grafana-image-renderer:dev . 24 | ``` 25 | 26 | ### Virtual host names 27 | 28 | #### Alternative 1 - Use dnsmasq 29 | 30 | ```bash 31 | $ sudo apt-get install dnsmasq 32 | $ echo 'address=/loc/127.0.0.1' | sudo tee /etc/dnsmasq.d/dnsmasq-loc.conf > /dev/null 33 | $ sudo /etc/init.d/dnsmasq restart 34 | $ ping whatever.loc 35 | PING whatever.loc (127.0.0.1) 56(84) bytes of data. 36 | 64 bytes from localhost (127.0.0.1): icmp_seq=1 ttl=64 time=0.076 ms 37 | --- whatever.loc ping statistics --- 38 | 1 packet transmitted, 1 received, 0% packet loss, time 1998ms 39 | ``` 40 | 41 | #### Alternative 2 - Manually update /etc/hosts 42 | 43 | Update your `/etc/hosts` to be able to access the Grafana and/or Prometheus UI using a hostname. 
44 | 45 | ```bash 46 | $ cat /etc/hosts 47 | 127.0.0.1 grafana.loc 48 | 127.0.0.1 renderer.loc 49 | 127.0.0.1 prometheus.loc 50 | ``` 51 | 52 | ## Start services 53 | 54 | ```bash 55 | $ cd <grafana-image-renderer repository>/devenv/docker/ha 56 | $ docker-compose up -d 57 | ``` 58 | 59 | Browse: 60 | * http://grafana.loc/ 61 | * http://renderer.loc/ 62 | * http://prometheus.loc/ 63 | 64 | Check for any errors: 65 | 66 | ```bash 67 | $ docker-compose logs | grep error 68 | ``` 69 | 70 | You can also provide environment variables to select the Grafana and Grafana image renderer Docker image versions: 71 | 72 | ```bash 73 | $ GRAFANA_VERSION=6.5.0 RENDERER_VERSION=1.0.7 docker-compose up -d 74 | ``` 75 | 76 | ### Scale renderer instances up/down 77 | 78 | Scale the number of image renderer instances to `<instances>`: 79 | 80 | ```bash 81 | $ docker-compose up --scale renderer=<instances> -d 82 | # for example 3 instances 83 | $ docker-compose up --scale renderer=3 -d 84 | ``` 85 | -------------------------------------------------------------------------------- /devenv/docker/ha/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "service": { 3 | "port": 8081, 4 | "metrics": { 5 | "enabled": true, 6 | "collectDefaultMetrics": true, 7 | "requestDurationBuckets": [1, 5, 7, 9, 11, 13, 15, 20, 30] 8 | } 9 | }, 10 | "rendering": { 11 | "timezone": null, 12 | "chromeBin": null, 13 | "args": ["--no-sandbox", "--disable-gpu"], 14 | "ignoresHttpsErrors": false, 15 | "mode": "default", 16 | "clustering": { 17 | "mode": "browser", 18 | "maxConcurrency": 5, 19 | "timeout": 30 20 | }, 21 | "timingMetrics": true 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /devenv/docker/ha/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "2.1" 2 | 3 | services: 4 | nginx-proxy: 5 | image: jwilder/nginx-proxy 6 | ports: 7 | - "80:80" 8 | volumes: 9 | - /var/run/docker.sock:/tmp/docker.sock:ro 10 | depends_on: 11 | db: 12 | condition: service_healthy 13 | 14 | db: 15 | image: mysql:5.6 16 | environment: 17 | MYSQL_ROOT_PASSWORD: rootpass 18 | MYSQL_DATABASE: grafana 19 | MYSQL_USER: grafana 20 | MYSQL_PASSWORD: password 21 | command: [mysqld, --character-set-server=utf8mb4, --collation-server=utf8mb4_unicode_ci, --innodb_monitor_enable=all, --max-connections=1001] 22 | ports: 23 | - 3306 24 | healthcheck: 25 | test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"] 26 | timeout: 10s 27 | retries: 10 28 | 29 | mysqld-exporter: 30 | image: prom/mysqld-exporter 31 | environment: 32 | - DATA_SOURCE_NAME=root:rootpass@(db:3306)/ 33 | ports: 34 | - 9104 35 | depends_on: 36 | db: 37 | condition: service_healthy 38 | 39 | grafana: 40 | image: grafana/grafana:${GRAFANA_VERSION}-ubuntu 41 | volumes: 42 | - ./grafana/provisioning/:/etc/grafana/provisioning/ 43 | environment: 44 | - VIRTUAL_HOST=grafana.loc 45 | - GF_SERVER_ROOT_URL=http://grafana.loc 46 | - GF_DATABASE_NAME=grafana 47 | - GF_DATABASE_USER=grafana 48 | - GF_DATABASE_PASSWORD=password 49 | - GF_DATABASE_TYPE=mysql 50 | - GF_DATABASE_HOST=db:3306 51 | - GF_DATABASE_MAX_OPEN_CONN=300 52 | - GF_SERVER_ROUTER_LOGGING=true 53 | - GF_LOG_CONSOLE_FORMAT=json 54 | - GF_LOG_FILTERS=rendering:debug 55 | - GF_RENDERING_SERVER_URL=http://renderer:8081/render 56 | - GF_RENDERING_CALLBACK_URL=http://grafana:3000 57 | ports: 58 | - 3000 59 | links: 60 | - nginx-proxy 61 | depends_on: 62 | db: 63 | condition: service_healthy 64 | 65 | renderer: 66 | image: 
grafana/grafana-image-renderer:${RENDERER_VERSION} 67 | volumes: 68 | - ./config.json:/usr/src/app/config.json 69 | environment: 70 | - VIRTUAL_HOST=renderer.loc 71 | - ENABLE_METRICS=true 72 | - RENDERING_MODE=${RENDERING_MODE} 73 | - RENDERING_CLUSTERING_MODE=${RENDERING_CLUSTERING_MODE} 74 | - RENDERING_CLUSTERING_MAX_CONCURRENCY=${RENDERING_CLUSTERING_MAX_CONCURRENCY} 75 | ports: 76 | - 8081 77 | # mem_limit: 500mb 78 | # memswap_limit: 1gb 79 | 80 | prometheus: 81 | image: prom/prometheus:v2.14.0 82 | volumes: 83 | - ./prometheus/:/etc/prometheus/ 84 | environment: 85 | - VIRTUAL_HOST=prometheus.loc 86 | ports: 87 | - 9090 88 | 89 | cadvisor: 90 | image: google/cadvisor:latest 91 | environment: 92 | - VIRTUAL_HOST=cadvisor.loc 93 | ports: 94 | - 8080 95 | volumes: 96 | - /:/rootfs:ro 97 | - /var/run:/var/run:ro 98 | - /sys:/sys:ro 99 | - /var/lib/docker/:/var/lib/docker:ro 100 | - /dev/disk:/dev/disk/:ro 101 | -------------------------------------------------------------------------------- /devenv/docker/ha/grafana/provisioning/dashboards/dashboards.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | providers: 4 | - name: 'General' 5 | type: file 6 | options: 7 | path: /etc/grafana/provisioning/dashboards/general 8 | -------------------------------------------------------------------------------- /devenv/docker/ha/grafana/provisioning/datasources/datasources.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | datasources: 4 | - name: Testdata 5 | isDefault: true 6 | type: testdata 7 | 8 | - name: Prometheus 9 | type: prometheus 10 | access: proxy 11 | url: http://prometheus:9090 12 | jsonData: 13 | timeInterval: 10s 14 | queryTimeout: 30s 15 | httpMethod: POST 16 | -------------------------------------------------------------------------------- /devenv/docker/ha/prometheus/prometheus.yml: -------------------------------------------------------------------------------- 1 | # my global config 2 | global: 3 | scrape_interval: 10s # By default, scrape targets every 15 seconds. 4 | evaluation_interval: 10s # By default, scrape targets every 15 seconds. 5 | # scrape_timeout is set to the global default (10s). 
6 | 7 | scrape_configs: 8 | - job_name: 'prometheus' 9 | static_configs: 10 | - targets: ['localhost:9090'] 11 | 12 | - job_name: 'grafana' 13 | dns_sd_configs: 14 | - names: 15 | - 'grafana' 16 | type: 'A' 17 | port: 3000 18 | refresh_interval: 10s 19 | 20 | - job_name: 'renderer' 21 | dns_sd_configs: 22 | - names: 23 | - 'renderer' 24 | type: 'A' 25 | port: 8081 26 | refresh_interval: 10s 27 | 28 | - job_name: 'mysql' 29 | dns_sd_configs: 30 | - names: 31 | - 'mysqld-exporter' 32 | type: 'A' 33 | port: 9104 34 | refresh_interval: 10s 35 | 36 | - job_name: 'cadvisor' 37 | dns_sd_configs: 38 | - names: 39 | - 'cadvisor' 40 | type: 'A' 41 | port: 8080 42 | refresh_interval: 10s 43 | -------------------------------------------------------------------------------- /devenv/docker/ratelimiter/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "service": { 3 | "host": null, 4 | "port": 8081, 5 | 6 | "metrics": { 7 | "enabled": true, 8 | "collectDefaultMetrics": true, 9 | "requestDurationBuckets": [1, 5, 7, 9, 11, 13, 15, 20, 30] 10 | }, 11 | 12 | "logging": { 13 | "level": "debug", 14 | "console": { 15 | "json": true, 16 | "colorize": false 17 | } 18 | }, 19 | 20 | "rateLimiter": { 21 | "enabled": true, 22 | "redisHost": "redis", 23 | "redisPort": 6379, 24 | "requestsPerSecond": 5 25 | } 26 | }, 27 | "rendering": { 28 | "chromeBin": null, 29 | "args": [ 30 | "--no-sandbox", 31 | "--disable-setuid-sandbox", 32 | "--disable-gpu", 33 | "--use-gl=swiftshader" 34 | ], 35 | "ignoresHttpsErrors": false, 36 | 37 | "timezone": null, 38 | "acceptLanguage": null, 39 | "width": 1000, 40 | "height": 500, 41 | "deviceScaleFactor": 1, 42 | "maxWidth": 3080, 43 | "maxHeight": 3000, 44 | "maxDeviceScaleFactor": 4, 45 | 46 | "mode": "clustered", 47 | "clustering": { 48 | "mode": "context", 49 | "maxConcurrency": 5, 50 | "timeout": 30 51 | }, 52 | 53 | "verboseLogging": false, 54 | "dumpio": false 55 | } 56 | } -------------------------------------------------------------------------------- /devenv/docker/ratelimiter/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | grafana: 3 | image: grafana/grafana:latest 4 | ports: 5 | - 3000:3000 6 | environment: 7 | GF_RENDERING_SERVER_URL: http://renderer:8081/render 8 | GF_RENDERING_CALLBACK_URL: http://grafana:3000/ 9 | GF_LOG_FILTERS: rendering:debug 10 | renderer: 11 | image: grafana/grafana-image-renderer:latest 12 | ports: 13 | - 8081 14 | volumes: 15 | - ./config.json:/usr/src/app/config.json 16 | redis: 17 | image: redis:latest 18 | ports: 19 | - 6379 20 | 21 | 22 | -------------------------------------------------------------------------------- /devenv/docker/simple/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | services: 4 | grafana: 5 | image: grafana/grafana:latest 6 | ports: 7 | - 3000 8 | environment: 9 | GF_RENDERING_SERVER_URL: http://renderer:8081/render 10 | GF_RENDERING_CALLBACK_URL: http://grafana:3000/ 11 | GF_LOG_FILTERS: rendering:debug 12 | renderer: 13 | image: grafana/grafana-image-renderer:latest 14 | ports: 15 | - 8081 16 | environment: 17 | ENABLE_METRICS: 'true' 18 | -------------------------------------------------------------------------------- /devenv/docker/test/README.md: -------------------------------------------------------------------------------- 1 | This Docker environment is used to run E2E tests for the image renderer locally. 
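A minimal usage sketch (the exact test commands below are assumptions; see `docs/testing.md` in this repository for the authoritative steps):

```bash
# Start Grafana with the test dashboards and datasources provisioned
docker-compose up -d

# From the repository root, run the test suite against it (assumed script name)
yarn test
```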
2 | -------------------------------------------------------------------------------- /devenv/docker/test/dashboards.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | providers: 4 | - name: 'Tests' 5 | folder: 'tests' 6 | folderUid: '' 7 | type: file 8 | allowUiUpdates: false 9 | updateIntervalSeconds: 60 10 | options: 11 | path: dashboards 12 | -------------------------------------------------------------------------------- /devenv/docker/test/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | services: 4 | grafana: 5 | image: grafana/grafana-enterprise:latest 6 | ports: 7 | - 3000:3000 8 | environment: 9 | GF_FEATURE_TOGGLES_ENABLE: renderAuthJWT 10 | volumes: 11 | - ./dashboards.yaml:/etc/grafana/provisioning/dashboards/dashboards.yaml 12 | - ../../../scripts/drone/provisioning/dashboards:/usr/share/grafana/dashboards 13 | - ../../../scripts/drone/provisioning/datasources/datasources.yaml:/etc/grafana/provisioning/datasources/datasources.yaml 14 | -------------------------------------------------------------------------------- /devenv/docker/tracing/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "service": { 3 | "host": null, 4 | "port": 8081, 5 | 6 | "metrics": { 7 | "enabled": true, 8 | "collectDefaultMetrics": true, 9 | "requestDurationBuckets": [1, 5, 7, 9, 11, 13, 15, 20, 30] 10 | }, 11 | 12 | "logging": { 13 | "level": "debug", 14 | "console": { 15 | "json": true, 16 | "colorize": false 17 | } 18 | } 19 | }, 20 | "rendering": { 21 | "chromeBin": null, 22 | "args": [ 23 | "--no-sandbox" 24 | ], 25 | "ignoresHttpsErrors": false, 26 | 27 | "timezone": null, 28 | "acceptLanguage": null, 29 | "width": 1000, 30 | "height": 500, 31 | "deviceScaleFactor": 1, 32 | "maxWidth": 3080, 33 | "maxHeight": 3000, 34 | "maxDeviceScaleFactor": 4, 35 | 36 | "mode": "clustered", 37 | "clustering": { 38 | "mode": "context", 39 | "maxConcurrency": 5, 40 | "timeout": 30 41 | }, 42 | 43 | "verboseLogging": false, 44 | "dumpio": false, 45 | 46 | "tracing": { 47 | "url": " http://tempo:4318/v1/traces", 48 | "serviceName": "" 49 | } 50 | } 51 | } -------------------------------------------------------------------------------- /devenv/docker/tracing/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | 3 | # Tempo runs as user 10001, and docker compose creates the volume as root. 4 | # As such, we need to chown the volume in order for Tempo to start correctly. 
5 | init: 6 | image: &tempoImage grafana/tempo:latest 7 | user: root 8 | entrypoint: 9 | - "chown" 10 | - "10001:10001" 11 | - "/var/tempo" 12 | volumes: 13 | - ./tempo-data:/var/tempo 14 | 15 | memcached: 16 | image: memcached:1.6.29 17 | container_name: memcached 18 | ports: 19 | - "11211:11211" 20 | environment: 21 | - MEMCACHED_MAX_MEMORY=64m # Set the maximum memory usage 22 | - MEMCACHED_THREADS=4 # Number of threads to use 23 | 24 | tempo: 25 | image: *tempoImage 26 | command: [ "-config.file=/etc/tempo.yaml" ] 27 | volumes: 28 | - ./tempo.yaml:/etc/tempo.yaml 29 | - ./tempo-data:/var/tempo 30 | ports: 31 | - "14268:14268" # jaeger ingest 32 | - "3200:3200" # tempo 33 | - "9095:9095" # tempo grpc 34 | - "4317:4317" # otlp grpc 35 | - "4318:4318" # otlp http 36 | - "9411:9411" # zipkin 37 | depends_on: 38 | - init 39 | - memcached 40 | 41 | prometheus: 42 | image: prom/prometheus:latest 43 | command: 44 | - --config.file=/etc/prometheus.yaml 45 | - --web.enable-remote-write-receiver 46 | - --enable-feature=exemplar-storage 47 | - --enable-feature=native-histograms 48 | volumes: 49 | - ./prometheus.yaml:/etc/prometheus.yaml 50 | ports: 51 | - "9090:9090" 52 | 53 | grafana: 54 | image: grafana/grafana:latest 55 | volumes: 56 | - ./grafana-datasources.yaml:/etc/grafana/provisioning/datasources/datasources.yaml 57 | environment: 58 | GF_RENDERING_SERVER_URL: http://renderer:8081/render 59 | GF_RENDERING_CALLBACK_URL: http://grafana:3000/ 60 | GF_RENDERING_TRACING_URL: http://tempo:4318/v1/traces 61 | GF_LOG_FILTERS: rendering:debug 62 | GF_INSTALL_PLUGINS: https://storage.googleapis.com/integration-artifacts/grafana-exploretraces-app/grafana-exploretraces-app-latest.zip;grafana-traces-app 63 | GF_TRACING_OPENTELEMETRY_OTLP_ADDRESS: tempo:4317 64 | ports: 65 | - "3000:3000" 66 | renderer: 67 | image: grafana/grafana-image-renderer:latest 68 | ports: 69 | - 8081:8081 70 | volumes: 71 | - ./config.json:/usr/src/app/config.json -------------------------------------------------------------------------------- /devenv/docker/tracing/grafana-datasources.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | datasources: 4 | - name: Prometheus 5 | type: prometheus 6 | uid: prometheus 7 | access: proxy 8 | orgId: 1 9 | url: http://prometheus:9090 10 | basicAuth: false 11 | isDefault: false 12 | version: 1 13 | editable: false 14 | jsonData: 15 | httpMethod: GET 16 | - name: Tempo 17 | type: tempo 18 | access: proxy 19 | orgId: 1 20 | url: http://tempo:3200 21 | basicAuth: false 22 | isDefault: true 23 | version: 1 24 | editable: false 25 | apiVersion: 1 26 | uid: tempo 27 | jsonData: 28 | httpMethod: GET 29 | serviceMap: 30 | datasourceUid: prometheus 31 | streamingEnabled: 32 | search: true 33 | 34 | -------------------------------------------------------------------------------- /devenv/docker/tracing/prometheus.yaml: -------------------------------------------------------------------------------- 1 | global: 2 | scrape_interval: 15s 3 | evaluation_interval: 15s 4 | 5 | scrape_configs: 6 | - job_name: 'prometheus' 7 | static_configs: 8 | - targets: [ 'localhost:9090' ] 9 | - job_name: 'tempo' 10 | static_configs: 11 | - targets: [ 'tempo:3200' ] -------------------------------------------------------------------------------- /devenv/docker/tracing/tempo.yaml: -------------------------------------------------------------------------------- 1 | stream_over_http_enabled: true 2 | server: 3 | http_listen_port: 3200 4 | log_level: info 5 | 6 | 
7 | cache: 8 | background: 9 | writeback_goroutines: 5 10 | caches: 11 | - roles: 12 | - frontend-search 13 | memcached: 14 | addresses: dns+memcached:11211 15 | 16 | query_frontend: 17 | search: 18 | duration_slo: 5s 19 | throughput_bytes_slo: 1.073741824e+09 20 | metadata_slo: 21 | duration_slo: 5s 22 | throughput_bytes_slo: 1.073741824e+09 23 | trace_by_id: 24 | duration_slo: 100ms 25 | metrics: 26 | max_duration: 120h # maximum duration of a metrics query, increase for local setups 27 | query_backend_after: 5m 28 | duration_slo: 5s 29 | throughput_bytes_slo: 1.073741824e+09 30 | 31 | distributor: 32 | receivers: # this configuration will listen on all ports and protocols that tempo is capable of. 33 | jaeger: # the receives all come from the OpenTelemetry collector. more configuration information can 34 | protocols: # be found there: https://github.com/open-telemetry/opentelemetry-collector/tree/main/receiver 35 | thrift_http: # 36 | endpoint: "tempo:14268" # for a production deployment you should only enable the receivers you need! 37 | grpc: 38 | endpoint: "tempo:14250" 39 | thrift_binary: 40 | endpoint: "tempo:6832" 41 | thrift_compact: 42 | endpoint: "tempo:6831" 43 | zipkin: 44 | endpoint: "tempo:9411" 45 | otlp: 46 | protocols: 47 | grpc: 48 | endpoint: "tempo:4317" 49 | http: 50 | endpoint: "tempo:4318" 51 | opencensus: 52 | endpoint: "tempo:55678" 53 | 54 | ingester: 55 | max_block_duration: 5m # cut the headblock when this much time passes. this is being set for demo purposes and should probably be left alone normally 56 | 57 | compactor: 58 | compaction: 59 | block_retention: 24h # overall Tempo trace retention. set for demo purposes 60 | 61 | metrics_generator: 62 | registry: 63 | external_labels: 64 | source: tempo 65 | cluster: docker-compose 66 | storage: 67 | path: /var/tempo/generator/wal 68 | remote_write: 69 | - url: http://prometheus:9090/api/v1/write 70 | send_exemplars: true 71 | traces_storage: 72 | path: /var/tempo/generator/traces 73 | processor: 74 | local_blocks: 75 | filter_server_spans: false 76 | flush_to_storage: true 77 | 78 | storage: 79 | trace: 80 | backend: local # backend configuration to use 81 | wal: 82 | path: /var/tempo/wal # where to store the wal locally 83 | local: 84 | path: /var/tempo/blocks 85 | 86 | overrides: 87 | defaults: 88 | metrics_generator: 89 | processors: [service-graphs, span-metrics, local-blocks] # enables metrics generator 90 | generate_native_histograms: both 91 | 92 | -------------------------------------------------------------------------------- /devenv/loadtest/README.md: -------------------------------------------------------------------------------- 1 | # Image rendering load test 2 | 3 | Runs load tests and checks using [k6](https://k6.io/). 4 | 5 | ## Prerequisites 6 | 7 | Docker 8 | 9 | ## Run 10 | 11 | Run load test for 15 minutes using 2 virtual users and targeting http://localhost:3000. 
12 | 13 | ```bash 14 | $ ./run.sh 15 | ``` 16 | 17 | Run load test for custom duration: 18 | 19 | ```bash 20 | $ ./run.sh -d 10s 21 | ``` 22 | 23 | Run only 1 iteration of the load test (useful for testing): 24 | 25 | ```bash 26 | $ ./run.sh -i 1 27 | 28 | Run load test for custom target url: 29 | 30 | ```bash 31 | $ ./run.sh -u http://grafana.loc 32 | ``` 33 | 34 | Run load test for 10 virtual users: 35 | 36 | ```bash 37 | $ ./run.sh -v 10 38 | ``` 39 | 40 | Example output: 41 | 42 | ```bash 43 | > ./run.sh -d 15m -v 10 -u http://grafana.loc 44 | 45 | /\ |‾‾| /‾‾/ /‾/ 46 | /\ / \ | |_/ / / / 47 | / \/ \ | | / ‾‾\ 48 | / \ | |‾\ \ | (_) | 49 | / __________ \ |__| \__\ \___/ .io 50 | 51 | execution: local 52 | output: - 53 | script: src/render_test.js 54 | 55 | duration: 15m0s, iterations: - 56 | vus: 10, max: 10 57 | 58 | done [==========================================================] 15m0s / 15m0s 59 | 60 | █ render test 61 | 62 | █ user authenticates thru ui with username and password 63 | 64 | ✓ response status is 200 65 | 66 | █ render graph panel 67 | 68 | ✓ response status is 200 69 | 70 | checks.....................: 100.00% ✓ 726 ✗ 0 71 | data_received..............: 94 MB 104 kB/s 72 | data_sent..................: 170 kB 188 B/s 73 | group_duration.............: avg=12.32s min=78.5ms med=10.17s max=30.26s p(90)=22.21s p(95)=24.46s 74 | http_req_blocked...........: avg=34.73µs min=2.16µs med=5.45µs max=4.04ms p(90)=8.78µs p(95)=15.34µs 75 | http_req_connecting........: avg=9.99µs min=0s med=0s max=1.26ms p(90)=0s p(95)=0s 76 | http_req_duration..........: avg=12.32s min=76.75ms med=10.17s max=30.26s p(90)=22.21s p(95)=24.46s 77 | http_req_receiving.........: avg=2.38ms min=44.7µs med=2.35ms max=16.37ms p(90)=4.64ms p(95)=5.98ms 78 | http_req_sending...........: avg=51.41µs min=10.48µs med=21.5µs max=4.29ms p(90)=38.26µs p(95)=72.63µs 79 | http_req_tls_handshaking...: avg=0s min=0s med=0s max=0s p(90)=0s p(95)=0s 80 | http_req_waiting...........: avg=12.32s min=76.59ms med=10.17s max=30.24s p(90)=22.21s p(95)=24.46s 81 | http_reqs..................: 726 0.806667/s 82 | iteration_duration.........: avg=12.31s min=5.28µs med=10.17s max=30.26s p(90)=22.21s p(95)=24.46s 83 | iterations.................: 726 0.806667/s 84 | vus........................: 10 min=10 max=10 85 | vus_max....................: 10 min=10 max=10 86 | ``` 87 | -------------------------------------------------------------------------------- /devenv/loadtest/fixtures/graph_panel.json: -------------------------------------------------------------------------------- 1 | { 2 | "annotations": { 3 | "list": [ 4 | { 5 | "builtIn": 1, 6 | "datasource": "-- Grafana --", 7 | "enable": true, 8 | "hide": true, 9 | "iconColor": "rgba(0, 211, 255, 1)", 10 | "name": "Annotations & Alerts", 11 | "type": "dashboard" 12 | } 13 | ] 14 | }, 15 | "editable": true, 16 | "gnetId": null, 17 | "graphTooltip": 0, 18 | "links": [], 19 | "panels": [ 20 | { 21 | "aliasColors": {}, 22 | "bars": false, 23 | "dashLength": 10, 24 | "dashes": false, 25 | "datasource": "Testdata", 26 | "decimals": 3, 27 | "fill": 1, 28 | "fillGradient": 2, 29 | "gridPos": { 30 | "h": 11, 31 | "w": 24, 32 | "x": 0, 33 | "y": 0 34 | }, 35 | "hiddenSeries": false, 36 | "id": 1, 37 | "legend": { 38 | "alignAsTable": true, 39 | "avg": true, 40 | "current": true, 41 | "max": true, 42 | "min": true, 43 | "rightSide": true, 44 | "show": true, 45 | "total": true, 46 | "values": true 47 | }, 48 | "lines": true, 49 | "linewidth": 2, 50 | "links": [], 51 | "maxDataPoints": "", 
52 | "nullPointMode": "null", 53 | "options": { 54 | "dataLinks": [] 55 | }, 56 | "percentage": false, 57 | "pointradius": 5, 58 | "points": false, 59 | "renderer": "flot", 60 | "seriesOverrides": [], 61 | "spaceLength": 10, 62 | "stack": true, 63 | "steppedLine": false, 64 | "targets": [ 65 | { 66 | "refId": "A", 67 | "scenarioId": "random_walk" 68 | }, 69 | { 70 | "refId": "B", 71 | "scenarioId": "random_walk" 72 | }, 73 | { 74 | "refId": "C", 75 | "scenarioId": "random_walk" 76 | }, 77 | { 78 | "refId": "D", 79 | "scenarioId": "random_walk" 80 | }, 81 | { 82 | "refId": "E", 83 | "scenarioId": "random_walk" 84 | }, 85 | { 86 | "refId": "F", 87 | "scenarioId": "random_walk" 88 | }, 89 | { 90 | "refId": "G", 91 | "scenarioId": "random_walk" 92 | }, 93 | { 94 | "refId": "H", 95 | "scenarioId": "random_walk" 96 | }, 97 | { 98 | "refId": "I", 99 | "scenarioId": "random_walk" 100 | }, 101 | { 102 | "refId": "J", 103 | "scenarioId": "random_walk" 104 | }, 105 | { 106 | "refId": "K", 107 | "scenarioId": "random_walk" 108 | }, 109 | { 110 | "refId": "L", 111 | "scenarioId": "random_walk" 112 | }, 113 | { 114 | "refId": "M", 115 | "scenarioId": "random_walk" 116 | }, 117 | { 118 | "refId": "N", 119 | "scenarioId": "random_walk" 120 | }, 121 | { 122 | "refId": "O", 123 | "scenarioId": "random_walk" 124 | } 125 | ], 126 | "thresholds": [], 127 | "timeFrom": null, 128 | "timeRegions": [], 129 | "timeShift": null, 130 | "title": "Legend Table No Scroll Visible", 131 | "tooltip": { 132 | "shared": true, 133 | "sort": 0, 134 | "value_type": "individual" 135 | }, 136 | "type": "graph", 137 | "xaxis": { 138 | "buckets": null, 139 | "mode": "time", 140 | "name": null, 141 | "show": true, 142 | "values": [] 143 | }, 144 | "yaxes": [ 145 | { 146 | "format": "short", 147 | "label": null, 148 | "logBase": 1, 149 | "max": null, 150 | "min": null, 151 | "show": true 152 | }, 153 | { 154 | "format": "short", 155 | "label": null, 156 | "logBase": 1, 157 | "max": null, 158 | "min": null, 159 | "show": true 160 | } 161 | ], 162 | "yaxis": { 163 | "align": false, 164 | "alignLevel": null 165 | } 166 | } 167 | ], 168 | "schemaVersion": 21, 169 | "style": "dark", 170 | "tags": [], 171 | "templating": { 172 | "list": [] 173 | }, 174 | "time": { 175 | "from": "now-6h", 176 | "to": "now" 177 | }, 178 | "timepicker": {}, 179 | "timezone": "", 180 | "title": "Graph Panel", 181 | "uid": "_CPokraWz", 182 | "version": 1 183 | } -------------------------------------------------------------------------------- /devenv/loadtest/modules/client.js: -------------------------------------------------------------------------------- 1 | import http from "k6/http"; 2 | 3 | export const DatasourcesEndpoint = class DatasourcesEndpoint { 4 | constructor(httpClient) { 5 | this.httpClient = httpClient; 6 | } 7 | 8 | getAll() { 9 | return this.httpClient.get('/datasources'); 10 | } 11 | 12 | getById(id) { 13 | return this.httpClient.get(`/datasources/${id}`); 14 | } 15 | 16 | getByName(name) { 17 | return this.httpClient.get(`/datasources/name/${name}`); 18 | } 19 | 20 | create(payload) { 21 | return this.httpClient.post(`/datasources`, JSON.stringify(payload)); 22 | } 23 | 24 | update(id, payload) { 25 | return this.httpClient.put(`/datasources/${id}`, JSON.stringify(payload)); 26 | } 27 | }; 28 | 29 | export const DashboardsEndpoint = class DashboardsEndpoint { 30 | constructor(httpClient) { 31 | this.httpClient = httpClient; 32 | } 33 | 34 | getAll() { 35 | return this.httpClient.get('/dashboards'); 36 | } 37 | 38 | upsert(payload) { 39 | 
return this.httpClient.post(`/dashboards/db`, JSON.stringify(payload)); 40 | } 41 | }; 42 | 43 | export const OrganizationsEndpoint = class OrganizationsEndpoint { 44 | constructor(httpClient) { 45 | this.httpClient = httpClient; 46 | } 47 | 48 | getById(id) { 49 | return this.httpClient.get(`/orgs/${id}`); 50 | } 51 | 52 | getByName(name) { 53 | return this.httpClient.get(`/orgs/name/${name}`); 54 | } 55 | 56 | create(name) { 57 | let payload = { 58 | name: name, 59 | }; 60 | return this.httpClient.post(`/orgs`, JSON.stringify(payload)); 61 | } 62 | }; 63 | 64 | export const UIEndpoint = class UIEndpoint { 65 | constructor(httpClient) { 66 | this.httpClient = httpClient; 67 | } 68 | 69 | login(username, pwd) { 70 | const payload = { user: username, password: pwd }; 71 | return this.httpClient.formPost('/login', payload); 72 | } 73 | 74 | renderPanel(orgId, dashboardUid, panelId) { 75 | return this.httpClient.get( 76 | `/render/d-solo/${dashboardUid}/graph-panel`, 77 | { 78 | orgId, 79 | panelId, 80 | width: 1000, 81 | height: 500, 82 | tz: 'Europe/Stockholm', 83 | } 84 | ); 85 | } 86 | } 87 | 88 | export const GrafanaClient = class GrafanaClient { 89 | constructor(httpClient) { 90 | httpClient.onBeforeRequest = (params) => { 91 | if (this.orgId && this.orgId > 0) { 92 | params.headers = params.headers || {}; 93 | params.headers["X-Grafana-Org-Id"] = this.orgId; 94 | } 95 | } 96 | 97 | this.raw = httpClient; 98 | this.dashboards = new DashboardsEndpoint(httpClient.withUrl('/api')); 99 | this.datasources = new DatasourcesEndpoint(httpClient.withUrl('/api')); 100 | this.orgs = new OrganizationsEndpoint(httpClient.withUrl('/api')); 101 | this.ui = new UIEndpoint(httpClient); 102 | } 103 | 104 | loadCookies(cookies) { 105 | for (let [name, value] of Object.entries(cookies)) { 106 | http.cookieJar().set(this.raw.url, name, value); 107 | } 108 | } 109 | 110 | saveCookies() { 111 | return http.cookieJar().cookiesForURL(this.raw.url + '/'); 112 | } 113 | 114 | batch(requests) { 115 | return this.raw.batch(requests); 116 | } 117 | 118 | withOrgId(orgId) { 119 | this.orgId = orgId; 120 | } 121 | } 122 | 123 | export const BaseClient = class BaseClient { 124 | constructor(url, subUrl) { 125 | if (url.endsWith('/')) { 126 | url = url.substring(0, url.length - 1); 127 | } 128 | 129 | if (subUrl.endsWith('/')) { 130 | subUrl = subUrl.substring(0, subUrl.length - 1); 131 | } 132 | 133 | this.url = url + subUrl; 134 | this.onBeforeRequest = () => {}; 135 | } 136 | 137 | withUrl(subUrl) { 138 | let c = new BaseClient(this.url, subUrl); 139 | c.onBeforeRequest = this.onBeforeRequest; 140 | return c; 141 | } 142 | 143 | beforeRequest(params) { 144 | 145 | } 146 | 147 | get(url, queryParams, params) { 148 | params = params || {}; 149 | this.onBeforeRequest(params); 150 | 151 | if (queryParams) { 152 | url += '?' 
+ Array.from(Object.entries(queryParams)).map(([key, value]) => 153 | `${key}=${encodeURIComponent(value)}` 154 | ).join('&'); 155 | } 156 | 157 | return http.get(this.url + url, params); 158 | } 159 | 160 | formPost(url, body, params) { 161 | params = params || {}; 162 | this.beforeRequest(params); 163 | this.onBeforeRequest(params); 164 | return http.post(this.url + url, body, params); 165 | } 166 | 167 | post(url, body, params) { 168 | params = params || {}; 169 | params.headers = params.headers || {}; 170 | params.headers['Content-Type'] = 'application/json'; 171 | 172 | this.beforeRequest(params); 173 | this.onBeforeRequest(params); 174 | return http.post(this.url + url, body, params); 175 | } 176 | 177 | put(url, body, params) { 178 | params = params || {}; 179 | params.headers = params.headers || {}; 180 | params.headers['Content-Type'] = 'application/json'; 181 | 182 | this.onBeforeRequest(params); 183 | return http.put(this.url + url, body, params); 184 | } 185 | 186 | delete(url, params) { 187 | params = params || {}; 188 | this.beforeRequest(params); 189 | this.onBeforeRequest(params); 190 | return http.del(this.url + url, null, params); 191 | } 192 | 193 | batch(requests) { 194 | for (let n = 0; n < requests.length; n++) { 195 | let params = requests[n].params || {}; 196 | params.headers = params.headers || {}; 197 | params.headers['Content-Type'] = 'application/json'; 198 | this.beforeRequest(params); 199 | this.onBeforeRequest(params); 200 | requests[n].params = params; 201 | requests[n].url = this.url + requests[n].url; 202 | if (requests[n].body) { 203 | requests[n].body = JSON.stringify(requests[n].body); 204 | } 205 | } 206 | 207 | return http.batch(requests); 208 | } 209 | } 210 | 211 | export const createClient = (url) => { 212 | return new GrafanaClient(new BaseClient(url, '')); 213 | } 214 | -------------------------------------------------------------------------------- /devenv/loadtest/modules/util.js: -------------------------------------------------------------------------------- 1 | export const createTestOrgIfNotExists = (client) => { 2 | let orgId = 0; 3 | 4 | let res = client.orgs.getByName('k6-image-renderer'); 5 | if (res.status === 404) { 6 | res = client.orgs.create('k6-image-renderer'); 7 | if (res.status !== 200) { 8 | throw new Error('Expected 200 response status when creating org'); 9 | } 10 | return res.json().orgId; 11 | } 12 | 13 | // This can happen e.g. 
in Hosted Grafana instances, where even admins 14 | // cannot see organisations 15 | if (res.status !== 200) { 16 | console.info(`unable to get orgs from instance, continuing with default orgId ${orgId}`); 17 | return orgId; 18 | } 19 | 20 | return res.json().id; 21 | }; 22 | 23 | export const upsertTestdataDatasource = (client, name) => { 24 | const payload = { 25 | access: 'proxy', 26 | isDefault: false, 27 | name, 28 | type: 'testdata', 29 | }; 30 | 31 | let res = client.datasources.getByName(payload.name); 32 | let id; 33 | if (res.status === 404) { 34 | res = client.datasources.create(payload); 35 | 36 | if (res.status == 200) { 37 | id = res.json().id; 38 | } 39 | } else if (res.status == 200) { 40 | id = res.json().id; 41 | res = client.datasources.update(res.json().id, payload); 42 | } 43 | 44 | if (res.status !== 200) { 45 | throw new Error(`expected 200 response status when creating datasource, got ${res.status}`); 46 | } 47 | 48 | return id; 49 | }; 50 | 51 | export const upsertDashboard = (client, dashboard) => { 52 | const payload = { 53 | dashboard, 54 | overwrite: true, 55 | }; 56 | 57 | let res = client.dashboards.upsert(payload); 58 | 59 | if (res.status !== 200) { 60 | throw new Error(`expected 200 response status when creating dashboards, got ${res.status}`); 61 | } 62 | 63 | return res.json().id; 64 | }; 65 | -------------------------------------------------------------------------------- /devenv/loadtest/render_test.js: -------------------------------------------------------------------------------- 1 | import { check, group } from 'k6'; 2 | import { createClient } from './modules/client.js'; 3 | import { 4 | createTestOrgIfNotExists, 5 | upsertTestdataDatasource, 6 | upsertDashboard, 7 | } from './modules/util.js'; 8 | 9 | export let options = { 10 | noCookiesReset: true, 11 | thresholds: { checks: [ { threshold: 'rate=1', abortOnFail: true } ] }, 12 | }; 13 | 14 | let endpoint = __ENV.URL || 'http://localhost:3000'; 15 | const client = createClient(endpoint); 16 | const dashboard = JSON.parse(open('fixtures/graph_panel.json')); 17 | 18 | export const setup = () => { 19 | group("user authenticates thru ui with username and password", () => { 20 | let res = client.ui.login('admin', 'admin'); 21 | 22 | check(res, { 23 | 'response status is 200': (r) => r.status === 200, 24 | }); 25 | }); 26 | 27 | const orgId = createTestOrgIfNotExists(client); 28 | client.withOrgId(orgId); 29 | upsertTestdataDatasource(client, dashboard.panels[0].datasource); 30 | upsertDashboard(client, dashboard); 31 | 32 | return { 33 | orgId, 34 | cookies: client.saveCookies(), 35 | }; 36 | }; 37 | 38 | export default (data) => { 39 | client.loadCookies(data.cookies); 40 | client.withOrgId(data.orgId); 41 | 42 | group("render test", () => { 43 | group("render graph panel", () => { 44 | const response = client.ui.renderPanel( 45 | data.orgId, 46 | dashboard.uid, 47 | dashboard.panels[0].id, 48 | ); 49 | check(response, { 50 | 'response status is 200': (r) => r.status === 200, 51 | 'response is a PNG': (r) => r.headers['Content-Type'] == 'image/png', 52 | }); 53 | }); 54 | }); 55 | } 56 | 57 | export const teardown = () => {} 58 | -------------------------------------------------------------------------------- /devenv/loadtest/run.sh: -------------------------------------------------------------------------------- 1 | #/bin/bash 2 | 3 | cd "$(dirname $0)" 4 | 5 | run() { 6 | local duration='15m' 7 | local url='http://localhost:3000' 8 | local vus='2' 9 | local iterationsOption='' 10 | 11 | while 
getopts ":d:i:u:v:" o; do 12 | case "${o}" in 13 | d) 14 | duration=${OPTARG} 15 | ;; 16 | i) 17 | iterationsOption="--iterations ${OPTARG}" 18 | ;; 19 | u) 20 | url=${OPTARG} 21 | ;; 22 | v) 23 | vus=${OPTARG} 24 | ;; 25 | esac 26 | done 27 | shift $((OPTIND-1)) 28 | 29 | docker run \ 30 | -it \ 31 | --network=host \ 32 | --mount type=bind,source=$PWD,destination=/src \ 33 | -e URL=$url \ 34 | --rm \ 35 | loadimpact/k6:master run \ 36 | --vus $vus \ 37 | --duration $duration \ 38 | $iterationsOption \ 39 | //src/render_test.js 40 | } 41 | 42 | run "$@" 43 | -------------------------------------------------------------------------------- /docs/building_from_source.md: -------------------------------------------------------------------------------- 1 | # Build the image renderer from source 2 | 3 | Git clone this repo: 4 | 5 | ```bash 6 | git clone https://github.com/grafana/grafana-image-renderer.git 7 | cd grafana-image-renderer 8 | ``` 9 | 10 | ## Plugin 11 | 12 | 1. Install [Node.js](https://nodejs.org/) and [Yarn](https://yarnpkg.com/en/). 13 | 2. Install dependencies and build: 14 | 15 | ```bash 16 | make deps 17 | 18 | # build and package for Linux x64 19 | make build_package ARCH=linux-x64-glibc 20 | 21 | # build and package for Window x64 22 | make build_package ARCH=win32-x64-unknown 23 | 24 | # build and package for Darwin x64 25 | make build_package ARCH=darwin-x64-unknown 26 | 27 | # build and package for Mac ARM64 28 | make build_package ARCH=darwin-arm64-unknown 29 | 30 | # build and package without including Chromium 31 | make build_package ARCH= SKIP_CHROMIUM=true OUT=plugin--no-chromium 32 | ``` 33 | 34 | 3. Built artifacts can be found in ./artifacts directory 35 | 36 | ## Docker image 37 | 38 | 1. Install Docker 39 | 2. Build Docker image: 40 | 41 | ```bash 42 | docker build -t custom-grafana-image-renderer . 43 | ``` 44 | 45 | ## Local Node.js application using local Chrome/Chromium 46 | 47 | 1. Install [Node.js](https://nodejs.org/) and [Yarn](https://yarnpkg.com/en/). 48 | 2. Install dependencies and build: 49 | 50 | ```bash 51 | make deps 52 | make build 53 | ``` 54 | 55 | 3. Built artifacts are found in ./build -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Developer documentation 2 | Use the following links for instructions on how to: 3 | 4 | - [Build the image renderer from source](building_from_source.md) 5 | - [Package plugin as a single executable](package_plugin_as_single_executable.md) 6 | - [Release and publish a new version](release_new_version.md) 7 | - [Testing instructions](testing.md) -------------------------------------------------------------------------------- /docs/package_plugin_as_single_executable.md: -------------------------------------------------------------------------------- 1 | # Package plugin as a single executable 2 | 3 | This plugin can be packaged into a single executable together with [Node.js](https://nodejs.org/) runtime and [Chromium](https://www.chromium.org/Home) so it doesn't require any additional dependencies to be installed. 
4 | 5 | ```bash 6 | make build_package ARCH= 7 | ``` 8 | 9 | Where `` is a combination of 10 | - linux, darwin, win32 11 | - ia32, x64, arm, arm64 12 | - unknown, glibc, musl 13 | 14 | This follows combinations allowed for GRPC plugin and you can see options [here](https://console.cloud.google.com/storage/browser/node-precompiled-binaries.grpc.io/grpc/?project=grpc-testing). 15 | 16 | At least the following combinations have been verified to work: 17 | - linux-x64-glibc 18 | - darwin-x64-unknown 19 | - win32-x64-unknown 20 | -------------------------------------------------------------------------------- /docs/release_new_version.md: -------------------------------------------------------------------------------- 1 | # Release and publish a new version 2 | 3 | 1. Every commit to master is a possible release. 4 | 2. Version in plugin.json is used for deciding which version to release. 5 | 6 | ## Prepare 7 | 8 | 1. Update `version` and `updated` properties in plugin.json. 9 | 2. Update CHANGELOG.md. 10 | 3. Merge/push changes to master. 11 | 4. Commit is built in [Drone](https://drone.grafana.net/grafana/grafana-image-renderer). 12 | 13 | ## Promote release 14 | 15 | 1. Open [Drone](https://drone.grafana.net/grafana/grafana-image-renderer) and find the build for your commit. 16 | 2. Click on the `...` from the top-right corner to display the menu, then click on `Promote`. 17 | 3. Fill the `Create deployment` form with the values below, and click on `Deploy`: 18 | - `Type` = `Promote` 19 | - `Target` = `release` *(write it manually)* 20 | - *(no parameters needed)* 21 | 4. Once you've clicked on `Deploy` it will trigger a new pipeline with the release steps. 22 | 23 | ## Publish plugin to Grafana.com 24 | 25 | Since the [migration to Drone](https://github.com/grafana/grafana-image-renderer/pull/394), this step that historically 26 | was needed to be performed manually is no longer required and is automatically performed by `publish_to_gcom` step. 27 | 28 | **Note:** The step will time out, but the plugin update process will continue in the background. 29 | 30 | ``` 31 | 32 | 504 Gateway Time-out 33 | 34 |

<center><h1>504 Gateway Time-out</h1></center>
35 | <hr><center>nginx/1.17.9</center>
36 | 37 | 38 | ``` 39 | 40 | ## Deploy into Grafana Cloud 41 | Create a PR in [Deployment Tools](https://github.com/grafana/deployment_tools/blob/master/ksonnet/lib/render-service/images.libsonnet) with the new version. 42 | -------------------------------------------------------------------------------- /docs/testing.md: -------------------------------------------------------------------------------- 1 | # Testing 2 | 3 | In order to run the image-renderer automated test suites, you need to run the following command from the root folder: 4 | 5 | ``` 6 | yarn test 7 | ``` 8 | 9 | This will launch a Grafana instance in Docker and, then, run the test suites. 10 | 11 | _Notes:_ 12 | 13 | If there are some expected changes in the reference image files (located in `/tests/testdata`), run `yarn test-update` and push the updated references. 14 | 15 | If the tests are failing and you want to see the difference between the image you get and the reference image, run `yarn test-diff`. This will generate images (called `diff_.png`) containing the differences in the `/tests/testdata` folder. 16 | 17 | ## Fixing Drone issues 18 | 19 | If tests are successful in your local environement but fail in Drone. You can follow these steps to run the tests in an environment similar to the Drone pipeline. This will mount your local files of the `grafana-image-renderer` repo in the Docker image so any change that happens in the Docker image will be available in your local environment. This allows you to run `yarn test-diff` and `yarn test-update` in Docker and see the results locally. 20 | 21 | 1. Run the Drone environment in Docker: 22 | 23 | ``` 24 | cd ./devenv/docker/drone 25 | docker-compose up 26 | ``` 27 | 28 | 2. Open a terminal within the `drone-docker-puppeteer` container and run the following commands: 29 | 30 | ``` 31 | cd /drone/src 32 | PUPPETEER_CACHE_DIR=/drone/src/cache yarn install --frozen-lockfile --no-progress 33 | PUPPETEER_CACHE_DIR=/drone/src/cache CI=true yarn test-ci 34 | ``` 35 | 36 | _Notes:_ 37 | The tests might take longer in the Docker container. 
If you run into timeout issues, you can run the test command with the `--testTimeout option`: 38 | ``` 39 | PUPPETEER_CACHE_DIR=/drone/src/cache CI=true yarn test-ci --testTimeout=10000 40 | ``` 41 | -------------------------------------------------------------------------------- /img/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/grafana/grafana-image-renderer/dda7d51c4fd3a8ff0544cc12c6b05debe47da41e/img/icon.png -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('ts-jest').JestConfigWithTsJest} */ 2 | module.exports = { 3 | preset: 'ts-jest', 4 | testEnvironment: 'node', 5 | reporters: ['/tests/reporter.js'], 6 | testTimeout: 20000, 7 | }; -------------------------------------------------------------------------------- /mixin/.gitignore: -------------------------------------------------------------------------------- 1 | /alerts.yaml 2 | /rules.yaml 3 | dashboards_out -------------------------------------------------------------------------------- /mixin/Makefile: -------------------------------------------------------------------------------- 1 | all: fmt lint build clean 2 | 3 | fmt: 4 | ./scripts/format.sh 5 | 6 | lint: 7 | ./scripts/lint.sh 8 | 9 | build: 10 | ./scripts/build.sh 11 | 12 | clean: 13 | rm -rf dashboards_out alerts.yaml rules.yaml 14 | -------------------------------------------------------------------------------- /mixin/README.md: -------------------------------------------------------------------------------- 1 | # Grafana Render service Mixin 2 | 3 | To build, you need to have `mixtool` and `jsonnetfmt` installed. If you 4 | have a working Go development environment, it's easiest to run the following: 5 | 6 | ```bash 7 | $ go get github.com/monitoring-mixins/mixtool/cmd/mixtool 8 | $ go get github.com/google/go-jsonnet/cmd/jsonnetfmt 9 | ``` 10 | 11 | You can then build the Prometheus rules files `alerts.yaml` and 12 | `rules.yaml` and a directory `dashboard_out` with the JSON dashboard files 13 | for Grafana render service: 14 | 15 | ```bash 16 | $ make build 17 | ``` 18 | 19 | For more advanced uses of mixins, see 20 | https://github.com/monitoring-mixins/docs. 
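
With recent Go versions (1.18 and later), `go get` no longer builds and installs command binaries; the `go install` form below (same module paths as above, `@latest` assumed) should be equivalent:

```bash
$ go install github.com/monitoring-mixins/mixtool/cmd/mixtool@latest
$ go install github.com/google/go-jsonnet/cmd/jsonnetfmt@latest
```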
21 | -------------------------------------------------------------------------------- /mixin/alerts/alerts.yaml: -------------------------------------------------------------------------------- 1 | groups: 2 | - name: GrafanaImageRendererAlerts 3 | rules: 4 | - alert: GrafanaImageRendererAlertsRequestsFailing 5 | for: 5m 6 | expr: | 7 | (100 * namespace_job_statuscode:grafana_image_renderer_service_http_request_duration_seconds_count:rate5m{status_code=~"5.."} 8 | / 9 | namespace_job_statuscode:grafana_image_renderer_service_http_request_duration_seconds_count:rate5m) > 5 10 | labels: 11 | severity: 'warning' 12 | annotations: 13 | message: "'{{ $labels.namespace }}' / '{{ $labels.job }}' is experiencing {{ $value | humanize }}% errors" 14 | -------------------------------------------------------------------------------- /mixin/mixin.libsonnet: -------------------------------------------------------------------------------- 1 | { 2 | grafanaDashboards: { 3 | 'image-render-service.json': (import 'dashboards/image-render-service.json'), 4 | }, 5 | 6 | // Helper function to ensure that we don't override other rules, by forcing 7 | // the patching of the groups list, and not the overall rules object. 8 | local importRules(rules) = { 9 | groups+: std.native('parseYaml')(rules)[0].groups, 10 | }, 11 | 12 | prometheusRules+: importRules(importstr 'rules/rules.yaml'), 13 | 14 | prometheusAlerts+: importRules(importstr 'alerts/alerts.yaml'), 15 | } 16 | -------------------------------------------------------------------------------- /mixin/rules/rules.yaml: -------------------------------------------------------------------------------- 1 | groups: 2 | - name: grafana_rules 3 | rules: 4 | # Sum HTTP requests by status code but ignore ephemeral labels like pod and instance 5 | - record: namespace_job_statuscode:grafana_image_renderer_service_http_request_duration_seconds_count:rate5m 6 | expr: | 7 | sum without(instance, pod) (rate(grafana_image_renderer_service_http_request_duration_seconds_count[5m])) 8 | -------------------------------------------------------------------------------- /mixin/scripts/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eo pipefail 3 | 4 | cd "$(dirname "$0")"/.. 5 | 6 | mixtool generate all mixin.libsonnet 7 | -------------------------------------------------------------------------------- /mixin/scripts/common.sh: -------------------------------------------------------------------------------- 1 | JSONNET_FMT="jsonnetfmt -n 2 --max-blank-lines 2 --string-style s --comment-style s" 2 | -------------------------------------------------------------------------------- /mixin/scripts/format.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eo pipefail 3 | 4 | cd "$(dirname "$0")"/.. 5 | 6 | . scripts/common.sh 7 | 8 | find . -name 'vendor' -prune -o -name '*.libsonnet' -print -o -name '*.jsonnet' -print | \ 9 | xargs -n 1 -- ${JSONNET_FMT} -i 10 | -------------------------------------------------------------------------------- /mixin/scripts/lint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eo pipefail 3 | 4 | cd "$(dirname "$0")"/.. 5 | 6 | . scripts/common.sh 7 | 8 | find . 
-name 'vendor' -prune -o -name '*.libsonnet' -print -o -name '*.jsonnet' -print | \ 9 | while read f; do \ 10 | ${JSONNET_FMT} "$f" | diff -u "$f" -; \ 11 | done 12 | 13 | mixtool lint mixin.libsonnet 14 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "renderer", 3 | "version": "1.0.0", 4 | "author": "Grafana Labs", 5 | "license": "Apache-2.0", 6 | "repository": { 7 | "type": "git", 8 | "url": "https://github.com/grafana/grafana-image-renderer.git" 9 | }, 10 | "scripts": { 11 | "eslint": "eslint . --ext .ts", 12 | "typecheck": "tsc --noEmit", 13 | "prettier:check": "prettier --list-different \"**/*.ts\"", 14 | "prettier:write": "prettier --list-different \"**/*.ts\" --write", 15 | "precommit": "npm run eslint & npm run typecheck", 16 | "watch": "tsc-watch --onSuccess \"node build/app.js server --config=dev.json\"", 17 | "watch:debug": "tsc-watch --onSuccess \"cross-env DEBUG=puppeteer-cluster:* node build/app.js server --config=dev.json\"", 18 | "build": "tsc", 19 | "start": "node build/app.js server --config=dev.json", 20 | "create-gcom-plugin-json": "ts-node scripts/createGcomPluginJson.ts ./scripts/tmp", 21 | "push-to-gcom": "sh ./scripts/push-to-gcom.sh", 22 | "test-update": "cross-env UPDATE_GOLDEN=true jest", 23 | "test": "sh ./scripts/run_tests.sh", 24 | "test-ci": "jest", 25 | "test-diff": "cross-env SAVE_DIFF=true jest" 26 | }, 27 | "dependencies": { 28 | "@grpc/grpc-js": "^1.8.22", 29 | "@grpc/proto-loader": "^0.7.2", 30 | "@hapi/boom": "^10.0.0", 31 | "@opentelemetry/api": "^1.9.0", 32 | "@opentelemetry/auto-instrumentations-node": "^0.49.0", 33 | "@opentelemetry/exporter-trace-otlp-http": "^0.52.1", 34 | "@opentelemetry/resources": "^1.25.1", 35 | "@opentelemetry/sdk-node": "^0.52.1", 36 | "@opentelemetry/semantic-conventions": "^1.25.1", 37 | "@puppeteer/browsers": "^2.3.1", 38 | "chokidar": "^3.5.2", 39 | "dompurify": "^3.2.4", 40 | "express": "^4.21.1", 41 | "express-prom-bundle": "^6.5.0", 42 | "ioredis": "^5.6.1", 43 | "jimp": "^0.22.12", 44 | "jsdom": "20.0.0", 45 | "lodash": "^4.17.21", 46 | "minimist": "^1.2.6", 47 | "morgan": "^1.9.0", 48 | "multer": "^2.0.0", 49 | "on-finished": "^2.3.0", 50 | "poolpeteer": "^0.24.0", 51 | "prom-client": "^14.1.0", 52 | "puppeteer": "^22.8.2", 53 | "puppeteer-cluster": "^0.24.0", 54 | "rate-limiter-flexible": "^7.0.0", 55 | "unique-filename": "^2.0.1", 56 | "winston": "^3.8.2" 57 | }, 58 | "devDependencies": { 59 | "@grafana/eslint-config": "^6.0.0", 60 | "@types/dompurify": "^3.2.0", 61 | "@types/express": "^4.17.14", 62 | "@types/jest": "^29.5.12", 63 | "@types/jsdom": "20.0.0", 64 | "@types/multer": "^1.4.7", 65 | "@types/node": "^20.17.27", 66 | "@types/pixelmatch": "^5.2.6", 67 | "@types/supertest": "^2.0.15", 68 | "@typescript-eslint/eslint-plugin": "5.37.0", 69 | "@typescript-eslint/parser": "5.37.0", 70 | "@yao-pkg/pkg": "^6.3.0", 71 | "axios": "1.8.2", 72 | "cross-env": "7.0.3", 73 | "eslint": "8.23.1", 74 | "fast-png": "^6.2.0", 75 | "jest": "^29.7.0", 76 | "jsonwebtoken": "^9.0.2", 77 | "lint-staged": "13.0.3", 78 | "prettier": "2.7.1", 79 | "supertest": "^7.0.0", 80 | "ts-jest": "^29.1.1", 81 | "ts-node": "10.9.1", 82 | "tsc-watch": "5.0.3", 83 | "typescript": "4.8.3" 84 | }, 85 | "resolutions": { 86 | "@types/express": "^4.17.14", 87 | "xml2js": "^0.6.2" 88 | }, 89 | "lint-staged": { 90 | "*.ts": [ 91 | "prettier --write" 92 | ] 93 | }, 94 | "pkg": { 95 | "assets": "proto/*" 
96 | }, 97 | "bin": "build/app.js", 98 | "engines": { 99 | "node": ">= 20" 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "renderer", 3 | "name": "Grafana Image Renderer", 4 | "id": "grafana-image-renderer", 5 | "backend": true, 6 | "info": { 7 | "description": "Grafana Backend Image Renderer that uses headless chrome to capture images.", 8 | "author": { 9 | "name": "Grafana Labs", 10 | "url": "https://grafana.com" 11 | }, 12 | "keywords": ["renderer", "png", "backend", "image"], 13 | "logos": { 14 | "small": "img/icon.png", 15 | "large": "img/icon.png" 16 | }, 17 | "links": [ 18 | { 19 | "name": "Project site", 20 | "url": "https://github.com/grafana/grafana-image-renderer" 21 | }, 22 | { 23 | "name": "Apache License", 24 | "url": "https://github.com/grafana/grafana-image-renderer/blob/master/LICENSE" 25 | } 26 | ], 27 | "version": "3.12.6", 28 | "updated": "2025-05-23" 29 | }, 30 | "dependencies": { 31 | "grafanaDependency": ">=8.3.11" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /plugin_start_darwin_amd64: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 3 | 4 | node ${DIR}/build/app.js 5 | -------------------------------------------------------------------------------- /plugin_start_linux_amd64: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 3 | 4 | node ${DIR}/build/app.js 5 | -------------------------------------------------------------------------------- /proto/health.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package grpc.health.v1; 4 | 5 | message HealthCheckRequest { 6 | string service = 1; 7 | } 8 | 9 | message HealthCheckResponse { 10 | enum ServingStatus { 11 | UNKNOWN = 0; 12 | SERVING = 1; 13 | NOT_SERVING = 2; 14 | } 15 | ServingStatus status = 1; 16 | } 17 | 18 | service Health { 19 | rpc Check(HealthCheckRequest) returns (HealthCheckResponse); 20 | } -------------------------------------------------------------------------------- /proto/pluginv2.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package pluginv2; 3 | 4 | option go_package = ".;pluginv2"; 5 | 6 | //----------------------------------------------- 7 | // Common 8 | //----------------------------------------------- 9 | 10 | message AppInstanceSettings { 11 | bytes jsonData = 3; 12 | map decryptedSecureJsonData = 4; 13 | int64 lastUpdatedMS = 5; 14 | } 15 | 16 | message DataSourceInstanceSettings { 17 | int64 id = 1; 18 | string name = 2; 19 | string url = 3; 20 | string user = 4; 21 | string database = 5; 22 | bool basicAuthEnabled = 6; 23 | string basicAuthUser = 7; 24 | bytes jsonData = 8; 25 | map decryptedSecureJsonData = 9; 26 | int64 lastUpdatedMS = 10; 27 | } 28 | 29 | message User { 30 | string login = 1; 31 | string name = 2; 32 | string email = 3; 33 | string role = 4; 34 | } 35 | 36 | message PluginContext { 37 | // The Grafana organization id the request originating from. 38 | int64 orgId = 1; 39 | 40 | // The unique identifier of the plugin the request originating from. 
41 | string pluginId = 2; 42 | 43 | // The Grafana user the request originating from. 44 | // 45 | // Will not be provided if Grafana backend initiated the request. 46 | User user = 3; 47 | 48 | // App plugin instance settings is the configured app instance settings. 49 | // In Grafana an app instance is an enabled app plugin in a 50 | // Grafana organization. 51 | // 52 | // Will only be set if request targeting an app instance. 53 | AppInstanceSettings appInstanceSettings = 4; 54 | 55 | // Data source instance settings is the configured data source instance 56 | // settings. In Grafana a data source instance is a created data source 57 | // in a Grafana organization. 58 | // 59 | // Will only be set if request targeting a data source instance. 60 | DataSourceInstanceSettings dataSourceInstanceSettings = 5; 61 | } 62 | 63 | //--------------------------------------------------------- 64 | // Resource service enables HTTP-style requests over gRPC. 65 | //--------------------------------------------------------- 66 | 67 | service Resource { 68 | rpc CallResource(CallResourceRequest) returns (stream CallResourceResponse); 69 | } 70 | 71 | message StringList { 72 | repeated string values = 1; 73 | } 74 | 75 | message CallResourceRequest { 76 | PluginContext pluginContext = 1; 77 | string path = 2; 78 | string method = 3; 79 | string url = 4; 80 | map headers = 5; 81 | bytes body = 6; 82 | } 83 | 84 | message CallResourceResponse { 85 | int32 code = 1; 86 | map headers = 2; 87 | bytes body = 3; 88 | } 89 | 90 | //----------------------------------------------- 91 | // Data 92 | //----------------------------------------------- 93 | 94 | service Data { 95 | rpc QueryData(QueryDataRequest) returns (QueryDataResponse); 96 | } 97 | 98 | message TimeRange { 99 | int64 fromEpochMS = 1; 100 | int64 toEpochMS = 2; 101 | } 102 | 103 | message DataQuery { 104 | string refId = 1; 105 | int64 maxDataPoints = 2; 106 | int64 intervalMS = 3; 107 | TimeRange timeRange = 4; 108 | bytes json = 5; 109 | string queryType = 6; 110 | } 111 | 112 | // QueryDataRequest 113 | message QueryDataRequest { 114 | PluginContext pluginContext = 1; 115 | 116 | // Environment info 117 | map headers = 2; 118 | 119 | // List of data queries 120 | repeated DataQuery queries = 3; 121 | } 122 | 123 | message QueryDataResponse { 124 | // Map of refId to response 125 | map responses = 1; 126 | } 127 | 128 | message DataResponse { 129 | // Arrow encoded DataFrames 130 | // Frame has its own meta, warnings, and repeats refId 131 | repeated bytes frames = 1; 132 | string error = 2; 133 | bytes jsonMeta = 3; // Warning: Current ignored by frontend. Would be for metadata about the query. 
134 | } 135 | 136 | //----------------------------------------------- 137 | // Diagnostics 138 | //----------------------------------------------- 139 | 140 | service Diagnostics { 141 | rpc CheckHealth(CheckHealthRequest) returns (CheckHealthResponse); 142 | rpc CollectMetrics(CollectMetricsRequest) returns (CollectMetricsResponse); 143 | } 144 | 145 | message CollectMetricsRequest { 146 | PluginContext pluginContext = 1; 147 | } 148 | 149 | message CollectMetricsResponse { 150 | message Payload { 151 | bytes prometheus = 1; 152 | } 153 | 154 | Payload metrics = 1; 155 | } 156 | 157 | message CheckHealthRequest { 158 | PluginContext pluginContext = 1; 159 | } 160 | 161 | message CheckHealthResponse { 162 | enum HealthStatus { 163 | UNKNOWN = 0; 164 | OK = 1; 165 | ERROR = 2; 166 | } 167 | 168 | HealthStatus status = 1; 169 | string message = 2; 170 | bytes jsonDetails = 3; 171 | } 172 | 173 | //----------------------------------------------- 174 | // Transform - Very experimental 175 | //----------------------------------------------- 176 | 177 | service Transform { 178 | rpc TransformData(QueryDataRequest) returns (QueryDataResponse); 179 | } 180 | 181 | service TransformDataCallBack { 182 | rpc QueryData(QueryDataRequest) returns (QueryDataResponse); 183 | } 184 | -------------------------------------------------------------------------------- /proto/rendererv2.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package pluginextensionv2; 3 | 4 | option go_package = ".;pluginextensionv2"; 5 | 6 | message StringList { 7 | repeated string values = 1; 8 | } 9 | 10 | message RenderRequest { 11 | string url = 1; 12 | int32 width = 2; 13 | int32 height = 3; 14 | float deviceScaleFactor = 4; 15 | string filePath = 5; 16 | string renderKey = 6; 17 | string domain = 7; 18 | int32 timeout = 8; 19 | string timezone = 9; 20 | map headers = 10; 21 | string authToken = 11; 22 | string encoding = 12; 23 | } 24 | 25 | message RenderResponse { 26 | string error = 1; 27 | } 28 | 29 | message RenderCSVRequest { 30 | string url = 1; 31 | string filePath = 2; 32 | string renderKey = 3; 33 | string domain = 4; 34 | int32 timeout = 5; 35 | string timezone = 6; 36 | map headers = 7; 37 | string authToken = 8; 38 | } 39 | 40 | message RenderCSVResponse { 41 | string error = 1; 42 | string fileName = 2; 43 | } 44 | 45 | service Renderer { 46 | rpc Render(RenderRequest) returns (RenderResponse); 47 | rpc RenderCSV(RenderCSVRequest) returns (RenderCSVResponse); 48 | } 49 | -------------------------------------------------------------------------------- /proto/sanitizer.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package pluginextensionv2; 3 | 4 | option go_package = ".;pluginextensionv2"; 5 | 6 | message SanitizeRequest { 7 | string filename = 1; 8 | bytes content = 2; 9 | string configType = 3; // DOMPurify, ... 
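// Sanitizer configuration passed as raw bytes; interpreted according to configType (e.g. DOMPurify settings).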
10 | bytes config = 4; 11 | string authToken = 5; 12 | } 13 | 14 | message SanitizeResponse { 15 | string error = 1; 16 | bytes sanitized = 2; 17 | } 18 | 19 | service Sanitizer { 20 | rpc Sanitize(SanitizeRequest) returns (SanitizeResponse); 21 | } 22 | -------------------------------------------------------------------------------- /scripts/archive_target.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ARCH="${1:-}" 4 | 5 | if [ -z "$ARCH" ]; then 6 | echo "ARCH (arg 1) has to be set" 7 | exit 1 8 | fi 9 | 10 | OUT="${2:-plugin-${ARCH}}" 11 | 12 | apt-get update && apt-get install zip -y 13 | mkdir -p artifacts 14 | (cd dist && zip -yqr ../artifacts/${OUT}.zip ${OUT}) 15 | -------------------------------------------------------------------------------- /scripts/build_push_docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | TAG='' 6 | if [ "$1" = "master" ]; then 7 | TAG="master-$(git rev-parse --short HEAD)" 8 | else 9 | git fetch --tags 10 | TAG=$(git describe --tags --abbrev=0 | cut -d "v" -f 2) 11 | fi 12 | 13 | echo "building ${TAG}" 14 | echo "$DOCKER_PASS" | docker login -u "$DOCKER_USER" --password-stdin 15 | tags=("-t ${IMAGE_NAME}:${TAG}") 16 | if [ -z "$(echo $TAG | grep -E "beta|master")" ]; then 17 | tags+=("-t ${IMAGE_NAME}:latest") 18 | fi 19 | 20 | # The default Docker builder does not support multiple platforms, so this creates a non-default builder that does support multiple platforms. 21 | if ! docker buildx inspect | grep -E 'Driver:\s+docker-container' >/dev/null; then 22 | docker buildx create --use 23 | fi 24 | 25 | docker buildx build --platform linux/amd64,linux/arm64 --push ${tags[@]} . 26 | -------------------------------------------------------------------------------- /scripts/clean_target.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ARCH="${1:-}" 4 | OUT="${2:-}" 5 | 6 | if [ -z "$ARCH" ]; then 7 | echo "ARCH (arg 1) has to be set" 8 | exit 1 9 | fi 10 | 11 | PLUGIN_NAME=plugin-${ARCH} 12 | 13 | if [ ! 
-z "$OUT" ]; then 14 | PLUGIN_NAME=${OUT} 15 | fi 16 | 17 | rm -rf .dist/${PLUGIN_NAME} 18 | rm -f ./artifacts/${PLUGIN_NAME}.zip -------------------------------------------------------------------------------- /scripts/createGcomPluginJson.ts: -------------------------------------------------------------------------------- 1 | import path = require("path"); 2 | const fs = require('fs'); 3 | 4 | const outputFolder = process.argv[2] 5 | const commit = process.argv[3]; 6 | 7 | if (!outputFolder) { 8 | throw new Error('expected output folder as the first arg') 9 | } 10 | 11 | if (!commit) { 12 | throw new Error(`usage: 'yarn run create-gcom-plugin-json '`); 13 | } 14 | 15 | const outputPath = path.join(outputFolder, 'plugin.json') 16 | const rootPluginJsonPath = path.resolve('./plugin.json'); 17 | 18 | 19 | enum PluginVersion { 20 | 'darwin-amd64' = 'darwin-amd64', 21 | 'linux-amd64' = 'linux-amd64', 22 | 'windows-amd64' = 'windows-amd64', 23 | } 24 | 25 | type PluginJson = { 26 | url: string; 27 | commit: string; 28 | download: Record; 29 | }; 30 | 31 | const pluginVersionToFileName: Record = { 32 | [PluginVersion['darwin-amd64']]: 'plugin-darwin-x64-unknown.zip', 33 | [PluginVersion['linux-amd64']]: 'plugin-linux-x64-glibc.zip', 34 | [PluginVersion['windows-amd64']]: 'plugin-win32-x64-unknown.zip', 35 | }; 36 | 37 | const baseUrl = `https://github.com/grafana/grafana-image-renderer`; 38 | const fileUrl = (release, fileName) => `${baseUrl}/releases/download/${release}/${fileName}`; 39 | 40 | const axios = require('axios'); 41 | 42 | const getFileNamesToChecksumMap = async (releaseVersion: string): Promise> => { 43 | const res = await axios.get(fileUrl(releaseVersion, 'md5sums.txt')); 44 | 45 | if (typeof res.data !== 'string') { 46 | throw new Error('expected checksum data to be string'); 47 | } 48 | 49 | const text = res.data as string; 50 | 51 | return text 52 | .split('\n') 53 | .map((l) => l.replaceAll(/\s+/g, ' ').split(' ')) 54 | .filter((arr) => arr.length === 2) 55 | .reduce((acc, [checksum, artifact]) => { 56 | const artifactPrefix = 'artifacts/'; 57 | if (artifact.startsWith(artifactPrefix)) { 58 | return { ...acc, [artifact.substring(artifactPrefix.length)]: checksum }; 59 | } else { 60 | throw new Error(`expected artifact name to start with "artifact/". actual: ${artifact}`); 61 | } 62 | }, {}); 63 | }; 64 | 65 | const verifyChecksums = (map: Record) => { 66 | const expectedFileNames = Object.values(pluginVersionToFileName); 67 | const fileNamesInChecksumMap = Object.keys(map); 68 | for (const expectedFileName of expectedFileNames) { 69 | if (!fileNamesInChecksumMap.includes(expectedFileName)) { 70 | throw new Error(`expected to find ${expectedFileName} in the checksum map. 
actual: [${fileNamesInChecksumMap.join(', ')}]`); 71 | } 72 | } 73 | }; 74 | 75 | const getReleaseVersion = (): string => { 76 | const rootPluginJson = JSON.parse(fs.readFileSync(rootPluginJsonPath)); 77 | const version = rootPluginJson?.info?.version; 78 | 79 | if (!version || typeof version !== 'string' || !version.length) { 80 | throw new Error(`expected to find value for "info.version" in root plugin.json (${rootPluginJsonPath})`); 81 | } 82 | return `v${version}`; 83 | }; 84 | 85 | const createGcomPluginJson = (map: Record, releaseVersion: string): PluginJson => ({ 86 | url: baseUrl, 87 | commit: commit, 88 | download: Object.values(PluginVersion) 89 | .map((ver) => { 90 | const fileName = pluginVersionToFileName[ver]; 91 | const md5 = map[fileName]; 92 | if (!md5 || !md5.length) { 93 | throw new Error(`expected non-empty md5 checksum for plugin version ${ver} with filename ${fileName}`); 94 | } 95 | 96 | return { [ver]: { md5, url: fileUrl(releaseVersion, fileName) } }; 97 | }) 98 | .reduce((acc, next) => ({ ...acc, ...next }), {}) as PluginJson['download'], 99 | }); 100 | 101 | const run = async () => { 102 | const releaseVersion = getReleaseVersion(); 103 | console.log(`Creating gcom plugin json with version ${releaseVersion} and commit ${commit}`); 104 | 105 | const artifactsToChecksumMap = await getFileNamesToChecksumMap(releaseVersion); 106 | verifyChecksums(artifactsToChecksumMap); 107 | 108 | console.log(`Fetched artifact checksums ${JSON.stringify(artifactsToChecksumMap, null, 2)}`); 109 | 110 | const pluginJson = createGcomPluginJson(artifactsToChecksumMap, releaseVersion); 111 | if (!fs.existsSync(outputFolder)) { 112 | fs.mkdirSync(outputFolder) 113 | } 114 | fs.writeFileSync(outputPath, JSON.stringify(pluginJson, null, 2)); 115 | 116 | console.log(`Done! 
Path: ${path.resolve(outputPath)}`) 117 | }; 118 | 119 | run(); 120 | -------------------------------------------------------------------------------- /scripts/download_chrome.js: -------------------------------------------------------------------------------- 1 | const { BrowserPlatform, Browser, install, resolveBuildId } = require('@puppeteer/browsers'); 2 | const fs = require('fs') 3 | const path = require('path'); 4 | 5 | const archArg = process.argv[2]; 6 | let [ 7 | // Should be one of linux, mac, win32, win64 as per options in BrowserFetcher but we reuse the same arch string 8 | // as for grpc download (ie darwin-x64-unknown) so we need to transform it a bit 9 | platform, 10 | arch, 11 | ] = archArg.split('-'); 12 | 13 | if (platform === 'win32' && arch === 'x64') { 14 | platform = BrowserPlatform.WIN64; 15 | } 16 | 17 | if (platform === 'darwin') { 18 | if (arch === 'arm64') { 19 | platform = BrowserPlatform.MAC_ARM; 20 | } else { 21 | platform = BrowserPlatform.MAC; 22 | } 23 | } 24 | 25 | const outputPath = path.resolve(process.cwd(), 'dist', process.argv[3] || `plugin-${archArg}`); 26 | 27 | const browserVersion = Browser.CHROMEHEADLESSSHELL; 28 | 29 | async function download() { 30 | const buildId = await resolveBuildId(browserVersion, platform, 'latest'); 31 | console.log(`Installing ${browserVersion} into ${outputPath}`); 32 | return install({ 33 | baseUrl: 'https://storage.googleapis.com/chrome-for-testing-public', 34 | cacheDir: outputPath, 35 | browser: browserVersion, 36 | platform, 37 | buildId, 38 | }); 39 | } 40 | 41 | download().then(browser => { 42 | console.log(`${browserVersion} downloaded into:`, outputPath); 43 | 44 | const chromeInfo = { buildId: browser.buildId }; 45 | return fs.writeFileSync(path.resolve(outputPath, 'chrome-info.json'), JSON.stringify(chromeInfo)); 46 | }); 47 | -------------------------------------------------------------------------------- /scripts/drone/common.star: -------------------------------------------------------------------------------- 1 | load('scripts/drone/utils.star', 'ci_image') 2 | load('scripts/drone/vault.star', 'from_secret') 3 | 4 | def install_deps_step(): 5 | return { 6 | 'name': 'yarn-install', 7 | 'image': ci_image, 8 | 'commands': [ 9 | '. ~/.init-nvm.sh', 10 | 'yarn install --frozen-lockfile --no-progress', 11 | ], 12 | 'depends_on': [ 13 | 'grabpl', 14 | ], 15 | 'environment': { 16 | 'PUPPETEER_CACHE_DIR': '/drone/src/cache', 17 | }, 18 | } 19 | 20 | def build_step(): 21 | return { 22 | 'name': 'yarn-build', 23 | 'image': ci_image, 24 | 'commands': [ 25 | '. ~/.init-nvm.sh', 26 | 'yarn build', 27 | ], 28 | 'depends_on': [ 29 | 'yarn-install', 30 | ], 31 | } 32 | 33 | def package_step(arch, name='', skip_chromium=False, override_output='', skip_errors=True): 34 | pkg_cmd = './scripts/package_target.sh {}'.format(arch) 35 | bpm_cmd = 'bin/grabpl build-plugin-manifest ./dist/' 36 | arc_cmd = './scripts/archive_target.sh {}'.format(arch) 37 | 38 | if skip_chromium: 39 | pkg_cmd += ' true {}'.format(override_output) 40 | bpm_cmd += '{}'.format(override_output) 41 | arc_cmd += ' {}'.format(override_output) 42 | else: 43 | bpm_cmd += 'plugin-{}'.format(arch) 44 | 45 | if skip_errors: 46 | bpm_cmd += ' || true' 47 | 48 | if name == '': 49 | name = 'package-{}'.format(arch) 50 | 51 | step = { 52 | 'name': name, 53 | 'image': ci_image, 54 | 'commands': [ 55 | '. 
~/.init-nvm.sh', 56 | pkg_cmd, 57 | bpm_cmd, 58 | arc_cmd, 59 | ], 60 | 'depends_on': ['yarn-test'], 61 | 'environment': { 62 | 'GRAFANA_API_KEY': from_secret('grafana_api_key'), 63 | } 64 | } 65 | 66 | return step 67 | 68 | def security_scan_step(): 69 | return { 70 | 'name': 'security-scan', 71 | 'image': ci_image, 72 | 'commands': [ 73 | '. ~/.init-nvm.sh', 74 | 'echo "Starting veracode scan..."', 75 | '# Increase heap size or the scanner will die.', 76 | 'export _JAVA_OPTIONS=-Xmx4g', 77 | 'mkdir -p ci/jobs/security_scan', 78 | 'curl -sSL https://download.sourceclear.com/ci.sh | sh -s scan --skip-compile --quick --allow-dirty', 79 | ], 80 | 'depends_on': ['yarn-build'], 81 | 'environment': { 82 | 'SRCCLR_API_TOKEN': from_secret('srcclr_api_token'), 83 | }, 84 | 'failure': 'ignore', 85 | } 86 | 87 | def e2e_services(): 88 | return [{ 89 | 'name': 'grafana', 90 | 'image': 'grafana/grafana-enterprise:latest', 91 | 'environment': { 92 | 'GF_FEATURE_TOGGLES_ENABLE': 'renderAuthJWT', 93 | 'GF_PATHS_PROVISIONING': '/drone/src/scripts/drone/provisioning', 94 | }, 95 | }] 96 | 97 | def e2e_setup_step(): 98 | return { 99 | 'name': 'wait-for-grafana', 100 | 'image': 'jwilder/dockerize:0.6.1', 101 | 'commands': [ 102 | 'dockerize -wait http://grafana:3000 -timeout 120s', 103 | ] 104 | } 105 | 106 | def tests_step(): 107 | return { 108 | 'name': 'yarn-test', 109 | 'image': 'us-docker.pkg.dev/grafanalabs-dev/grafana-ci/docker-puppeteer:3.0.0', 110 | 'depends_on': ['wait-for-grafana', 'yarn-build'], 111 | 'commands': [ 112 | 'yarn test-ci', 113 | ], 114 | 'environment': { 115 | 'PUPPETEER_CACHE_DIR': '/drone/src/cache', 116 | 'CI': 'true', 117 | }, 118 | } -------------------------------------------------------------------------------- /scripts/drone/grabpl.star: -------------------------------------------------------------------------------- 1 | grabpl_version = 'v3.0.20' 2 | curl_image = 'byrnedo/alpine-curl:0.1.8' 3 | wix_image = 'grafana/ci-wix:0.1.1' 4 | 5 | def download_grabpl_step(platform="linux"): 6 | if platform == 'windows': 7 | return { 8 | 'name': 'grabpl', 9 | 'image': wix_image, 10 | 'commands': [ 11 | '$$ProgressPreference = "SilentlyContinue"', 12 | 'Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/{}/windows/grabpl.exe -OutFile grabpl.exe'.format( 13 | grabpl_version 14 | ), 15 | ], 16 | } 17 | 18 | return { 19 | 'name': 'grabpl', 20 | 'image': curl_image, 21 | 'commands': [ 22 | 'mkdir -p bin', 23 | 'curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/{}/grabpl'.format( 24 | grabpl_version 25 | ), 26 | 'chmod +x bin/grabpl', 27 | ], 28 | } 29 | -------------------------------------------------------------------------------- /scripts/drone/pipeline.star: -------------------------------------------------------------------------------- 1 | load('scripts/drone/utils.star', 'pipeline') 2 | load('scripts/drone/grabpl.star', 'download_grabpl_step') 3 | load('scripts/drone/common.star', 'install_deps_step', 'build_step', 'package_step', 'security_scan_step', 'e2e_setup_step', 'e2e_services', 'tests_step') 4 | load('scripts/drone/promotion.star', 'publish_to_docker_master', 'publish_to_docker_release', 'publish_gh_release', 'publish_to_gcom') 5 | 6 | def common_steps(skip_errors): 7 | return [ 8 | download_grabpl_step(), 9 | install_deps_step(), 10 | build_step(), 11 | e2e_setup_step(), 12 | tests_step(), 13 | security_scan_step(), 14 | package_step(arch='linux-x64-glibc', skip_errors=skip_errors), 15 | 
package_step(arch='darwin-x64-unknown', skip_errors=skip_errors), 16 | package_step(arch='win32-x64-unknown', skip_errors=skip_errors), 17 | package_step(arch='linux-x64-glibc', name='package-linux-x64-glibc-no-chromium', skip_chromium=True, override_output='plugin-linux-x64-glibc-no-chromium', skip_errors=skip_errors), 18 | package_step(arch='alpine-x64-unknown', name='package-alpine-x64-no-chromium', skip_chromium=True, override_output='plugin-alpine-x64-no-chromium', skip_errors=skip_errors), 19 | ] 20 | 21 | def prs_pipeline(): 22 | return [ 23 | pipeline( 24 | name='test-pr', 25 | trigger={ 26 | 'event': ['pull_request'], 27 | }, 28 | steps=common_steps(True), 29 | services=e2e_services(), 30 | ), 31 | ] 32 | 33 | def master_pipeline(): 34 | steps = common_steps(False) + [ 35 | publish_to_docker_master(), 36 | ] 37 | 38 | return [ 39 | pipeline( 40 | name='test-master', 41 | trigger={ 42 | 'branch': ['master'], 43 | 'event': ['push'], 44 | }, 45 | steps=steps, 46 | services=e2e_services(), 47 | ) 48 | ] 49 | 50 | def promotion_pipeline(): 51 | trigger = { 52 | 'branch': ['master'], 53 | 'event': ['promote'], 54 | 'target': ['release'], 55 | } 56 | 57 | steps = common_steps(False) + [ 58 | publish_gh_release(), 59 | publish_to_docker_release(), 60 | publish_to_gcom(), 61 | ] 62 | 63 | return [ 64 | pipeline( 65 | name='release', 66 | trigger=trigger, 67 | steps=steps, 68 | services=e2e_services(), 69 | ) 70 | ] 71 | -------------------------------------------------------------------------------- /scripts/drone/promotion.star: -------------------------------------------------------------------------------- 1 | load('scripts/drone/utils.star', 'docker_image', 'ci_image') 2 | load('scripts/drone/vault.star', 'from_secret') 3 | 4 | def publish_gh_release(): 5 | return { 6 | 'name': 'publish_to_github', 7 | 'image': 'cibuilds/github:0.13.0', 8 | 'commands': [ 9 | './scripts/generate_md5sum.sh', 10 | '. ./scripts/get_gh_token.sh', 11 | './scripts/publish_github_release.sh', 12 | ], 13 | 'environment': { 14 | # These are passed as secrets for security 15 | 'GITHUB_APP_ID': from_secret('github_app_id'), 16 | 'GITHUB_APP_PRIVATE_KEY': from_secret('github_app_private_key'), 17 | 'GITHUB_INSTALLATION_ID': from_secret('github_app_installation_id'), 18 | }, 19 | 'depends_on': [ 20 | 'package-linux-x64-glibc', 21 | 'package-darwin-x64-unknown', 22 | 'package-win32-x64-unknown', 23 | 'package-linux-x64-glibc-no-chromium', 24 | 'package-alpine-x64-no-chromium', 25 | ], 26 | } 27 | 28 | def publish_to_docker_master(): 29 | step = publish_to_docker() 30 | step['name'] += '_master' 31 | step['commands'][0] += ' master' 32 | return step 33 | 34 | def publish_to_docker_release(): 35 | step = publish_to_docker() 36 | step['depends_on'] = ['publish_to_github'] 37 | return step 38 | 39 | def publish_to_docker(): 40 | return { 41 | 'name': 'publish_to_docker', 42 | 'image': 'google/cloud-sdk:449.0.0', 43 | 'environment': { 44 | 'IMAGE_NAME': docker_image, 45 | 'DOCKER_USER': from_secret('docker_username'), 46 | 'DOCKER_PASS': from_secret('docker_password'), 47 | }, 48 | 'commands': ['./scripts/build_push_docker.sh'], 49 | 'volumes': [{'name': 'docker', 'path': '/var/run/docker.sock'}], 50 | 'depends_on': ['yarn-test'], 51 | } 52 | 53 | def publish_to_gcom(): 54 | return { 55 | 'name': 'publish_to_gcom', 56 | 'image': ci_image, 57 | 'commands': [ 58 | '. 
~/.init-nvm.sh', 59 | 'yarn run create-gcom-plugin-json ${DRONE_COMMIT}', 60 | 'yarn run push-to-gcom', 61 | ], 62 | 'environment': { 63 | 'GCOM_URL': from_secret('gcom_url'), 64 | 'GCOM_UAGENT': from_secret('gcom_uagent'), 65 | 'GCOM_PUBLISH_TOKEN': from_secret('gcom_publish_token'), 66 | }, 67 | 'depends_on': ['publish_to_github'], 68 | } 69 | -------------------------------------------------------------------------------- /scripts/drone/provisioning/dashboards/dashboards.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | providers: 4 | - name: 'Tests' 5 | folder: 'tests' 6 | folderUid: '' 7 | type: file 8 | allowUiUpdates: false 9 | updateIntervalSeconds: 60 10 | options: 11 | path: /drone/src/scripts/drone/provisioning/dashboards 12 | -------------------------------------------------------------------------------- /scripts/drone/provisioning/datasources/datasources.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: 1 3 | 4 | datasources: 5 | - name: TestData 6 | type: testdata 7 | uid: b5c1e67d-b771-4e21-a81a-f80f0d1885a7 8 | editable: false -------------------------------------------------------------------------------- /scripts/drone/utils.star: -------------------------------------------------------------------------------- 1 | load('scripts/drone/vault.star', 'gcr_pull_secret', 'gar_pull_secret') 2 | 3 | ci_image = 'grafana/grafana-plugin-ci:1.9.0' 4 | docker_image = 'grafana/grafana-image-renderer' 5 | publisher_image = 'grafana/integration-grafana-publisher:v9' 6 | 7 | def pipeline( 8 | name, 9 | trigger, 10 | steps, 11 | services=[], 12 | platform='linux', 13 | depends_on=[], 14 | environment=None, 15 | volumes=[], 16 | ): 17 | if platform != 'windows': 18 | platform_conf = { 19 | 'platform': {'os': 'linux', 'arch': 'amd64'}, 20 | # A shared cache is used on the host 21 | # To avoid issues with parallel builds, we run this repo on single build agents 22 | 'node': {'type': 'no-parallel'}, 23 | } 24 | else: 25 | platform_conf = { 26 | 'platform': { 27 | 'os': 'windows', 28 | 'arch': 'amd64', 29 | 'version': '1809', 30 | } 31 | } 32 | 33 | pipeline = { 34 | 'kind': 'pipeline', 35 | 'type': 'docker', 36 | 'name': name, 37 | 'trigger': trigger, 38 | 'services': services, 39 | 'steps': steps, 40 | 'clone': { 41 | 'retries': 3, 42 | }, 43 | 'volumes': [ 44 | { 45 | 'name': 'docker', 46 | 'host': { 47 | 'path': '/var/run/docker.sock', 48 | }, 49 | } 50 | ], 51 | 'depends_on': depends_on, 52 | 'image_pull_secrets': [gcr_pull_secret, gar_pull_secret], 53 | } 54 | if environment: 55 | pipeline.update( 56 | { 57 | 'environment': environment, 58 | } 59 | ) 60 | 61 | pipeline['volumes'].extend(volumes) 62 | pipeline.update(platform_conf) 63 | 64 | return pipeline 65 | -------------------------------------------------------------------------------- /scripts/drone/vault.star: -------------------------------------------------------------------------------- 1 | gcr_pull_secret = "gcr" 2 | gar_pull_secret = "gar" 3 | 4 | def from_secret(secret): 5 | return {'from_secret': secret} 6 | 7 | def vault_secret(name, path, key): 8 | return { 9 | 'kind': 'secret', 10 | 'name': name, 11 | 'get': { 12 | 'path': path, 13 | 'name': key, 14 | }, 15 | } 16 | 17 | def secrets(): 18 | return [ 19 | vault_secret(gcr_pull_secret, 'secret/data/common/gcr', '.dockerconfigjson'), 20 | vault_secret('github_app_id', 'ci/data/repo/grafana/grafana-image-renderer/github-app', 'app-id'), 21 | 
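        # Each vault_secret() maps a Drone secret name to a Vault path and key; build
        # steps reference the name through from_secret() (see promotion.star and
        # common.star). The GitHub App triplet below (private key and installation id,
        # together with the app id above) is what scripts/get_gh_token.sh uses to mint
        # a short-lived installation token during the release pipeline.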
vault_secret('github_app_private_key', 'ci/data/repo/grafana/grafana-image-renderer/github-app', 'private-key'), 22 | vault_secret('github_app_installation_id', 'ci/data/repo/grafana/grafana-image-renderer/github-app', 'app-installation-id'), 23 | vault_secret('gcom_publish_token', 'infra/data/ci/drone-plugins', 'gcom_publish_token'), 24 | vault_secret('grafana_api_key', 'infra/data/ci/drone-plugins', 'grafana_api_key'), 25 | vault_secret('srcclr_api_token', 'infra/data/ci/drone-plugins', 'srcclr_api_token'), 26 | vault_secret(gar_pull_secret, 'secret/data/common/gar', '.dockerconfigjson'), 27 | vault_secret('docker_username', 'ci/data/common/dockerhub', 'username'), 28 | vault_secret('docker_password', 'ci/data/common/dockerhub', 'password'), 29 | ] 30 | -------------------------------------------------------------------------------- /scripts/generate_md5sum.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | for archive in artifacts/*.zip; do 4 | md5sum $archive >> artifacts/md5sums.txt 5 | done 6 | -------------------------------------------------------------------------------- /scripts/get_gh_token.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # Ensure necessary tools are installed 6 | apk add --no-cache openssl curl jq 7 | 8 | # Write the private key to a file 9 | echo "$GITHUB_APP_PRIVATE_KEY" > private-key.pem 10 | chmod 600 private-key.pem 11 | 12 | # Generate the JWT 13 | NOW=$(date +%s) 14 | EXPIRATION=$(($NOW + 600)) 15 | HEADER=$(printf '{"alg":"RS256","typ":"JWT"}' | openssl base64 -A | tr '+/' '-_' | tr -d '=') 16 | PAYLOAD=$(printf '{"iat":%d,"exp":%d,"iss":"%s"}' $NOW $EXPIRATION $GITHUB_APP_ID | openssl base64 -A | tr '+/' '-_' | tr -d '=') 17 | HEADER_PAYLOAD="$HEADER.$PAYLOAD" 18 | SIGNATURE=$(echo -n "$HEADER_PAYLOAD" | openssl dgst -sha256 -sign ./private-key.pem | openssl base64 -A | tr '+/' '-_' | tr -d '=') 19 | JWT="$HEADER_PAYLOAD.$SIGNATURE" 20 | 21 | # Request the installation access token 22 | RESPONSE=$(curl -s -X POST \ 23 | -H "Authorization: Bearer $JWT" \ 24 | -H "Accept: application/vnd.github+json" \ 25 | https://api.github.com/app/installations/$GITHUB_INSTALLATION_ID/access_tokens) 26 | 27 | # Extract the token from the response 28 | GITHUB_TOKEN=$(echo $RESPONSE | jq -r '.token') 29 | 30 | # Export the token for use in subsequent commands 31 | export GITHUB_TOKEN 32 | -------------------------------------------------------------------------------- /scripts/package_target.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ARCH="${1:-}" 4 | SKIP_CHROMIUM=${2:-false} 5 | OUT="${3:-}" 6 | 7 | if [ -z "$ARCH" ]; then 8 | echo "ARCH (arg 1) has to be set" 9 | exit 1 10 | fi 11 | 12 | mkdir -p dist 13 | node scripts/pkg.js ${ARCH} ${OUT} 14 | if [ $? != 0 ]; then 15 | echo "${?}\n". 1>&2 && exit 1 16 | fi 17 | 18 | if [ ${SKIP_CHROMIUM} = false ]; then 19 | node scripts/download_chrome.js ${ARCH} ${OUT} 20 | else 21 | echo "Skipping chrome download" 22 | fi 23 | 24 | if [ $? != 0 ]; then 25 | echo "${?}\n". 1>&2 && exit 1 26 | fi 27 | 28 | node scripts/rename_executable.js ${ARCH} ${OUT} 29 | 30 | if [ $? != 0 ]; then 31 | echo "${?}\n". 1>&2 && exit 1 32 | fi 33 | 34 | COPY_PATH=dist/plugin-${ARCH}/ 35 | 36 | if [ ! 
-z "$OUT" ]; then 37 | COPY_PATH=dist/${OUT} 38 | fi 39 | 40 | cp plugin.json ${COPY_PATH} 41 | cp README.md ${COPY_PATH} 42 | cp CHANGELOG.md ${COPY_PATH} 43 | cp LICENSE ${COPY_PATH} 44 | cp -r img ${COPY_PATH} 45 | -------------------------------------------------------------------------------- /scripts/pkg.js: -------------------------------------------------------------------------------- 1 | const childProcess = require('child_process'); 2 | const fs = require('fs') 3 | 4 | const archArg = process.argv[2]; 5 | let [ 6 | // linux, darwin, win32 7 | platform, 8 | // ia32, x64, arm, arm64 9 | arch, 10 | ] = archArg.split('-'); 11 | 12 | const platformTransform = { 13 | darwin: 'macos', 14 | win32: 'win', 15 | }; 16 | 17 | const archTransform = { 18 | ia32: 'x84', 19 | arm: 'armv6', 20 | // I only assume this is correct 21 | arm64: 'armv6', 22 | }; 23 | 24 | platform = platformTransform[platform] || platform; 25 | arch = archTransform[arch] || arch; 26 | 27 | if(platform === 'macos' && (arch.includes('arm'))) { 28 | arch = 'arm64' 29 | } 30 | 31 | const outputPath = "dist/" + (process.argv[3] || `plugin-${archArg}`); 32 | const outputNodeModules = `${outputPath}/node_modules` 33 | 34 | childProcess.execSync(`"./node_modules/.bin/pkg" -t node20-${platform}-${arch} . --out-path ${outputPath} --no-native-build`, {stdio: 'inherit'}); 35 | 36 | childProcess.execSync(`rm -rf ${outputNodeModules}`) 37 | -------------------------------------------------------------------------------- /scripts/publish_github_release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | git fetch --tags 6 | 7 | RELEASE_NOTES=$(awk 'BEGIN {FS="##"; RS=""} FNR==1 {print; exit}' CHANGELOG.md) 8 | VERSION=$(cat plugin.json|jq '.info.version'| sed s/\"//g) 9 | PRERELEASE='' 10 | LATEST_TAG=$(git describe --tags --abbrev=0) 11 | 12 | if [ v"${VERSION}" == "${LATEST_TAG}" ]; then 13 | echo "Tag ${LATEST_TAG} have already been pushed. Exiting..." 14 | exit 1 15 | fi 16 | 17 | if [[ $VERSION == *"beta"* ]]; then 18 | PRERELEASE='-prerelease' 19 | fi 20 | 21 | git config user.email "eng@grafana.com" 22 | git config user.name "Drone Automation" 23 | 24 | echo "Pushing tag v${VERSION}..." 25 | git tag v"${VERSION}" 26 | git push "https://${GITHUB_APP_ID}:${GITHUB_TOKEN}@github.com/grafana/grafana-image-renderer.git" v"${VERSION}" 27 | 28 | echo "Pushing multiple artifacts to release v${VERSION}..." 29 | ghr \ 30 | -u "${DRONE_REPO_OWNER}" \ 31 | -r "${DRONE_REPO_NAME}" \ 32 | -c "${DRONE_COMMIT_SHA}" \ 33 | -n "v${VERSION}" \ 34 | -b "${RELEASE_NOTES}" \ 35 | ${PRERELEASE} v"${VERSION}" ./artifacts/ 36 | -------------------------------------------------------------------------------- /scripts/push-to-gcom.sh: -------------------------------------------------------------------------------- 1 | #!/bin/zsh 2 | 3 | JSON=$(cat ./scripts/tmp/plugin.json) 4 | 5 | echo $JSON 6 | echo "Pushing..." 
7 | 8 | curl -s -H "User-Agent: $GCOM_UAGENT" -H "Authorization: Bearer $GCOM_PUBLISH_TOKEN" "$GCOM_URL/plugins" -X POST -H "Content-Type: application/json" -d "$JSON" 9 | -------------------------------------------------------------------------------- /scripts/rename_executable.js: -------------------------------------------------------------------------------- 1 | const childProcess = require('child_process'); 2 | 3 | const archArg = process.argv[2]; 4 | 5 | let [ 6 | // linux, darwin, win32 7 | platform, 8 | // ia32, x64, arm, arm64 9 | arch, 10 | ] = archArg.split('-'); 11 | 12 | const platformTransform = { 13 | win32: 'windows', 14 | alpine: 'linux', 15 | }; 16 | 17 | const archTransform = { 18 | x64: 'amd64', 19 | ia32: '386' 20 | }; 21 | 22 | let ext = platform === 'win32' ? '.exe' : ''; 23 | const outputPath = "dist/" + (process.argv[3] || `plugin-${archArg}`); 24 | 25 | const execFileName = `plugin_start_${platformTransform[platform] || platform}_${archTransform[arch] || arch}${ext}`; 26 | childProcess.execSync(`mv ${outputPath}/renderer${ext} ${outputPath}/${execFileName}`, {stdio: 'inherit'}); 27 | 28 | -------------------------------------------------------------------------------- /scripts/run_tests.sh: -------------------------------------------------------------------------------- 1 | # Install / update dependencies 2 | yarn install --frozen-lockfile 3 | 4 | # Start Grafana 5 | docker compose -f ./devenv/docker/test/docker-compose.yaml up -d 6 | 7 | # Start testing 8 | yarn jest 9 | -------------------------------------------------------------------------------- /src/app.ts: -------------------------------------------------------------------------------- 1 | import { startTracing } from './tracing'; 2 | import * as path from 'path'; 3 | import * as _ from 'lodash'; 4 | import * as fs from 'fs'; 5 | import { Browser, computeExecutablePath } from '@puppeteer/browsers'; 6 | import { RenderGRPCPluginV2 } from './plugin/v2/grpc_plugin'; 7 | import { HttpServer } from './service/http-server'; 8 | import { populateServiceConfigFromEnv, ServiceConfig, defaultServiceConfig } from './service/config'; 9 | import { populatePluginConfigFromEnv, PluginConfig, defaultPluginConfig } from './plugin/v2/config'; 10 | import { ConsoleLogger, PluginLogger } from './logger'; 11 | import * as minimist from 'minimist'; 12 | import { serve } from './node-plugin'; 13 | import { createSanitizer } from './sanitizer/Sanitizer'; 14 | import { getConfig } from './config/config'; 15 | 16 | async function main() { 17 | const argv = minimist(process.argv.slice(2)); 18 | const env = Object.assign({}, process.env); 19 | const command = argv._[0]; 20 | 21 | if (command === undefined) { 22 | const config = getConfig() as PluginConfig; 23 | const logger = new PluginLogger(); 24 | 25 | if (config.rendering.tracing.url) { 26 | startTracing(logger); 27 | } 28 | 29 | if (!config.rendering.chromeBin && (process as any).pkg) { 30 | const execPath = path.dirname(process.execPath); 31 | const chromeInfoFile = fs.readFileSync(path.resolve(execPath, 'chrome-info.json'), 'utf8'); 32 | const chromeInfo = JSON.parse(chromeInfoFile); 33 | 34 | config.rendering.chromeBin = computeExecutablePath({ 35 | cacheDir: path.dirname(process.execPath), 36 | browser: Browser.CHROMEHEADLESSSHELL, 37 | buildId: chromeInfo.buildId, 38 | }); 39 | logger.debug(`Setting chromeBin to ${config.rendering.chromeBin}`); 40 | } 41 | 42 | await serve({ 43 | handshakeConfig: { 44 | protocolVersion: 2, 45 | magicCookieKey: 'grafana_plugin_type', 46 | 
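        // The magic cookie pair is a handshake sanity check rather than a security
        // measure: serve() in src/node-plugin/lib/server.ts refuses to start unless the
        // launching process exported grafana_plugin_type=datasource in the environment,
        // which is what grafana-server is expected to do when it spawns the renderer as
        // a backend plugin.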
magicCookieValue: 'datasource', 47 | }, 48 | versionedPlugins: { 49 | 2: { 50 | renderer: new RenderGRPCPluginV2(config, logger), 51 | }, 52 | }, 53 | logger: logger, 54 | grpcHost: config.plugin.grpc.host, 55 | grpcPort: config.plugin.grpc.port, 56 | }); 57 | } else if (command === 'server') { 58 | const config = getConfig() as ServiceConfig; 59 | const logger = new ConsoleLogger(config.service.logging); 60 | 61 | if (config.rendering.tracing.url) { 62 | startTracing(logger); 63 | } 64 | 65 | const sanitizer = createSanitizer(); 66 | const server = new HttpServer(config, logger, sanitizer); 67 | await server.start(); 68 | } else { 69 | console.log('Unknown command'); 70 | } 71 | } 72 | 73 | main().catch((err) => { 74 | const errorLog = { 75 | '@level': 'error', 76 | '@message': 'failed to start grafana-image-renderer', 77 | error: err.message, 78 | trace: err.stack, 79 | }; 80 | console.error(JSON.stringify(errorLog)); 81 | process.exit(1); 82 | }); 83 | -------------------------------------------------------------------------------- /src/browser/browser.test.ts: -------------------------------------------------------------------------------- 1 | import { ConsoleLogger } from '../logger'; 2 | import { RenderOptions } from '../types'; 3 | import { Browser } from './browser'; 4 | import * as promClient from 'prom-client'; 5 | 6 | jest.mock('../logger'); 7 | 8 | const renderingConfig = { 9 | args: ['--no-sandbox', '--disable-gpu'], 10 | ignoresHttpsErrors: false, 11 | acceptLanguage: 'fr-CA', 12 | width: 1000, 13 | height: 500, 14 | deviceScaleFactor: 1, 15 | maxWidth: 3000, 16 | maxHeight: 3000, 17 | maxDeviceScaleFactor: 4, 18 | pageZoomLevel: 1, 19 | mode: 'default', 20 | clustering: { 21 | monitor: false, 22 | mode: 'browser', 23 | maxConcurrency: 5, 24 | timeout: 30, 25 | }, 26 | verboseLogging: false, 27 | dumpio: false, 28 | timingMetrics: false, 29 | tracing: { 30 | url: '', 31 | serviceName: '', 32 | }, 33 | emulateNetworkConditions: false, 34 | }; 35 | 36 | const browser = new Browser(renderingConfig, new ConsoleLogger({ level: 'info' }), { 37 | durationHistogram: new promClient.Histogram({ 38 | name: 'grafana_image_renderer_step_duration_seconds', 39 | help: 'duration histogram of browser steps for rendering an image labeled with: step', 40 | labelNames: ['step'], 41 | buckets: [0.1, 0.3, 0.5, 1, 3, 5, 10, 20, 30], 42 | }), 43 | }); 44 | 45 | describe('Test validateRenderOptions', () => { 46 | it('should fail when passing a socket URL', () => { 47 | const fn = () => { 48 | browser.validateRenderOptions({ 49 | url: 'socket://localhost', 50 | filePath: '', 51 | timeout: 0, 52 | renderKey: '', 53 | domain: '', 54 | }); 55 | }; 56 | 57 | expect(fn).toThrow(Error); 58 | }); 59 | 60 | it('should use accept-language header if it exists', () => { 61 | let options: RenderOptions = { 62 | url: 'http://localhost', 63 | filePath: '', 64 | timeout: 0, 65 | renderKey: '', 66 | headers: { 67 | 'Accept-Language': 'en-US', 68 | }, 69 | domain: '', 70 | }; 71 | 72 | browser.validateRenderOptions(options); 73 | 74 | expect(options.headers?.['Accept-Language']).toEqual('en-US'); 75 | }); 76 | 77 | it('should use acceptLanguage configuration if no header is given', () => { 78 | let options: RenderOptions = { 79 | url: 'http://localhost', 80 | filePath: '', 81 | timeout: 0, 82 | renderKey: '', 83 | domain: '', 84 | }; 85 | 86 | browser.validateRenderOptions(options); 87 | 88 | expect(options.headers?.['Accept-Language']).toEqual(renderingConfig.acceptLanguage); 89 | }); 90 | 91 | it('should use timeout 
option if given', () => { 92 | let options: RenderOptions = { 93 | url: 'http://localhost', 94 | filePath: '', 95 | timeout: 5, 96 | renderKey: '', 97 | domain: '', 98 | }; 99 | 100 | browser.validateRenderOptions(options); 101 | 102 | expect(options.timeout).toEqual(5); 103 | }); 104 | 105 | it('should use default timeout if none is given', () => { 106 | let options: RenderOptions = { 107 | url: 'http://localhost', 108 | filePath: '', 109 | timeout: 0, 110 | renderKey: '', 111 | domain: '', 112 | }; 113 | 114 | browser.validateRenderOptions(options); 115 | 116 | expect(options.timeout).toEqual(30); 117 | }); 118 | }); 119 | -------------------------------------------------------------------------------- /src/browser/clustered.ts: -------------------------------------------------------------------------------- 1 | import { Cluster as PoolpeteerCluster } from 'poolpeteer'; 2 | import { Cluster as PuppeteerCluster } from 'puppeteer-cluster'; 3 | import { ImageRenderOptions, RenderOptions } from '../types'; 4 | import { Browser, RenderResponse, RenderCSVResponse, Metrics } from './browser'; 5 | import { Logger } from '../logger'; 6 | import { RenderingConfig, ClusteringConfig } from '../config/rendering'; 7 | 8 | enum RenderType { 9 | CSV = 'csv', 10 | PNG = 'png', 11 | } 12 | 13 | interface ClusterOptions { 14 | groupId?: string; 15 | options: RenderOptions | ImageRenderOptions; 16 | renderType: RenderType; 17 | signal: AbortSignal; 18 | } 19 | 20 | type ClusterResponse = RenderResponse | RenderCSVResponse; 21 | 22 | const contextPerRenderKey = 'contextPerRenderKey'; 23 | 24 | type Cluster = PuppeteerCluster | PoolpeteerCluster; 25 | 26 | export class ClusteredBrowser extends Browser { 27 | cluster: Cluster; 28 | clusteringConfig: ClusteringConfig; 29 | concurrency: number; 30 | 31 | constructor(config: RenderingConfig, log: Logger, metrics: Metrics) { 32 | super(config, log, metrics); 33 | 34 | this.clusteringConfig = config.clustering; 35 | this.concurrency = PuppeteerCluster.CONCURRENCY_BROWSER; 36 | 37 | if (this.clusteringConfig.mode === 'context') { 38 | this.concurrency = PuppeteerCluster.CONCURRENCY_CONTEXT; 39 | } 40 | 41 | if (this.clusteringConfig.mode === contextPerRenderKey) { 42 | this.concurrency = PoolpeteerCluster.CONCURRENCY_CONTEXT_PER_REQUEST_GROUP; 43 | } 44 | } 45 | 46 | shouldUsePoolpeteer(): boolean { 47 | return this.clusteringConfig.mode === contextPerRenderKey; 48 | } 49 | 50 | async createCluster(): Promise> { 51 | const launcherOptions = this.getLauncherOptions({}); 52 | 53 | const clusterOptions = { 54 | concurrency: this.concurrency, 55 | workerShutdownTimeout: 5000, 56 | monitor: this.clusteringConfig.monitor, 57 | maxConcurrency: this.clusteringConfig.maxConcurrency, 58 | timeout: this.clusteringConfig.timeout * 1000, 59 | puppeteerOptions: launcherOptions, 60 | }; 61 | 62 | // TODO use poolpeteer by default after initial release and testing (8.5?) 
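    // Mode recap: 'browser' keeps one Chromium instance per worker, 'context' shares a
    // single browser and gives each worker its own incognito context, and
    // 'contextPerRenderKey' switches to poolpeteer so contexts are pooled per request
    // group (domain + renderKey, see getGroupId below). Only that last mode takes the
    // poolpeteer branch here.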
63 | if (this.shouldUsePoolpeteer()) { 64 | this.log.debug('Launching Browser cluster with poolpeteer'); 65 | return PoolpeteerCluster.launch(clusterOptions); 66 | } 67 | 68 | this.log.debug('Launching Browser cluster with puppeteer-cluster'); 69 | return PuppeteerCluster.launch(clusterOptions); 70 | } 71 | 72 | async start(): Promise { 73 | this.cluster = await this.createCluster(); 74 | await this.cluster.task(async ({ page, data }) => { 75 | const { options, renderType, signal } = data; 76 | if (options.timezone) { 77 | // set timezone 78 | await page.emulateTimezone(options.timezone); 79 | } 80 | 81 | try { 82 | await this.addPageListeners(page, options.headers); 83 | switch (renderType) { 84 | case RenderType.CSV: 85 | return await this.exportCSV(page, options, signal); 86 | case RenderType.PNG: 87 | default: 88 | return await this.takeScreenshot(page, options as ImageRenderOptions, signal); 89 | } 90 | } finally { 91 | this.removePageListeners(page); 92 | } 93 | }); 94 | } 95 | 96 | private getGroupId = (options: ImageRenderOptions | RenderOptions) => { 97 | if (this.clusteringConfig.mode === contextPerRenderKey) { 98 | return `${options.domain}${options.renderKey}`; 99 | } 100 | 101 | return undefined; 102 | }; 103 | 104 | async render(options: ImageRenderOptions, signal: AbortSignal): Promise { 105 | this.validateImageOptions(options); 106 | return this.cluster.execute({ groupId: this.getGroupId(options), options, renderType: RenderType.PNG, signal }); 107 | } 108 | 109 | async renderCSV(options: RenderOptions, signal: AbortSignal): Promise { 110 | this.validateRenderOptions(options); 111 | return this.cluster.execute({ groupId: this.getGroupId(options), options, renderType: RenderType.CSV, signal }); 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /src/browser/error.ts: -------------------------------------------------------------------------------- 1 | export class StepTimeoutError extends Error { 2 | constructor(step) { 3 | super('Timeout error while performing step: ' + step); 4 | this.name = 'TimeoutError'; 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /src/browser/index.ts: -------------------------------------------------------------------------------- 1 | import { RenderingConfig } from '../config/rendering'; 2 | import { Logger } from '../logger'; 3 | import { Browser, Metrics } from './browser'; 4 | import { ClusteredBrowser } from './clustered'; 5 | import { ReusableBrowser } from './reusable'; 6 | 7 | export function createBrowser(config: RenderingConfig, log: Logger, metrics: Metrics): Browser { 8 | if (config.mode === 'clustered') { 9 | log.info( 10 | 'using clustered browser', 11 | 'mode', 12 | config.clustering.mode, 13 | 'maxConcurrency', 14 | config.clustering.maxConcurrency, 15 | 'timeout', 16 | config.clustering.timeout 17 | ); 18 | return new ClusteredBrowser(config, log, metrics); 19 | } 20 | 21 | if (config.mode === 'reusable') { 22 | log.info('using reusable browser'); 23 | return new ReusableBrowser(config, log, metrics); 24 | } 25 | 26 | if (!config.args.includes('--disable-gpu')) { 27 | log.warn( 28 | 'using default mode without the --disable-gpu flag is not recommended as it can cause Puppeteer newPage function to freeze, leaving browsers open' 29 | ); 30 | } 31 | 32 | return new Browser(config, log, metrics); 33 | } 34 | 35 | export { Browser }; 36 | -------------------------------------------------------------------------------- 
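createBrowser() above picks the implementation from config.mode ('default', 'reusable' or 'clustered'). The following is a minimal sketch of how a caller wires it up, assuming it sits next to app.ts in src/; it mirrors the logger and prom-client histogram used in browser.test.ts and grpc_plugin.ts, and the dashboard URL and output path are placeholders, not values from this repo.

import * as promClient from 'prom-client';
import { createBrowser } from './browser';
import { ConsoleLogger } from './logger';
import { defaultRenderingConfig } from './config/rendering';

async function renderOnce() {
  const log = new ConsoleLogger({ level: 'info' });
  const browser = createBrowser(
    { ...defaultRenderingConfig, mode: 'reusable' }, // 'default' | 'reusable' | 'clustered'
    log,
    {
      durationHistogram: new promClient.Histogram({
        name: 'grafana_image_renderer_step_duration_seconds',
        help: 'duration histogram of browser steps for rendering an image labeled with: step',
        labelNames: ['step'],
        buckets: [0.1, 0.3, 0.5, 1, 3, 5, 10, 20, 30],
      }),
    }
  );

  // ReusableBrowser/ClusteredBrowser launch their shared browser or cluster here.
  await browser.start();

  await browser.render(
    {
      url: 'http://localhost:3000/d/placeholder-dashboard', // placeholder
      filePath: 'output.png', // placeholder
      width: 1000,
      height: 500,
      deviceScaleFactor: 1,
      timeout: 30,
      renderKey: '',
      domain: 'localhost',
    },
    new AbortController().signal
  );
  log.info('render finished', 'filePath', 'output.png');
}

renderOnce().catch((err) => console.error(err));

The same three-way mode switch is what RENDERING_MODE / GF_PLUGIN_RENDERING_MODE select at runtime (see src/config/rendering.ts).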
/src/browser/pdf.ts: -------------------------------------------------------------------------------- 1 | import * as puppeteer from 'puppeteer'; 2 | 3 | // Allow setting the more esoteric PDF options via URL parameters 4 | export function getPDFOptionsFromURL(url: string): puppeteer.PDFOptions { 5 | const urlParams = new URLSearchParams(url); 6 | return { 7 | landscape: urlParams.get('pdf.landscape') !== 'false', // defaults true 8 | format: (urlParams.get('pdf.format') as puppeteer.PaperFormat) ?? 'A4', 9 | omitBackground: urlParams.get('pdf.omitBackground') === 'true', // defaults false, 10 | printBackground: urlParams.get('pdf.printBackground') !== 'false', // defaults true, 11 | pageRanges: urlParams.get('pdf.pageRanges') ?? undefined, 12 | }; 13 | } 14 | -------------------------------------------------------------------------------- /src/browser/reusable.ts: -------------------------------------------------------------------------------- 1 | import * as puppeteer from 'puppeteer'; 2 | import { ImageRenderOptions, RenderOptions } from '../types'; 3 | import { Browser, RenderResponse, RenderCSVResponse, Metrics } from './browser'; 4 | import { Logger } from '../logger'; 5 | import { RenderingConfig } from '../config/rendering'; 6 | 7 | export class ReusableBrowser extends Browser { 8 | browser: puppeteer.Browser; 9 | 10 | constructor(config: RenderingConfig, log: Logger, metrics: Metrics) { 11 | super(config, log, metrics); 12 | } 13 | 14 | async start(): Promise { 15 | const launcherOptions = this.getLauncherOptions({}); 16 | this.browser = await puppeteer.launch(launcherOptions); 17 | } 18 | 19 | async render(options: ImageRenderOptions, signal: AbortSignal): Promise { 20 | let context: puppeteer.BrowserContext | undefined; 21 | let page: puppeteer.Page | undefined; 22 | 23 | try { 24 | page = await this.withMonitoring('newPage', async () => { 25 | this.validateImageOptions(options); 26 | context = await this.browser.createBrowserContext(); 27 | return context.newPage(); 28 | }); 29 | 30 | if (options.timezone) { 31 | // set timezone 32 | await page.emulateTimezone(options.timezone); 33 | } 34 | 35 | await this.addPageListeners(page, options.headers); 36 | 37 | return await this.takeScreenshot(page, options, signal); 38 | } finally { 39 | if (page) { 40 | this.removePageListeners(page); 41 | await page.close(); 42 | } 43 | if (context) { 44 | await context.close(); 45 | } 46 | } 47 | } 48 | 49 | async renderCSV(options: RenderOptions, signal: AbortSignal): Promise { 50 | let context: puppeteer.BrowserContext | undefined; 51 | let page: puppeteer.Page | undefined; 52 | 53 | try { 54 | this.validateRenderOptions(options); 55 | context = await this.browser.createBrowserContext(); 56 | page = await context.newPage(); 57 | 58 | if (options.timezone) { 59 | // set timezone 60 | await page.emulateTimezone(options.timezone); 61 | } 62 | 63 | await this.addPageListeners(page, options.headers); 64 | 65 | return await this.exportCSV(page, options, signal); 66 | } finally { 67 | if (page) { 68 | this.removePageListeners(page); 69 | await page.close(); 70 | } 71 | if (context) { 72 | await context.close(); 73 | } 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/config/config.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import * as _ from 'lodash'; 3 | import * as minimist from 'minimist'; 4 | import { defaultServiceConfig, populateServiceConfigFromEnv, 
ServiceConfig } from '../service/config'; 5 | import { defaultPluginConfig, populatePluginConfigFromEnv, PluginConfig } from '../plugin/v2/config'; 6 | 7 | export function getConfig(): PluginConfig | ServiceConfig { 8 | const argv = minimist(process.argv.slice(2)); 9 | const env = Object.assign({}, process.env); 10 | const command = argv._[0]; 11 | 12 | if (command === 'server') { 13 | let config: ServiceConfig = defaultServiceConfig; 14 | 15 | if (argv.config) { 16 | try { 17 | const fileConfig = readJSONFileSync(argv.config); 18 | config = _.merge(config, fileConfig); 19 | } catch (e) { 20 | console.error('failed to read config from path', argv.config, 'error', e); 21 | } 22 | } 23 | 24 | populateServiceConfigFromEnv(config, env); 25 | 26 | return config; 27 | } 28 | 29 | const config: PluginConfig = defaultPluginConfig; 30 | populatePluginConfigFromEnv(config, env); 31 | return config; 32 | } 33 | 34 | function readJSONFileSync(filePath: string) { 35 | const rawdata = fs.readFileSync(filePath, 'utf8'); 36 | return JSON.parse(rawdata); 37 | } 38 | -------------------------------------------------------------------------------- /src/config/rendering.ts: -------------------------------------------------------------------------------- 1 | export interface ClusteringConfig { 2 | monitor: boolean; 3 | mode: string; 4 | maxConcurrency: number; 5 | timeout: number; 6 | } 7 | 8 | // https://chromedevtools.github.io/devtools-protocol/tot/Network/#method-emulateNetworkConditions 9 | type NetworkConditions = { 10 | offline: boolean; 11 | downloadThroughput: number; 12 | uploadThroughput: number; 13 | latency: number; 14 | }; 15 | 16 | export interface TracesConfig { 17 | url: string; 18 | serviceName?: string; 19 | } 20 | 21 | export interface RenderingConfig { 22 | chromeBin?: string; 23 | args: string[]; 24 | ignoresHttpsErrors: boolean; 25 | timezone?: string; 26 | acceptLanguage?: string; 27 | width: number; 28 | height: number; 29 | deviceScaleFactor: number; 30 | maxWidth: number; 31 | maxHeight: number; 32 | maxDeviceScaleFactor: number; 33 | pageZoomLevel: number; 34 | mode: string; 35 | clustering: ClusteringConfig; 36 | verboseLogging: boolean; 37 | dumpio: boolean; 38 | timingMetrics: boolean; 39 | headed?: boolean; 40 | networkConditions?: NetworkConditions; 41 | emulateNetworkConditions: boolean; 42 | tracing: TracesConfig; 43 | } 44 | 45 | export const defaultRenderingConfig: RenderingConfig = { 46 | chromeBin: undefined, 47 | args: ['--no-sandbox', '--disable-gpu'], 48 | ignoresHttpsErrors: false, 49 | timezone: undefined, 50 | acceptLanguage: undefined, 51 | width: 1000, 52 | height: 500, 53 | headed: false, 54 | deviceScaleFactor: 1, 55 | maxWidth: 3000, 56 | maxHeight: 3000, 57 | maxDeviceScaleFactor: 4, 58 | pageZoomLevel: 1, 59 | mode: 'default', 60 | clustering: { 61 | monitor: false, 62 | mode: 'browser', 63 | maxConcurrency: 5, 64 | timeout: 30, 65 | }, 66 | emulateNetworkConditions: false, 67 | verboseLogging: false, 68 | dumpio: false, 69 | timingMetrics: false, 70 | tracing: { 71 | url: '', 72 | serviceName: '', 73 | }, 74 | }; 75 | 76 | export enum Mode { 77 | Plugin = 'plugin', 78 | Server = 'server', 79 | } 80 | 81 | type Keys = { 82 | [K in keyof T]?: T[K] extends object ? (T[K] extends any[] ? 
string : Keys) : string; 83 | }; 84 | 85 | const envConfig: Record> = { 86 | server: { 87 | chromeBin: 'CHROME_BIN', 88 | args: 'RENDERING_ARGS', 89 | ignoresHttpsErrors: 'IGNORE_HTTPS_ERRORS', 90 | timezone: 'BROWSER_TZ', 91 | acceptLanguage: 'RENDERING_LANGUAGE', 92 | width: 'RENDERING_VIEWPORT_WIDTH', 93 | height: 'RENDERING_VIEWPORT_HEIGHT', 94 | deviceScaleFactor: 'RENDERING_VIEWPORT_DEVICE_SCALE_FACTOR', 95 | maxWidth: 'RENDERING_VIEWPORT_MAX_WIDTH', 96 | maxHeight: 'RENDERING_VIEWPORT_MAX_HEIGHT', 97 | maxDeviceScaleFactor: 'RENDERING_VIEWPORT_MAX_DEVICE_SCALE_FACTOR', 98 | pageZoomLevel: 'RENDERING_VIEWPORT_PAGE_ZOOM_LEVEL', 99 | mode: 'RENDERING_MODE', 100 | clustering: { 101 | mode: 'RENDERING_CLUSTERING_MODE', 102 | maxConcurrency: 'RENDERING_CLUSTERING_MAX_CONCURRENCY', 103 | timeout: 'RENDERING_CLUSTERING_TIMEOUT', 104 | }, 105 | verboseLogging: 'RENDERING_VERBOSE_LOGGING', 106 | dumpio: 'RENDERING_DUMPIO', 107 | timingMetrics: 'RENDERING_TIMING_METRICS', 108 | tracing: { 109 | url: 'RENDERING_TRACING_URL', 110 | }, 111 | }, 112 | plugin: { 113 | chromeBin: 'GF_PLUGIN_RENDERING_CHROME_BIN', 114 | args: 'GF_PLUGIN_RENDERING_ARGS', 115 | ignoresHttpsErrors: 'GF_PLUGIN_RENDERING_IGNORE_HTTPS_ERRORS', 116 | timezone: 'GF_PLUGIN_RENDERING_TIMEZONE', 117 | acceptLanguage: 'GF_PLUGIN_RENDERING_LANGUAGE', 118 | width: 'GF_PLUGIN_RENDERING_VIEWPORT_WIDTH', 119 | height: 'GF_PLUGIN_RENDERING_VIEWPORT_HEIGHT', 120 | deviceScaleFactor: 'GF_PLUGIN_RENDERING_VIEWPORT_DEVICE_SCALE_FACTOR', 121 | maxWidth: 'GF_PLUGIN_RENDERING_VIEWPORT_MAX_WIDTH', 122 | maxHeight: 'GF_PLUGIN_RENDERING_VIEWPORT_MAX_HEIGHT', 123 | maxDeviceScaleFactor: 'GF_PLUGIN_RENDERING_VIEWPORT_MAX_DEVICE_SCALE_FACTOR', 124 | pageZoomLevel: 'GF_PLUGIN_RENDERING_VIEWPORT_PAGE_ZOOM_LEVEL', 125 | mode: 'GF_PLUGIN_RENDERING_MODE', 126 | clustering: { 127 | mode: 'GF_PLUGIN_RENDERING_CLUSTERING_MODE', 128 | maxConcurrency: 'GF_PLUGIN_RENDERING_CLUSTERING_MAX_CONCURRENCY', 129 | timeout: 'GF_PLUGIN_RENDERING_CLUSTERING_TIMEOUT', 130 | }, 131 | verboseLogging: 'GF_PLUGIN_RENDERING_VERBOSE_LOGGING', 132 | dumpio: 'GF_PLUGIN_RENDERING_DUMPIO', 133 | timingMetrics: 'GF_PLUGIN_RENDERING_TIMING_METRICS', 134 | tracing: { 135 | url: 'GF_PLUGIN_RENDERING_TRACING_URL', 136 | 137 | }, 138 | }, 139 | }; 140 | 141 | export function populateRenderingConfigFromEnv(config: RenderingConfig, env: NodeJS.ProcessEnv, mode: Mode) { 142 | const envKeys = envConfig[mode]; 143 | 144 | if (env[envKeys.chromeBin!]) { 145 | config.chromeBin = env[envKeys.chromeBin!]; 146 | } 147 | 148 | if (env[envKeys.args!]) { 149 | const args = env[envKeys.args!] as string; 150 | if (args.length > 0) { 151 | const argsList = args.split(','); 152 | if (argsList.length > 0) { 153 | config.args = argsList; 154 | } 155 | } 156 | } 157 | 158 | if (env[envKeys.ignoresHttpsErrors!]) { 159 | config.ignoresHttpsErrors = env[envKeys.ignoresHttpsErrors!] === 'true'; 160 | } 161 | 162 | if (env[envKeys.timezone!]) { 163 | config.timezone = env[envKeys.timezone!]; 164 | } else if (env['TZ']) { 165 | config.timezone = env['TZ']; 166 | } 167 | 168 | if (env[envKeys.acceptLanguage!]) { 169 | config.acceptLanguage = env[envKeys.acceptLanguage!]; 170 | } 171 | 172 | if (env[envKeys.width!]) { 173 | config.width = parseInt(env[envKeys.width!] as string, 10); 174 | } 175 | 176 | if (env[envKeys.height!]) { 177 | config.height = parseInt(env[envKeys.height!] 
as string, 10); 178 | } 179 | 180 | if (env[envKeys.deviceScaleFactor!]) { 181 | config.deviceScaleFactor = parseFloat(env[envKeys.deviceScaleFactor!] as string); 182 | } 183 | 184 | if (env[envKeys.maxWidth!]) { 185 | config.maxWidth = parseInt(env[envKeys.maxWidth!] as string, 10); 186 | } 187 | 188 | if (env[envKeys.maxHeight!]) { 189 | config.maxHeight = parseInt(env[envKeys.maxHeight!] as string, 10); 190 | } 191 | 192 | if (env[envKeys.maxDeviceScaleFactor!]) { 193 | config.maxDeviceScaleFactor = parseFloat(env[envKeys.maxDeviceScaleFactor!] as string); 194 | } 195 | 196 | if (env[envKeys.pageZoomLevel!]) { 197 | config.pageZoomLevel = parseFloat(env[envKeys.pageZoomLevel!] as string); 198 | } 199 | 200 | if (env[envKeys.mode!]) { 201 | config.mode = env[envKeys.mode!] as string; 202 | } 203 | 204 | if (env[envKeys.clustering?.mode!]) { 205 | config.clustering.mode = env[envKeys.clustering?.mode!] as string; 206 | } 207 | 208 | if (env[envKeys.clustering?.maxConcurrency!]) { 209 | config.clustering.maxConcurrency = parseInt(env[envKeys.clustering?.maxConcurrency!] as string, 10); 210 | } 211 | 212 | if (env[envKeys.clustering?.timeout!]) { 213 | config.clustering.timeout = parseInt(env[envKeys.clustering?.timeout!] as string, 10); 214 | } 215 | 216 | if (env[envKeys.verboseLogging!]) { 217 | config.verboseLogging = env[envKeys.verboseLogging!] === 'true'; 218 | } 219 | 220 | if (env[envKeys.dumpio!]) { 221 | config.dumpio = env[envKeys.dumpio!] === 'true'; 222 | } 223 | 224 | if (env[envKeys.timingMetrics!]) { 225 | config.timingMetrics = env[envKeys.timingMetrics!] === 'true'; 226 | } 227 | 228 | if (env[envKeys.tracing?.url!]) { 229 | config.tracing.url = env[envKeys.tracing?.url!] as string; 230 | } 231 | } 232 | -------------------------------------------------------------------------------- /src/config/security.ts: -------------------------------------------------------------------------------- 1 | export interface SecurityConfig { 2 | authToken: string | string[]; 3 | } 4 | 5 | export const isAuthTokenValid = (config: SecurityConfig, reqAuthToken: string): boolean => { 6 | let configToken = config.authToken || ['']; 7 | if (typeof configToken === 'string') { 8 | configToken = [configToken]; 9 | } 10 | 11 | return reqAuthToken !== '' && configToken.includes(reqAuthToken); 12 | }; 13 | -------------------------------------------------------------------------------- /src/exit.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from './logger'; 2 | 3 | type cleanUpFn = () => void; 4 | const cleanUpHandlers: cleanUpFn[] = []; 5 | 6 | export function registerExitCleanUp(fn) { 7 | cleanUpHandlers.push(fn); 8 | } 9 | 10 | export class ExitManager { 11 | constructor(private log: Logger) { 12 | process.stdin.resume(); //so the program will not close instantly 13 | 14 | //do something when app is closing 15 | process.on('exit', this.exitHandler.bind(this)); 16 | 17 | //catches ctrl+c event 18 | process.on('SIGINT', this.exitHandler.bind(this)); 19 | 20 | // catches "kill pid" (for example: nodemon restart) 21 | process.on('SIGUSR1', this.exitHandler.bind(this)); 22 | process.on('SIGUSR2', this.exitHandler.bind(this)); 23 | 24 | //catches uncaught exceptions 25 | process.on('uncaughtException', this.exitHandler.bind(this)); 26 | } 27 | 28 | exitHandler(options, err) { 29 | for (const fn of cleanUpHandlers) { 30 | try { 31 | fn(); 32 | } catch (err) { 33 | this.log.info('Failed to call cleanup function ' + err); 34 | } 35 | } 36 | if 
(err) { 37 | this.log.info(err); 38 | } 39 | process.exit(); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/logger.ts: -------------------------------------------------------------------------------- 1 | import * as winston from 'winston'; 2 | import { LoggingConfig } from './service/config'; 3 | import { context, trace } from '@opentelemetry/api'; 4 | 5 | export interface LogWriter { 6 | write(message, encoding); 7 | } 8 | 9 | export interface Logger { 10 | errorWriter: LogWriter; 11 | debugWriter: LogWriter; 12 | debug(message?: string, ...optionalParams: any[]); 13 | info(message?: string, ...optionalParams: any[]); 14 | warn(message?: string, ...optionalParams: any[]); 15 | error(message?: string, ...optionalParams: any[]); 16 | } 17 | 18 | export class ConsoleLogger implements Logger { 19 | errorWriter: LogWriter; 20 | debugWriter: LogWriter; 21 | logger: winston.Logger; 22 | 23 | constructor(config: LoggingConfig) { 24 | const transports: any[] = []; 25 | 26 | if (config.console) { 27 | const options: any = { 28 | exitOnError: false, 29 | }; 30 | if (config.console.level) { 31 | options.level = config.console.level; 32 | } 33 | const formatters: any[] = []; 34 | if (config.console.colorize) { 35 | formatters.push(winston.format.colorize()); 36 | } 37 | 38 | if (config.console.json) { 39 | formatters.push(winston.format.json()); 40 | } else { 41 | formatters.push(winston.format.align()); 42 | formatters.push(winston.format.simple()); 43 | } 44 | 45 | options.format = winston.format.combine(...(formatters as any)); 46 | transports.push(new winston.transports.Console(options)); 47 | } 48 | 49 | //@opentelemetry/instrumentation-winston auto inject trace-context into Winston log records 50 | 51 | this.logger = winston.createLogger({ 52 | level: config.level, 53 | exitOnError: false, 54 | transports: transports, 55 | }); 56 | 57 | this.errorWriter = { 58 | write: (message) => { 59 | this.logger.error(message); 60 | }, 61 | }; 62 | this.debugWriter = { 63 | write: (message) => { 64 | this.logger.debug(message); 65 | }, 66 | }; 67 | } 68 | 69 | private logEntry(level: string, message: string, ...optionalParams: any[]) { 70 | const meta: any = {}; 71 | if (optionalParams) { 72 | for (let n = 0; n < optionalParams.length; n += 2) { 73 | const key = optionalParams[n]; 74 | const value = optionalParams[n + 1]; 75 | 76 | if (key !== null && value !== null) { 77 | meta[key] = value; 78 | } 79 | } 80 | } 81 | 82 | this.logger.log(level, message, meta); 83 | } 84 | 85 | debug(message: string, ...optionalParams: any[]) { 86 | this.logEntry('debug', message, ...optionalParams); 87 | } 88 | 89 | info(message: string, ...optionalParams: any[]) { 90 | this.logEntry('info', message, ...optionalParams); 91 | } 92 | 93 | warn(message: string, ...optionalParams: any[]) { 94 | this.logEntry('warn', message, ...optionalParams); 95 | } 96 | 97 | error(message: string, ...optionalParams: any[]) { 98 | this.logEntry('error', message, ...optionalParams); 99 | } 100 | } 101 | 102 | export class PluginLogger implements Logger { 103 | errorWriter: LogWriter; 104 | debugWriter: LogWriter; 105 | 106 | private logEntry(level: string, message?: string, ...optionalParams: any[]) { 107 | const logEntry = { 108 | '@level': level, 109 | }; 110 | 111 | if (message) { 112 | logEntry['@message'] = message; 113 | } 114 | 115 | if (optionalParams) { 116 | for (let n = 0; n < optionalParams.length; n += 2) { 117 | const key = optionalParams[n]; 118 | const value = 
optionalParams[n + 1]; 119 | 120 | if (key !== null && value !== null) { 121 | logEntry[key] = value; 122 | } 123 | } 124 | } 125 | 126 | console.error(JSON.stringify(logEntry)); 127 | } 128 | 129 | debug(message?: string, ...optionalParams: any[]) { 130 | this.logEntry('debug', message, ...optionalParams); 131 | } 132 | 133 | info(message?: string, ...optionalParams: any[]) { 134 | this.logEntry('info', message, ...optionalParams); 135 | } 136 | 137 | warn(message?: string, ...optionalParams: any[]) { 138 | this.logEntry('warn', message, ...optionalParams); 139 | } 140 | 141 | error(message?: string, ...optionalParams: any[]) { 142 | this.logEntry('error', message, ...optionalParams); 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /src/node-plugin/index.ts: -------------------------------------------------------------------------------- 1 | import { defaultGRPCServer, serve } from './lib/server'; 2 | import { GrpcPlugin, ServeConfig } from './lib/types'; 3 | 4 | export { GrpcPlugin, ServeConfig, serve, defaultGRPCServer }; 5 | -------------------------------------------------------------------------------- /src/node-plugin/lib/server.ts: -------------------------------------------------------------------------------- 1 | import * as grpc from '@grpc/grpc-js'; 2 | import * as protoLoader from '@grpc/proto-loader'; 3 | import { coreProtocolVersion, PluginSet, VersionedPluginSet, ServeConfig } from './types'; 4 | import { PluginLogger } from '../../logger'; 5 | 6 | export const healthPackageDef = protoLoader.loadSync(__dirname + '/../../../proto/health.proto', { 7 | keepCase: true, 8 | longs: String, 9 | enums: String, 10 | defaults: true, 11 | oneofs: true, 12 | }); 13 | 14 | export const healthProtoDescriptor = grpc.loadPackageDefinition(healthPackageDef); 15 | 16 | export const defaultGRPCServer = () => new grpc.Server(); 17 | 18 | interface ProtocolNegotiation { 19 | protoVersion: number; 20 | pluginSet: PluginSet; 21 | } 22 | 23 | const protocolVersion = (opts: ServeConfig): ProtocolNegotiation => { 24 | let protoVersion = opts.handshakeConfig.protocolVersion; 25 | const pluginSet = opts.plugins; 26 | const env = Object.assign({}, process.env); 27 | 28 | let clientVersions: number[] = []; 29 | if (env['PLUGIN_PROTOCOL_VERSIONS']) { 30 | const protocolVersions = (env['PLUGIN_PROTOCOL_VERSIONS'] as string).split(','); 31 | for (let n = 0; n < protocolVersions.length; n++) { 32 | const protocolVersion = parseInt(protocolVersions[n], 10); 33 | clientVersions.push(protocolVersion); 34 | } 35 | } 36 | 37 | // We want to iterate in reverse order, to ensure we match the newest 38 | // compatible plugin version. 
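  // (Worth noting: Array.prototype.sort() without a comparator compares elements as
  // strings, e.g. [2, 10].sort() yields [10, 2]; with double-digit protocol versions a
  // numeric comparator such as sort((a, b) => a - b) would be needed before reverse().)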
39 | clientVersions = clientVersions.sort().reverse(); 40 | 41 | // set the old un-versioned fields as if they were versioned plugins 42 | if (!opts.versionedPlugins) { 43 | opts.versionedPlugins = {} as VersionedPluginSet; 44 | } 45 | 46 | if (pluginSet) { 47 | opts.versionedPlugins[protoVersion] = pluginSet; 48 | } 49 | 50 | // Sort the versions to make sure we match the latest first 51 | let versions: number[] = []; 52 | for (let n = 0; n < Object.keys(opts.versionedPlugins).length; n++) { 53 | const version = Object.keys(opts.versionedPlugins)[n]; 54 | versions.push(parseInt(version, 10)); 55 | } 56 | 57 | versions = versions.sort().reverse(); 58 | let versionedPluginSet: PluginSet = {}; 59 | 60 | for (let n = 0; n < versions.length; n++) { 61 | const version = versions[n]; 62 | // Record each version, since we guarantee that this returns valid 63 | // values even if they are not a protocol match. 64 | protoVersion = version; 65 | versionedPluginSet = opts.versionedPlugins[version]; 66 | 67 | for (let i = 0; i < clientVersions.length; i++) { 68 | const clientVersion = clientVersions[i]; 69 | if (clientVersion === protoVersion) { 70 | return { 71 | protoVersion, 72 | pluginSet: versionedPluginSet, 73 | }; 74 | } 75 | } 76 | } 77 | 78 | return { 79 | protoVersion, 80 | pluginSet: versionedPluginSet, 81 | }; 82 | }; 83 | 84 | export const serve = async (opts: ServeConfig) => { 85 | const env = Object.assign({}, process.env); 86 | opts.logger = opts.logger || new PluginLogger(); 87 | 88 | if (opts.handshakeConfig.magicCookieKey === '' || opts.handshakeConfig.magicCookieValue === '') { 89 | throw new Error( 90 | 'Misconfigured ServeConfig given to serve this plugin: no magic cookie key or value was set. Please notify the plugin author and report this as a bug.' 91 | ); 92 | } 93 | 94 | if (env[opts.handshakeConfig.magicCookieKey] !== opts.handshakeConfig.magicCookieValue) { 95 | throw new Error( 96 | 'This binary is a plugin. These are not meant to be executed directly. Please execute the program that consumes these plugins, which will load any plugins automatically' 97 | ); 98 | } 99 | 100 | // negotiate the version and plugins 101 | // start with default version in the handshake config 102 | const { protoVersion, pluginSet } = protocolVersion(opts); 103 | 104 | const server = new grpc.Server(); 105 | const grpcHealthV1: any = healthProtoDescriptor['grpc']['health']['v1']; 106 | server.addService(grpcHealthV1.Health.service, { 107 | check: (_: any, callback: any) => { 108 | callback(null, { status: 'SERVING' }); 109 | }, 110 | }); 111 | 112 | // Register all plugins onto the gRPC server. 
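  // Each entry in the negotiated pluginSet is a GrpcPlugin whose grpcServer() hook adds
  // its services to this server; RenderGRPCPluginV2, for instance, registers the
  // Renderer, Diagnostics and Sanitizer services here.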
113 | for (const key in pluginSet) { 114 | if (pluginSet.hasOwnProperty(key)) { 115 | const p = pluginSet[key]; 116 | await p.grpcServer(server); 117 | } 118 | } 119 | 120 | opts.grpcHost = opts.grpcHost || '127.0.0.1'; 121 | opts.grpcPort = opts.grpcPort || 0; 122 | 123 | return new Promise((resolve, reject) => { 124 | const address = `${opts.grpcHost}:${opts.grpcPort}`; 125 | server.bindAsync(address, grpc.ServerCredentials.createInsecure(), (error: Error | null, port: number) => { 126 | if (error) { 127 | reject(error); 128 | } 129 | if (port === 0) { 130 | reject(new Error(`failed to bind address=${address}, boundPortNumber=${port}`)); 131 | } 132 | 133 | server.start(); 134 | console.log(`${coreProtocolVersion}|${protoVersion}|tcp|${opts.grpcHost}:${port}|grpc`); 135 | resolve(); 136 | }); 137 | }); 138 | }; 139 | -------------------------------------------------------------------------------- /src/node-plugin/lib/types.ts: -------------------------------------------------------------------------------- 1 | import * as grpc from '@grpc/grpc-js'; 2 | import { Logger } from '../../logger'; 3 | 4 | export interface GrpcPlugin { 5 | grpcServer(server: grpc.Server): Promise; 6 | } 7 | 8 | // CoreProtocolVersion is the ProtocolVersion of the plugin system itself. 9 | // We will increment this whenever we change any protocol behavior. This 10 | // will invalidate any prior plugins but will at least allow us to iterate 11 | // on the core in a safe way. We will do our best to do this very 12 | // infrequently. 13 | export const coreProtocolVersion = 1; 14 | 15 | // HandshakeConfig is the configuration used by client and servers to 16 | // handshake before starting a plugin connection. This is embedded by 17 | // both ServeConfig and ClientConfig. 18 | // 19 | // In practice, the plugin host creates a HandshakeConfig that is exported 20 | // and plugins then can easily consume it. 21 | export interface HandshakeConfig { 22 | // ProtocolVersion is the version that clients must match on to 23 | // agree they can communicate. This should match the ProtocolVersion 24 | // set on ClientConfig when using a plugin. 25 | // This field is not required if VersionedPlugins are being used in the 26 | // Client or Server configurations. 27 | protocolVersion: number; 28 | // MagicCookieKey and value are used as a very basic verification 29 | // that a plugin is intended to be launched. This is not a security 30 | // measure, just a UX feature. If the magic cookie doesn't match, 31 | // we show human-friendly output. 32 | magicCookieKey: string; 33 | magicCookieValue: string; 34 | } 35 | 36 | export interface PluginSet { 37 | [key: string]: GrpcPlugin; 38 | } 39 | 40 | export interface VersionedPluginSet { 41 | [key: number]: PluginSet; 42 | } 43 | 44 | export interface ServeConfig { 45 | // HandshakeConfig is the configuration that must match clients. 46 | handshakeConfig: HandshakeConfig; 47 | 48 | // Plugins are the plugins that are served. 49 | // The implied version of this PluginSet is the Handshake.ProtocolVersion. 50 | plugins?: PluginSet; 51 | 52 | // VersionedPlugins is a map of PluginSets for specific protocol versions. 53 | // These can be used to negotiate a compatible version between client and 54 | // server. If this is set, Handshake.ProtocolVersion is not required. 55 | versionedPlugins?: VersionedPluginSet; 56 | 57 | // GRPCServer should be non-nil to enable serving the plugins over 58 | // gRPC. This is a function to create the server when needed with the 59 | // given server options. 
The server options populated by go-plugin will 60 | // be for TLS if set. You may modify the input slice. 61 | // 62 | // Note that the grpc.Server will automatically be registered with 63 | // the gRPC health checking service. This is not optional since go-plugin 64 | // relies on this to implement Ping(). 65 | grpcServer?(): grpc.Server; 66 | 67 | // Logger is used to pass a logger into the server. If none is provided the 68 | // server will create a default logger. 69 | // Logger hclog.Logger 70 | 71 | grpcHost?: string; 72 | grpcPort?: number; 73 | 74 | logger?: Logger; 75 | } 76 | -------------------------------------------------------------------------------- /src/plugin/v2/config.ts: -------------------------------------------------------------------------------- 1 | import { SecurityConfig } from '../../config/security'; 2 | import { defaultRenderingConfig, populateRenderingConfigFromEnv, Mode, RenderingConfig } from '../../config/rendering'; 3 | 4 | export interface PluginConfig { 5 | plugin: { 6 | grpc: { 7 | host: string; 8 | port: number; 9 | }; 10 | security: SecurityConfig; 11 | }; 12 | rendering: RenderingConfig; 13 | } 14 | 15 | export const defaultPluginConfig: PluginConfig = { 16 | plugin: { 17 | grpc: { 18 | host: '127.0.0.1', 19 | port: 0, 20 | }, 21 | security: { 22 | authToken: '-', 23 | }, 24 | }, 25 | rendering: defaultRenderingConfig, 26 | }; 27 | 28 | export function populatePluginConfigFromEnv(config: PluginConfig, env: NodeJS.ProcessEnv) { 29 | if (env['GF_PLUGIN_GRPC_HOST']) { 30 | config.plugin.grpc.host = env['GF_PLUGIN_GRPC_HOST'] as string; 31 | } 32 | 33 | if (env['GF_PLUGIN_GRPC_PORT']) { 34 | config.plugin.grpc.port = parseInt(env['GF_PLUGIN_GRPC_PORT'] as string, 10); 35 | } 36 | 37 | if (env['GF_PLUGIN_AUTH_TOKEN']) { 38 | const authToken = env['GF_PLUGIN_AUTH_TOKEN'] as string; 39 | config.plugin.security.authToken = authToken.includes(' ') ? 
authToken.split(' ') : authToken; 40 | } 41 | 42 | populateRenderingConfigFromEnv(config.rendering, env, Mode.Plugin); 43 | } 44 | -------------------------------------------------------------------------------- /src/plugin/v2/grpc_plugin.ts: -------------------------------------------------------------------------------- 1 | import * as grpc from '@grpc/grpc-js'; 2 | import * as protoLoader from '@grpc/proto-loader'; 3 | import { context, propagation } from '@opentelemetry/api'; 4 | import * as promClient from 'prom-client'; 5 | import { GrpcPlugin } from '../../node-plugin'; 6 | import { Logger } from '../../logger'; 7 | import { PluginConfig } from './config'; 8 | import { SecurityConfig, isAuthTokenValid } from '../../config/security'; 9 | import { createBrowser, Browser } from '../../browser'; 10 | import { HTTPHeaders, ImageRenderOptions, RenderOptions } from '../../types'; 11 | import { 12 | RenderRequest, 13 | RenderResponse, 14 | RenderCSVRequest, 15 | RenderCSVResponse, 16 | CheckHealthRequest, 17 | CheckHealthResponse, 18 | CollectMetricsRequest, 19 | CollectMetricsResponse, 20 | HealthStatus, 21 | GRPCSanitizeRequest, 22 | GRPCSanitizeResponse, 23 | } from './types'; 24 | import { createSanitizer, Sanitizer } from '../../sanitizer/Sanitizer'; 25 | import { SanitizeRequest } from '../../sanitizer/types'; 26 | import { Status } from '@grpc/grpc-js/build/src/constants'; 27 | 28 | const rendererV2PackageDef = protoLoader.loadSync(__dirname + '/../../../proto/rendererv2.proto', { 29 | keepCase: true, 30 | longs: String, 31 | enums: String, 32 | defaults: true, 33 | oneofs: true, 34 | }); 35 | 36 | const pluginV2PackageDef = protoLoader.loadSync(__dirname + '/../../../proto/pluginv2.proto', { 37 | keepCase: true, 38 | longs: String, 39 | enums: String, 40 | defaults: true, 41 | oneofs: true, 42 | }); 43 | 44 | const sanitizerPackageDef = protoLoader.loadSync(__dirname + '/../../../proto/sanitizer.proto', { 45 | keepCase: true, 46 | longs: String, 47 | enums: String, 48 | defaults: true, 49 | oneofs: true, 50 | }); 51 | 52 | interface TraceCarrier { 53 | traceparent?: string; 54 | tracestate?: string; 55 | } 56 | 57 | const rendererV2ProtoDescriptor = grpc.loadPackageDefinition(rendererV2PackageDef); 58 | const pluginV2ProtoDescriptor = grpc.loadPackageDefinition(pluginV2PackageDef); 59 | const sanitizerProtoDescriptor = grpc.loadPackageDefinition(sanitizerPackageDef); 60 | 61 | export class RenderGRPCPluginV2 implements GrpcPlugin { 62 | constructor(private config: PluginConfig, private log: Logger) {} 63 | 64 | async grpcServer(server: grpc.Server) { 65 | const metrics = setupMetrics(); 66 | const browser = createBrowser(this.config.rendering, this.log, metrics); 67 | const pluginService = new PluginGRPCServer(browser, this.log, createSanitizer(), this.config); 68 | 69 | const rendererServiceDef = rendererV2ProtoDescriptor['pluginextensionv2']['Renderer']['service']; 70 | server.addService(rendererServiceDef, pluginService as any); 71 | 72 | const pluginServiceDef = pluginV2ProtoDescriptor['pluginv2']['Diagnostics']['service']; 73 | server.addService(pluginServiceDef, pluginService as any); 74 | 75 | const sanitizerServiceDef = sanitizerProtoDescriptor['pluginextensionv2']['Sanitizer']['service']; 76 | server.addService(sanitizerServiceDef, pluginService as any); 77 | 78 | metrics.up.set(1); 79 | 80 | let browserVersion = 'unknown'; 81 | let labelValue = 1; 82 | 83 | try { 84 | browserVersion = await browser.getBrowserVersion(); 85 | } catch (err) { 86 | this.log.error('Failed to 
get browser version', 'err', err); 87 | labelValue = 0; 88 | } 89 | metrics.browserInfo.labels(browserVersion).set(labelValue); 90 | if (browserVersion !== 'unknown') { 91 | this.log.debug('Using browser version', 'browserVersion', browserVersion); 92 | } 93 | 94 | await pluginService.start(browserVersion); 95 | } 96 | } 97 | 98 | class PluginGRPCServer { 99 | private browserVersion: string | undefined; 100 | 101 | constructor(private browser: Browser, private log: Logger, private sanitizer: Sanitizer, private config: PluginConfig) {} 102 | 103 | async start(browserVersion?: string) { 104 | this.browserVersion = browserVersion; 105 | await this.browser.start(); 106 | } 107 | 108 | async render(call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData) { 109 | const abortController = new AbortController(); 110 | const { signal } = abortController; 111 | 112 | const req = call.request; 113 | 114 | if (!req) { 115 | return callback({ code: Status.INVALID_ARGUMENT, details: 'Request cannot be null' }); 116 | } 117 | 118 | if (!isAuthTokenValid(this.config.plugin.security, req.authToken)) { 119 | return callback({ code: Status.UNAUTHENTICATED, details: 'Unauthorized request' }); 120 | } 121 | 122 | if (req.url && !(req.url.startsWith('http://') || req.url.startsWith('https://'))) { 123 | return callback({ code: Status.INVALID_ARGUMENT, details: 'Forbidden query url protocol' }); 124 | } 125 | 126 | const options: ImageRenderOptions = { 127 | url: req.url, 128 | width: req.width, 129 | height: req.height, 130 | filePath: req.filePath, 131 | timeout: req.timeout, 132 | renderKey: req.renderKey, 133 | domain: req.domain, 134 | timezone: req.timezone, 135 | deviceScaleFactor: req.deviceScaleFactor, 136 | headers: this.getHeaders(req), 137 | encoding: req.encoding, 138 | }; 139 | 140 | this.log.debug('Render request received', 'url', options.url); 141 | call.on('cancelled', (err) => { 142 | this.log.debug('Connection closed', 'url', options.url, 'error', err); 143 | abortController.abort(); 144 | }); 145 | let errStr = ''; 146 | 147 | try { 148 | await this.browser.render(options, signal); 149 | } catch (err) { 150 | this.log.error('Render request failed', 'url', options.url, 'error', err.toString()); 151 | errStr = err.toString(); 152 | } 153 | callback(null, { error: errStr }); 154 | } 155 | 156 | async renderCsv(call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData) { 157 | const abortController = new AbortController(); 158 | const { signal } = abortController; 159 | 160 | const req = call.request; 161 | 162 | if (!req) { 163 | return callback({ code: Status.INVALID_ARGUMENT, details: 'Request cannot be null' }); 164 | } 165 | 166 | if (!isAuthTokenValid(this.config.plugin.security, req.authToken)) { 167 | return callback({ code: Status.UNAUTHENTICATED, details: 'Unauthorized request' }); 168 | } 169 | 170 | if (req.url && !(req.url.startsWith('http://') || req.url.startsWith('https://'))) { 171 | return callback({ code: Status.INVALID_ARGUMENT, details: 'Forbidden query url protocol' }); 172 | } 173 | 174 | const options: RenderOptions = { 175 | url: req.url, 176 | filePath: req.filePath, 177 | timeout: req.timeout, 178 | renderKey: req.renderKey, 179 | domain: req.domain, 180 | timezone: req.timezone, 181 | headers: this.getHeaders(req), 182 | }; 183 | 184 | this.log.debug('Render request received', 'url', options.url); 185 | call.on('cancelled', (err) => { 186 | this.log.debug('Connection closed', 'url', options.url, 'error', err); 187 | abortController.abort(); 188 | }); 189 | 190 | let 
errStr = ''; 191 | let fileName = ''; 192 | try { 193 | const result = await this.browser.renderCSV(options, signal); 194 | fileName = result.fileName || ''; 195 | } catch (err) { 196 | this.log.error('Render request failed', 'url', options.url, 'error', err.toString()); 197 | errStr = err.toString(); 198 | } 199 | callback(null, { error: errStr, fileName }); 200 | } 201 | 202 | async checkHealth(_: grpc.ServerUnaryCall<CheckHealthRequest, CheckHealthResponse>, callback: grpc.sendUnaryData<CheckHealthResponse>) { 203 | const jsonDetails = Buffer.from( 204 | JSON.stringify({ 205 | browserVersion: this.browserVersion, 206 | }) 207 | ); 208 | 209 | callback(null, { status: HealthStatus.OK, message: 'Success', jsonDetails: jsonDetails }); 210 | } 211 | 212 | async collectMetrics(_: grpc.ServerUnaryCall<CollectMetricsRequest, CollectMetricsResponse>, callback: grpc.sendUnaryData<CollectMetricsResponse>) { 213 | const payload = Buffer.from(await promClient.register.metrics()); 214 | callback(null, { metrics: { prometheus: payload } }); 215 | } 216 | 217 | async sanitize(call: grpc.ServerUnaryCall<GRPCSanitizeRequest, GRPCSanitizeResponse>, callback: grpc.sendUnaryData<GRPCSanitizeResponse>) { 218 | const grpcReq = call.request; 219 | 220 | if (!isAuthTokenValid(this.config.plugin.security, grpcReq.authToken)) { 221 | return callback({ code: Status.UNAUTHENTICATED, details: 'Unauthorized request' }); 222 | } 223 | 224 | const req: SanitizeRequest = { 225 | content: grpcReq.content, 226 | config: JSON.parse(grpcReq.config.toString()), 227 | configType: grpcReq.configType, 228 | }; 229 | 230 | this.log.debug('Sanitize request received', 'contentLength', req.content.length, 'name', grpcReq.filename); 231 | 232 | try { 233 | const sanitizeResponse = this.sanitizer.sanitize(req); 234 | callback(null, { error: '', sanitized: sanitizeResponse.sanitized }); 235 | } catch (e) { 236 | this.log.error('Sanitization failed', 'contentLength', req.content.length, 'name', grpcReq.filename, 'error', e.stack); 237 | callback(null, { error: e.stack, sanitized: Buffer.from('', 'binary') }); 238 | } 239 | } 240 | 241 | getHeaders(req: RenderRequest | RenderCSVRequest): HTTPHeaders { 242 | const headers: HTTPHeaders = {}; 243 | 244 | if (req.headers?.hasOwnProperty('Accept-Language')) { 245 | const h = req.headers['Accept-Language']; 246 | headers['Accept-Language'] = h.values.join(';'); 247 | } 248 | 249 | if (this.config.rendering.tracing.url) { 250 | const output: TraceCarrier = {}; 251 | propagation.inject(context.active(), output); 252 | const { traceparent, tracestate } = output; 253 | headers['traceparent'] = traceparent ?? ''; 254 | headers['tracestate'] = tracestate ?? 
''; 255 | } 256 | 257 | return headers; 258 | } 259 | } 260 | 261 | interface PluginMetrics { 262 | up: promClient.Gauge; 263 | browserInfo: promClient.Gauge; 264 | durationHistogram: promClient.Histogram; 265 | } 266 | 267 | const setupMetrics = (): PluginMetrics => { 268 | promClient.collectDefaultMetrics(); 269 | 270 | return { 271 | up: new promClient.Gauge({ 272 | name: 'up', 273 | help: '1 = up, 0 = not up', 274 | }), 275 | browserInfo: new promClient.Gauge({ 276 | name: 'grafana_image_renderer_browser_info', 277 | help: "A metric with a constant '1 value labeled by version of the browser in use", 278 | labelNames: ['version'], 279 | }), 280 | durationHistogram: new promClient.Histogram({ 281 | name: 'grafana_image_renderer_step_duration_seconds', 282 | help: 'duration histogram of browser steps for rendering an image labeled with: step', 283 | labelNames: ['step'], 284 | buckets: [0.1, 0.3, 0.5, 1, 3, 5, 10, 20, 30], 285 | }), 286 | }; 287 | }; 288 | -------------------------------------------------------------------------------- /src/plugin/v2/types.ts: -------------------------------------------------------------------------------- 1 | import { ConfigType } from '../../sanitizer/types'; 2 | 3 | export interface StringList { 4 | values: string[]; 5 | } 6 | 7 | export interface RenderRequest { 8 | url: string; 9 | width: number; 10 | height: number; 11 | deviceScaleFactor: number; 12 | filePath: string; 13 | renderKey: string; 14 | domain: string; 15 | timeout: number; 16 | timezone: string; 17 | headers: { 18 | [header: string]: StringList; 19 | }; 20 | authToken: string; 21 | encoding: string; 22 | } 23 | 24 | export interface RenderResponse { 25 | error?: any; 26 | } 27 | 28 | export interface RenderCSVRequest { 29 | url: string; 30 | filePath: string; 31 | renderKey: string; 32 | domain: string; 33 | timeout: number; 34 | timezone: string; 35 | headers: { 36 | [header: string]: StringList; 37 | }; 38 | authToken: string; 39 | } 40 | 41 | export interface RenderCSVResponse { 42 | error?: any; 43 | fileName?: string; 44 | } 45 | 46 | export interface CollectMetricsRequest {} 47 | 48 | export interface MetricsPayload { 49 | prometheus: Buffer; 50 | } 51 | 52 | export interface CollectMetricsResponse { 53 | metrics: MetricsPayload; 54 | } 55 | 56 | export interface CheckHealthRequest { 57 | config: any; 58 | } 59 | 60 | export enum HealthStatus { 61 | UNKNOWN = 0, 62 | OK = 1, 63 | ERROR = 2, 64 | } 65 | 66 | export interface CheckHealthResponse { 67 | status: HealthStatus; 68 | message?: string; 69 | jsonDetails?: Buffer; 70 | } 71 | 72 | export interface GRPCSanitizeRequest { 73 | filename: string; 74 | content: Buffer; 75 | configType: ConfigType; 76 | config: Buffer; 77 | allowAllLinksInSvgUseTags: boolean; 78 | authToken: string; 79 | } 80 | 81 | export interface GRPCSanitizeResponse { 82 | error: string; 83 | sanitized: Buffer; 84 | } 85 | -------------------------------------------------------------------------------- /src/sanitizer/Sanitizer.ts: -------------------------------------------------------------------------------- 1 | import * as DOMPurify from 'dompurify'; 2 | import { JSDOM } from 'jsdom'; 3 | import { ConfigType, isDOMPurifyConfig, SanitizeRequest, SanitizeResponse } from './types'; 4 | 5 | const svgTags = { 6 | altGlyphDef: /(<\/?)altGlyphDef([> ])/gi, 7 | altGlyphItem: /(<\/?)altGlyphItem([> ])/gi, 8 | altGlyph: /(<\/?)altGlyph([> ])/gi, 9 | animateColor: /(<\/?)animateColor([> ])/gi, 10 | animateMotion: /(<\/?)animateMotion([> ])/gi, 11 | animateTransform: 
/(<\/?)animateTransform([> ])/gi, 12 | clipPath: /(<\/?)clipPath([> ])/gi, 13 | feBlend: /(<\/?)feBlend([> ])/gi, 14 | feColorMatrix: /(<\/?)feColorMatrix([> ])/gi, 15 | feComponentTransfer: /(<\/?)feComponentTransfer([> ])/gi, 16 | feComposite: /(<\/?)feComposite([> ])/gi, 17 | feConvolveMatrix: /(<\/?)feConvolveMatrix([> ])/gi, 18 | feDiffuseLighting: /(<\/?)feDiffuseLighting([> ])/gi, 19 | feDisplacementMap: /(<\/?)feDisplacementMap([> ])/gi, 20 | feDistantLight: /(<\/?)feDistantLight([> ])/gi, 21 | feDropShadow: /(<\/?)feDropShadow([> ])/gi, 22 | feFlood: /(<\/?)feFlood([> ])/gi, 23 | feFuncA: /(<\/?)feFuncA([> ])/gi, 24 | feFuncB: /(<\/?)feFuncB([> ])/gi, 25 | feFuncG: /(<\/?)feFuncG([> ])/gi, 26 | feFuncR: /(<\/?)feFuncR([> ])/gi, 27 | feGaussianBlur: /(<\/?)feGaussianBlur([> ])/gi, 28 | feImage: /(<\/?)feImage([> ])/gi, 29 | feMergeNode: /(<\/?)feMergeNode([> ])/gi, 30 | feMerge: /(<\/?)feMerge([> ])/gi, 31 | feMorphology: /(<\/?)feMorphology([> ])/gi, 32 | feOffset: /(<\/?)feOffset([> ])/gi, 33 | fePointLight: /(<\/?)fePointLight([> ])/gi, 34 | feSpecularLighting: /(<\/?)feSpecularLighting([> ])/gi, 35 | feSpotLight: /(<\/?)feSpotLight([> ])/gi, 36 | feTile: /(<\/?)feTile([> ])/gi, 37 | feTurbulence: /(<\/?)feTurbulence([> ])/gi, 38 | foreignObject: /(<\/?)foreignObject([> ])/gi, 39 | glyphRef: /(<\/?)glyphRef([> ])/gi, 40 | linearGradient: /(<\/?)linearGradient([> ])/gi, 41 | radialGradient: /(<\/?)radialGradient([> ])/gi, 42 | textPath: /(<\/?)textPath([> ])/gi, 43 | }; 44 | 45 | const svgFilePrefix = ''; 46 | 47 | export class Sanitizer { 48 | constructor(private domPurify: DOMPurify.DOMPurify) {} 49 | 50 | private sanitizeUseTagHook = (node) => { 51 | if (node.nodeName === 'use') { 52 | if ( 53 | (node.hasAttribute('xlink:href') && !node.getAttribute('xlink:href').match(/^#/)) || 54 | (node.hasAttribute('href') && !node.getAttribute('href').match(/^#/)) 55 | ) { 56 | node.remove(); 57 | } 58 | } 59 | }; 60 | 61 | private sanitizeSvg = (req: SanitizeRequest): SanitizeResponse => { 62 | if (req.config.allowAllLinksInSvgUseTags !== true) { 63 | this.domPurify.addHook('afterSanitizeAttributes', this.sanitizeUseTagHook); 64 | } 65 | 66 | const dirty = req.content.toString(); 67 | let sanitized = this.domPurify.sanitize(dirty, req.config.domPurifyConfig ?? {}) as string; 68 | 69 | // ensure tags have the correct capitalization, as dompurify converts them to lowercase 70 | Object.entries(svgTags).forEach(([tag, regex]) => { 71 | sanitized = sanitized.replace(regex, '$1' + tag + '$2'); 72 | }); 73 | 74 | this.domPurify.removeHook('afterSanitizeAttributes'); 75 | return { sanitized: Buffer.from([svgFilePrefix, sanitized].join('\n')) }; 76 | }; 77 | 78 | sanitize = (req: SanitizeRequest): SanitizeResponse => { 79 | const configType = req.configType; 80 | if (!isDOMPurifyConfig(req)) { 81 | throw new Error('unsupported config type: ' + configType); 82 | } 83 | 84 | if (req.config.domPurifyConfig?.USE_PROFILES?.['svg']) { 85 | return this.sanitizeSvg(req); 86 | } 87 | 88 | const dirty = req.content.toString(); 89 | const sanitized = this.domPurify.sanitize(dirty, req.config.domPurifyConfig ?? 
{}) as string; 90 | return { 91 | sanitized: Buffer.from(sanitized), 92 | }; 93 | }; 94 | } 95 | 96 | export const createSanitizer = () => { 97 | return new Sanitizer(DOMPurify(new JSDOM('').window as any)); 98 | }; 99 | -------------------------------------------------------------------------------- /src/sanitizer/types.ts: -------------------------------------------------------------------------------- 1 | import * as DOMPurify from 'dompurify'; 2 | 3 | export enum ConfigType { 4 | DOMPurify = 'DOMPurify', 5 | } 6 | 7 | export const isDOMPurifyConfig = (req: SanitizeRequest): req is SanitizeRequest => req.configType === ConfigType.DOMPurify; 8 | 9 | const allConfigTypes = Object.values(ConfigType); 10 | 11 | export type ConfigTypeToConfig = { 12 | [ConfigType.DOMPurify]: { 13 | domPurifyConfig?: DOMPurify.Config; 14 | allowAllLinksInSvgUseTags?: boolean; 15 | }; 16 | }; 17 | 18 | export const isSanitizeRequest = (obj: any): obj is SanitizeRequest => { 19 | return Boolean(obj?.content) && allConfigTypes.includes(obj.configType) && typeof obj.config === 'object'; 20 | }; 21 | 22 | export type SanitizeRequest = { 23 | content: Buffer; 24 | configType: configType; 25 | config: ConfigTypeToConfig[configType]; 26 | }; 27 | 28 | export type SanitizeResponse = { 29 | sanitized: Buffer; 30 | }; 31 | -------------------------------------------------------------------------------- /src/service/config.ts: -------------------------------------------------------------------------------- 1 | import { defaultRenderingConfig, populateRenderingConfigFromEnv, Mode, RenderingConfig } from '../config/rendering'; 2 | import { SecurityConfig } from '../config/security'; 3 | 4 | export interface MetricsConfig { 5 | enabled: boolean; 6 | collectDefaultMetrics: boolean; 7 | requestDurationBuckets: number[]; 8 | } 9 | 10 | export interface ConsoleLoggerConfig { 11 | level?: string; 12 | json: boolean; 13 | colorize: boolean; 14 | } 15 | 16 | export interface LoggingConfig { 17 | level: string; 18 | console?: ConsoleLoggerConfig; 19 | } 20 | 21 | export interface RateLimiterConfig { 22 | enabled: boolean; 23 | redisHost?: string; 24 | redisPort?: number; 25 | requestsPerSecond: number; 26 | } 27 | 28 | export interface ServiceConfig { 29 | service: { 30 | host?: string; 31 | port: number; 32 | protocol?: string; 33 | certFile?: string; 34 | certKey?: string; 35 | minTLSVersion?: string; 36 | metrics: MetricsConfig; 37 | logging: LoggingConfig; 38 | security: SecurityConfig; 39 | rateLimiter: RateLimiterConfig; 40 | }; 41 | rendering: RenderingConfig; 42 | } 43 | 44 | export const defaultServiceConfig: ServiceConfig = { 45 | service: { 46 | host: undefined, 47 | port: 8081, 48 | protocol: 'http', 49 | metrics: { 50 | enabled: false, 51 | collectDefaultMetrics: true, 52 | requestDurationBuckets: [0.5, 1, 3, 5, 7, 10, 20, 30, 60], 53 | }, 54 | logging: { 55 | level: 'info', 56 | console: { 57 | json: true, 58 | colorize: false, 59 | }, 60 | }, 61 | security: { 62 | authToken: '-', 63 | }, 64 | rateLimiter: { 65 | enabled: false, 66 | requestsPerSecond: 5, 67 | }, 68 | }, 69 | rendering: defaultRenderingConfig, 70 | }; 71 | 72 | export function populateServiceConfigFromEnv(config: ServiceConfig, env: NodeJS.ProcessEnv) { 73 | if (env['HTTP_HOST']) { 74 | config.service.host = env['HTTP_HOST']; 75 | } 76 | 77 | if (env['HTTP_PORT']) { 78 | config.service.port = parseInt(env['HTTP_PORT'] as string, 10); 79 | } 80 | 81 | if (env['HTTP_PROTOCOL']) { 82 | config.service.protocol = env['HTTP_PROTOCOL']; 83 | } 84 | 85 | if 
(env['HTTP_CERT_FILE']) { 86 | config.service.certFile = env['HTTP_CERT_FILE']; 87 | } 88 | 89 | if (env['HTTP_CERT_KEY']) { 90 | config.service.certKey = env['HTTP_CERT_KEY']; 91 | } 92 | 93 | if (env['HTTP_MIN_TLS_VERSION']) { 94 | config.service.minTLSVersion = env['HTTP_MIN_TLS_VERSION']; 95 | } 96 | 97 | if (env['AUTH_TOKEN']) { 98 | const authToken = env['AUTH_TOKEN'] as string; 99 | config.service.security.authToken = authToken.includes(' ') ? authToken.split(' ') : authToken; 100 | } 101 | 102 | if (env['LOG_LEVEL']) { 103 | config.service.logging.level = env['LOG_LEVEL'] as string; 104 | } 105 | 106 | if (env['ENABLE_METRICS']) { 107 | config.service.metrics.enabled = env['ENABLE_METRICS'] === 'true'; 108 | } 109 | 110 | populateRenderingConfigFromEnv(config.rendering, env, Mode.Server); 111 | } 112 | -------------------------------------------------------------------------------- /src/service/http-server.integration.test.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import * as jwt from 'jsonwebtoken'; 3 | import * as path from 'path'; 4 | import * as request from 'supertest'; 5 | import * as pixelmatch from 'pixelmatch'; 6 | import * as fastPng from 'fast-png'; 7 | import * as promClient from 'prom-client'; 8 | 9 | import { HttpServer } from './http-server'; 10 | import { ConsoleLogger } from '../logger'; 11 | import { ServiceConfig } from './config'; 12 | import { createSanitizer } from '../sanitizer/Sanitizer'; 13 | 14 | const testDashboardUid = 'd10881ec-0d35-4909-8de7-6ab563a9ab29'; 15 | const allPanelsDashboardUid = 'edlopzu6hn4lcd'; 16 | const panelIds = { 17 | graph: 1, 18 | table: 2, 19 | error: 3, 20 | slow: 4, 21 | }; 22 | const renderKey = jwt.sign( 23 | { 24 | renderUser: { 25 | org_id: 1, 26 | user_id: 1, 27 | org_role: 'Admin', 28 | }, 29 | }, 30 | '-', 31 | { algorithm: 'HS512' } 32 | ); 33 | 34 | const goldenFilesFolder = './tests/testdata'; 35 | const defaultServiceConfig: ServiceConfig = { 36 | service: { 37 | host: undefined, 38 | port: 8081, 39 | metrics: { 40 | enabled: false, 41 | collectDefaultMetrics: true, 42 | requestDurationBuckets: [0.5, 1, 3, 5, 7, 10, 20, 30, 60], 43 | }, 44 | logging: { 45 | level: 'debug', 46 | console: { 47 | json: true, 48 | colorize: false, 49 | }, 50 | }, 51 | security: { 52 | authToken: '-', 53 | }, 54 | rateLimiter: { 55 | enabled: false, 56 | requestsPerSecond: 5, 57 | }, 58 | }, 59 | rendering: { 60 | args: ['--no-sandbox', '--disable-gpu'], 61 | ignoresHttpsErrors: false, 62 | timezone: 'Europe/Paris', 63 | width: 500, 64 | height: 300, 65 | deviceScaleFactor: 1, 66 | maxWidth: 1000, 67 | maxHeight: 500, 68 | maxDeviceScaleFactor: 2, 69 | pageZoomLevel: 1, 70 | mode: 'default', 71 | clustering: { 72 | monitor: false, 73 | mode: 'browser', 74 | maxConcurrency: 5, 75 | timeout: 30, 76 | }, 77 | timingMetrics: false, 78 | tracing: { 79 | url: '', 80 | serviceName: '', 81 | }, 82 | emulateNetworkConditions: false, 83 | // Set to true to get more logs 84 | verboseLogging: false, // true, 85 | dumpio: false, // true, 86 | // Uncoment to see what's happening in the browser during the tests 87 | // headed: true, 88 | }, 89 | }; 90 | 91 | const imageWidth = 500; 92 | const imageHeight = 300; 93 | const imageDiffThreshold = 0.01 * imageHeight * imageWidth; 94 | const matchingThreshold = 0.3; 95 | 96 | const sanitizer = createSanitizer(); 97 | let server; 98 | 99 | let domain = 'localhost'; 100 | function getGrafanaEndpoint(domain: string) { 101 | return 
`http://${domain}:3000`; 102 | } 103 | 104 | let envSettings = { 105 | saveDiff: false, 106 | updateGolden: false, 107 | }; 108 | 109 | beforeEach(async () => { 110 | return setupTestEnv(); 111 | }); 112 | 113 | afterEach(() => { 114 | return cleanUpTestEnv(); 115 | }); 116 | 117 | function setupTestEnv(config?: ServiceConfig) { 118 | if (process.env['CI'] === 'true') { 119 | domain = 'grafana'; 120 | } 121 | 122 | envSettings.saveDiff = process.env['SAVE_DIFF'] === 'true'; 123 | envSettings.updateGolden = process.env['UPDATE_GOLDEN'] === 'true'; 124 | 125 | const currentConfig = config ?? defaultServiceConfig; 126 | server = new HttpServer(currentConfig, new ConsoleLogger(currentConfig.service.logging), sanitizer); 127 | return server.start(); 128 | } 129 | 130 | function cleanUpTestEnv() { 131 | promClient.register.clear(); 132 | return server.close(); 133 | } 134 | 135 | describe('Test /render/version', () => { 136 | it('should respond with unauthorized', () => { 137 | return request(server.app).get('/render/version').expect(401); 138 | }); 139 | 140 | it('should respond with the current plugin version', () => { 141 | const pluginInfo = require('../../plugin.json'); 142 | return request(server.app).get('/render/version').set('X-Auth-Token', '-').expect(200, { version: pluginInfo.info.version }); 143 | }); 144 | }); 145 | 146 | describe('Test /render', () => { 147 | it('should respond with unauthorized', () => { 148 | return request(server.app).get('/render').expect(401); 149 | }); 150 | 151 | it('should respond with the graph panel screenshot', async () => { 152 | const url = `${getGrafanaEndpoint(domain)}/d-solo/${testDashboardUid}?panelId=${panelIds.graph}&render=1&from=1699333200000&to=1699344000000`; 153 | const response = await request(server.app) 154 | .get( 155 | `/render?url=${encodeURIComponent( 156 | url 157 | )}&timeout=5&renderKey=${renderKey}&domain=${domain}&width=${imageWidth}&height=${imageHeight}&deviceScaleFactor=1` 158 | ) 159 | .set('X-Auth-Token', '-'); 160 | 161 | expect(response.statusCode).toEqual(200); 162 | expect(response.headers['content-type']).toEqual('image/png'); 163 | 164 | const pixelDiff = compareImage('graph', response.body); 165 | expect(pixelDiff).toBeLessThan(imageDiffThreshold); 166 | }); 167 | 168 | it('should respond with the table panel screenshot', async () => { 169 | const url = `${getGrafanaEndpoint(domain)}/d-solo/${testDashboardUid}?panelId=${panelIds.table}&render=1&from=1699333200000&to=1699344000000`; 170 | const response = await request(server.app) 171 | .get( 172 | `/render?url=${encodeURIComponent( 173 | url 174 | )}&timeout=5&renderKey=${renderKey}&domain=${domain}&width=${imageWidth}&height=${imageHeight}&deviceScaleFactor=1` 175 | ) 176 | .set('X-Auth-Token', '-'); 177 | 178 | expect(response.statusCode).toEqual(200); 179 | expect(response.headers['content-type']).toEqual('image/png'); 180 | 181 | const pixelDiff = compareImage('table', response.body); 182 | expect(pixelDiff).toBeLessThan(imageDiffThreshold); 183 | }); 184 | 185 | it('should respond with a panel error screenshot', async () => { 186 | const url = `${getGrafanaEndpoint(domain)}/d-solo/${testDashboardUid}?panelId=${panelIds.error}&render=1&from=1699333200000&to=1699344000000`; 187 | const response = await request(server.app) 188 | .get( 189 | `/render?url=${encodeURIComponent( 190 | url 191 | )}&timeout=5&renderKey=${renderKey}&domain=${domain}&width=${imageWidth}&height=${imageHeight}&deviceScaleFactor=1` 192 | ) 193 | .set('X-Auth-Token', '-'); 194 | 195 | 
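// As in the graph and table tests above, the rendered PNG is compared with the golden file in
// tests/testdata, and the test fails if more than 1% of the pixels (imageDiffThreshold) differ.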
expect(response.statusCode).toEqual(200); 196 | expect(response.headers['content-type']).toEqual('image/png'); 197 | 198 | const pixelDiff = compareImage('error', response.body); 199 | expect(pixelDiff).toBeLessThan(imageDiffThreshold); 200 | }); 201 | 202 | it('should take a full dashboard screenshot', async () => { 203 | const url = `${getGrafanaEndpoint(domain)}/d/${allPanelsDashboardUid}?render=1&from=1699333200000&to=1699344000000&kiosk=true`; 204 | const response = await request(server.app) 205 | .get( 206 | `/render?url=${encodeURIComponent(url)}&timeout=5&renderKey=${renderKey}&domain=${domain}&width=${imageWidth}&height=-1&deviceScaleFactor=1` 207 | ) 208 | .set('X-Auth-Token', '-'); 209 | 210 | expect(response.statusCode).toEqual(200); 211 | expect(response.headers['content-type']).toEqual('image/png'); 212 | 213 | const pixelDiff = compareImage('full-page-screenshot', response.body); 214 | expect(pixelDiff).toBeLessThan(imageDiffThreshold); 215 | }); 216 | 217 | it('should respond with too many requests', async () => { 218 | await cleanUpTestEnv(); 219 | const config = JSON.parse(JSON.stringify(defaultServiceConfig)); 220 | config.service.rateLimiter.enabled = true; 221 | config.service.rateLimiter.requestsPerSecond = 0; 222 | await setupTestEnv(config); 223 | 224 | const response = await request(server.app).get('/render').set('X-Auth-Token', '-'); 225 | expect(response.statusCode).toEqual(429); 226 | }); 227 | }); 228 | 229 | // compareImage returns the number of different pixels between the image stored in the test file and the one from the response body. 230 | // It updates the stored file and returns 0 if tests are run with UPDATE_GOLDEN=true. 231 | // It writes the diff file to /testdata if tests are run with SAVE_DIFF=true. 232 | function compareImage(testName: string, responseBody: any): number { 233 | const goldenFilePath = path.join(goldenFilesFolder, `${testName}.png`); 234 | if (envSettings.updateGolden) { 235 | fs.writeFileSync(goldenFilePath, responseBody); 236 | return 0; 237 | } 238 | 239 | let diff: { width: number; height: number; data: Uint8ClampedArray } | null = null; 240 | if (envSettings.saveDiff) { 241 | diff = { 242 | width: imageWidth, 243 | height: imageHeight, 244 | data: new Uint8ClampedArray(imageWidth * imageHeight * 4), 245 | }; 246 | } 247 | 248 | const responseImage = fastPng.decode(responseBody); 249 | const expectedImage = fastPng.decode(fs.readFileSync(goldenFilePath)); 250 | 251 | const pixelDiff = pixelmatch( 252 | responseImage.data as Uint8ClampedArray, 253 | expectedImage.data as Uint8ClampedArray, 254 | diff ? 
diff.data : null, 255 | imageWidth, 256 | imageHeight, 257 | { 258 | threshold: matchingThreshold, 259 | } 260 | ); 261 | 262 | if (diff && pixelDiff >= imageDiffThreshold) { 263 | fs.writeFileSync(path.join(goldenFilesFolder, `diff_${testName}.png`), fastPng.encode(diff as fastPng.ImageData)); 264 | } 265 | 266 | return pixelDiff; 267 | } 268 | -------------------------------------------------------------------------------- /src/service/http-server.ts: -------------------------------------------------------------------------------- 1 | import * as bodyParser from 'body-parser'; 2 | import * as boom from '@hapi/boom'; 3 | import * as contentDisposition from 'content-disposition'; 4 | import * as express from 'express'; 5 | import * as fs from 'fs'; 6 | import * as http from 'http'; 7 | import * as https from 'https'; 8 | import * as morgan from 'morgan'; 9 | import * as multer from 'multer'; 10 | import * as net from 'net'; 11 | import * as path from 'path'; 12 | import * as promClient from 'prom-client'; 13 | 14 | import { Logger } from '../logger'; 15 | import { Browser, createBrowser } from '../browser'; 16 | import { ServiceConfig } from './config'; 17 | import { setupHttpServerMetrics } from './metrics'; 18 | import { setupRateLimiter } from './ratelimiter'; 19 | import { HTTPHeaders, ImageRenderOptions, RenderOptions } from '../types'; 20 | import { Sanitizer } from '../sanitizer/Sanitizer'; 21 | import { isSanitizeRequest } from '../sanitizer/types'; 22 | import { asyncMiddleware, trustedUrlMiddleware, authTokenMiddleware, rateLimiterMiddleware } from './middlewares'; 23 | import { SecureVersion } from 'tls'; 24 | 25 | const upload = multer({ storage: multer.memoryStorage() }); 26 | 27 | enum SanitizeRequestPartName { 28 | 'file' = 'file', 29 | 'config' = 'config', 30 | } 31 | 32 | export class HttpServer { 33 | app: express.Express; 34 | browser: Browser; 35 | server: http.Server; 36 | 37 | constructor(private config: ServiceConfig, private log: Logger, private sanitizer: Sanitizer) {} 38 | 39 | async start() { 40 | this.app = express(); 41 | 42 | this.app.use( 43 | morgan('combined', { 44 | skip: (req, res) => { 45 | return res.statusCode >= 400; 46 | }, 47 | stream: this.log.debugWriter, 48 | }) 49 | ); 50 | this.app.use( 51 | morgan('combined', { 52 | skip: (req, res) => { 53 | return res.statusCode < 400; 54 | }, 55 | stream: this.log.errorWriter, 56 | }) 57 | ); 58 | 59 | this.app.use(bodyParser.json()); 60 | 61 | if (this.config.service.metrics.enabled) { 62 | setupHttpServerMetrics(this.app, this.config.service.metrics, this.log); 63 | } 64 | 65 | this.app.get('/', (req: express.Request, res: express.Response) => { 66 | res.send('Grafana Image Renderer'); 67 | }); 68 | 69 | // Middlewares for /render endpoints 70 | this.app.use('/render', authTokenMiddleware(this.config.service.security), trustedUrlMiddleware); 71 | const rateLimiterConfig = this.config.service.rateLimiter; 72 | if (rateLimiterConfig.enabled) { 73 | let rateLimiter = setupRateLimiter(rateLimiterConfig, this.log); 74 | this.app.use('/render', rateLimiterMiddleware(rateLimiter)); 75 | } 76 | 77 | // Set up /render endpoints 78 | this.app.get('/render', asyncMiddleware(this.render)); 79 | this.app.get('/render/csv', asyncMiddleware(this.renderCSV)); 80 | this.app.get('/render/version', (req: express.Request, res: express.Response) => { 81 | const pluginInfo = require('../../plugin.json'); 82 | res.send({ version: pluginInfo.info.version }); 83 | }); 84 | 85 | // Middlewares for /sanitize endpoints 86 | 
this.app.use('/sanitize', authTokenMiddleware(this.config.service.security)); 87 | 88 | // Set up /sanitize endpoints 89 | this.app.post( 90 | '/sanitize', 91 | upload.fields([ 92 | { name: SanitizeRequestPartName.file, maxCount: 1 }, 93 | { name: SanitizeRequestPartName.config, maxCount: 1 }, 94 | ]), 95 | asyncMiddleware(this.sanitize) 96 | ); 97 | 98 | this.app.use((err, req, res, next) => { 99 | if (err.stack) { 100 | this.log.error('Request failed', 'url', req.url, 'stack', err.stack); 101 | } else { 102 | this.log.error('Request failed', 'url', req.url, 'error', err); 103 | } 104 | 105 | if (err.output) { 106 | return res.status(err.output.statusCode).json(err.output.payload); 107 | } 108 | 109 | return res.status(500).json(err); 110 | }); 111 | 112 | this.createServer(); 113 | 114 | const metrics = { 115 | durationHistogram: new promClient.Histogram({ 116 | name: 'grafana_image_renderer_step_duration_seconds', 117 | help: 'duration histogram of browser steps for rendering an image labeled with: step', 118 | labelNames: ['step'], 119 | buckets: [0.1, 0.3, 0.5, 1, 3, 5, 10, 20, 30], 120 | }), 121 | }; 122 | this.browser = createBrowser(this.config.rendering, this.log, metrics); 123 | 124 | if (this.config.service.metrics.enabled) { 125 | const browserInfo = new promClient.Gauge({ 126 | name: 'grafana_image_renderer_browser_info', 127 | help: "A metric with a constant '1 value labeled by version of the browser in use", 128 | labelNames: ['version'], 129 | }); 130 | 131 | try { 132 | const browserVersion = await this.browser.getBrowserVersion(); 133 | browserInfo.labels(browserVersion).set(1); 134 | } catch { 135 | this.log.error('Failed to get browser version'); 136 | browserInfo.labels('unknown').set(0); 137 | } 138 | } 139 | 140 | await this.browser.start(); 141 | } 142 | 143 | createServer() { 144 | const { protocol, host, port } = this.config.service; 145 | if (protocol === 'https') { 146 | const { certFile, certKey, minTLSVersion } = this.config.service; 147 | if (!certFile || !certKey) { 148 | throw new Error('No cert file or cert key provided, cannot start HTTPS server'); 149 | } 150 | 151 | if (minTLSVersion && minTLSVersion !== 'TLSv1.2' && minTLSVersion !== 'TLSv1.3') { 152 | throw new Error('Only allowed TLS min versions are TLSv1.2 and TLSv1.3'); 153 | } 154 | 155 | const options = { 156 | cert: fs.readFileSync(certFile), 157 | key: fs.readFileSync(certKey), 158 | 159 | maxVersion: 'TLSv1.3' as SecureVersion, 160 | minVersion: (minTLSVersion || 'TLSv1.2') as SecureVersion, 161 | }; 162 | 163 | this.server = https.createServer(options, this.app); 164 | } else { 165 | this.server = http.createServer(this.app); 166 | } 167 | 168 | if (host) { 169 | this.server.listen(port, host, () => { 170 | const info = this.server.address() as net.AddressInfo; 171 | this.log.info(`${protocol?.toUpperCase()} Server started, listening at ${protocol}://${host}:${info.port}`); 172 | }); 173 | } else { 174 | this.server.listen(port, () => { 175 | const info = this.server.address() as net.AddressInfo; 176 | this.log.info(`${protocol?.toUpperCase()} Server started, listening at ${protocol}://localhost:${info.port}`); 177 | }); 178 | } 179 | } 180 | 181 | close() { 182 | this.server.close(); 183 | } 184 | 185 | render = async (req: express.Request, res: express.Response, next: express.NextFunction) => { 186 | const abortController = new AbortController(); 187 | const { signal } = abortController; 188 | 189 | if (!req.query.url) { 190 | throw boom.badRequest('Missing url parameter'); 191 | } 192 | 
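// The query parameters gathered below map directly onto ImageRenderOptions (src/types.ts).
// For reference, the integration tests call this endpoint as
// /render?url=<encoded panel url>&timeout=5&renderKey=<jwt>&domain=<domain>&width=500&height=300&deviceScaleFactor=1
// together with the X-Auth-Token header enforced by authTokenMiddleware above.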
193 | const options: ImageRenderOptions = { 194 | url: req.query.url, 195 | width: req.query.width, 196 | height: req.query.height, 197 | filePath: req.query.filePath, 198 | timeout: req.query.timeout, 199 | renderKey: req.query.renderKey, 200 | domain: req.query.domain, 201 | timezone: req.query.timezone, 202 | encoding: req.query.encoding, 203 | deviceScaleFactor: req.query.deviceScaleFactor, 204 | headers: this.getHeaders(req), 205 | }; 206 | 207 | this.log.debug('Render request received', 'url', options.url); 208 | req.on('close', (err) => { 209 | this.log.debug('Connection closed', 'url', options.url, 'error', err); 210 | abortController.abort(); 211 | }); 212 | 213 | try { 214 | const result = await this.browser.render(options, signal); 215 | 216 | res.sendFile(result.filePath, (err) => { 217 | if (err) { 218 | next(err); 219 | } else { 220 | try { 221 | this.log.debug('Deleting temporary file', 'file', result.filePath); 222 | fs.unlinkSync(result.filePath); 223 | } catch (e) { 224 | this.log.error('Failed to delete temporary file', 'file', result.filePath); 225 | } 226 | } 227 | }); 228 | } catch (e) { 229 | this.log.error('Render failed', 'url', options.url, 'error', e.stack); 230 | return res.status(500).json({ error: e.message }); 231 | } 232 | }; 233 | 234 | sanitize = async (req: express.Request, res: express.Response<{ error: string }>) => { 235 | const file = req.files?.[SanitizeRequestPartName.file]?.[0] as Express.Multer.File | undefined; 236 | if (!file) { 237 | throw boom.badRequest('missing file'); 238 | } 239 | 240 | const configFile = req.files?.[SanitizeRequestPartName.config]?.[0] as Express.Multer.File | undefined; 241 | if (!configFile) { 242 | throw boom.badRequest('missing config'); 243 | } 244 | 245 | const config = JSON.parse(configFile.buffer.toString()); 246 | 247 | const sanitizeReq = { 248 | ...config, 249 | content: file.buffer, 250 | }; 251 | 252 | if (!isSanitizeRequest(sanitizeReq)) { 253 | throw boom.badRequest('invalid request: ' + JSON.stringify(config)); 254 | } 255 | 256 | this.log.debug('Sanitize request received', 'contentLength', file.size, 'name', file.filename, 'config', JSON.stringify(config)); 257 | 258 | try { 259 | const sanitizeResponse = this.sanitizer.sanitize(sanitizeReq); 260 | res.writeHead(200, { 261 | 'Content-Disposition': `attachment;filename=${file.filename ?? 'sanitized'}`, 262 | 'Content-Length': sanitizeResponse.sanitized.length, 263 | 'Content-Type': file.mimetype ?? 
'application/octet-stream', 264 | }); 265 | return res.end(sanitizeResponse.sanitized); 266 | } catch (e) { 267 | this.log.error('Sanitization failed', 'filesize', file.size, 'name', file.filename, 'error', e.stack); 268 | return res.status(500).json({ error: e.message }); 269 | } 270 | }; 271 | 272 | renderCSV = async (req: express.Request, res: express.Response, next: express.NextFunction) => { 273 | const abortController = new AbortController(); 274 | const { signal } = abortController; 275 | 276 | if (!req.query.url) { 277 | throw boom.badRequest('Missing url parameter'); 278 | } 279 | 280 | const options: RenderOptions = { 281 | url: req.query.url, 282 | filePath: req.query.filePath, 283 | timeout: req.query.timeout, 284 | renderKey: req.query.renderKey, 285 | domain: req.query.domain, 286 | timezone: req.query.timezone, 287 | encoding: req.query.encoding, 288 | headers: this.getHeaders(req), 289 | }; 290 | 291 | this.log.debug('Render request received', 'url', options.url); 292 | req.on('close', (err) => { 293 | this.log.debug('Connection closed', 'url', options.url, 'error', err); 294 | abortController.abort(); 295 | }); 296 | 297 | try { 298 | const result = await this.browser.renderCSV(options, signal); 299 | 300 | if (result.fileName) { 301 | res.setHeader('Content-Disposition', contentDisposition(result.fileName)); 302 | } 303 | res.sendFile(result.filePath, (err) => { 304 | if (err) { 305 | next(err); 306 | } else { 307 | try { 308 | this.log.debug('Deleting temporary file', 'file', result.filePath); 309 | fs.unlink(result.filePath, (err) => { 310 | if (err) { 311 | throw err; 312 | } 313 | 314 | if (!options.filePath) { 315 | fs.rmdir(path.dirname(result.filePath), () => {}); 316 | } 317 | }); 318 | } catch (e) { 319 | this.log.error('Failed to delete temporary file', 'file', result.filePath, 'error', e.message); 320 | } 321 | } 322 | }); 323 | } catch (e) { 324 | this.log.error('Render CSV failed', 'url', options.url, 'error', e.stack); 325 | return res.status(500).json({ error: e.message }); 326 | } 327 | }; 328 | 329 | getHeaders(req: express.Request): HTTPHeaders { 330 | const headers: HTTPHeaders = {}; 331 | 332 | if (req.headers['Accept-Language']) { 333 | headers['Accept-Language'] = (req.headers['Accept-Language'] as string[]).join(';'); 334 | } 335 | 336 | // Propagate traces (only if tracing is enabled) 337 | if (this.config.rendering.tracing.url && req.headers['traceparent']) { 338 | headers['traceparent'] = req.headers['traceparent'] as string; 339 | headers['tracestate'] = (req.headers['tracestate'] as string) ?? 
''; 340 | } 341 | 342 | return headers; 343 | } 344 | } 345 | -------------------------------------------------------------------------------- /src/service/metrics.ts: -------------------------------------------------------------------------------- 1 | import * as promBundle from 'express-prom-bundle'; 2 | import * as promClient from 'prom-client'; 3 | import * as onFinished from 'on-finished'; 4 | import express = require('express'); 5 | 6 | import { MetricsConfig } from './config'; 7 | import { Logger } from '../logger'; 8 | 9 | export const setupHttpServerMetrics = (app: express.Express, config: MetricsConfig, log: Logger) => { 10 | log.info( 11 | 'Metrics enabled', 12 | 'collectDefaultMetrics', 13 | config.collectDefaultMetrics, 14 | 'requestDurationBuckets', 15 | config.requestDurationBuckets.join(',') 16 | ); 17 | 18 | // Exclude all non-rendering endpoints: 19 | // - endpoints that do not include render 20 | // - /render/version 21 | const excludeRegExp = /^(((?!(render)).)*|.*version.*)$/; 22 | 23 | const opts = { 24 | httpDurationMetricName: 'grafana_image_renderer_service_http_request_duration_seconds', 25 | metricType: 'histogram', 26 | buckets: config.requestDurationBuckets, 27 | excludeRoutes: [excludeRegExp], 28 | promClient: {}, 29 | formatStatusCode: (res) => { 30 | if (res && res.req && res.req.aborted) { 31 | // Nginx non-standard code 499 Client Closed Request 32 | // Used when the client has closed the request before 33 | // the server could send a response. 34 | return 499; 35 | } 36 | 37 | return res.status_code || res.statusCode; 38 | }, 39 | } as any; 40 | 41 | if (config.collectDefaultMetrics) { 42 | opts.promClient.collectDefaultMetrics = {}; 43 | } 44 | 45 | const metricsMiddleware = promBundle(opts); 46 | app.use(metricsMiddleware); 47 | 48 | const httpRequestsInFlight = new promClient.Gauge({ 49 | name: 'grafana_image_renderer_http_request_in_flight', 50 | help: 'A gauge of requests currently being served by the image renderer.', 51 | }); 52 | app.use(requestsInFlightMiddleware(httpRequestsInFlight, excludeRegExp)); 53 | }; 54 | 55 | const requestsInFlightMiddleware = (httpRequestsInFlight: promClient.Gauge, excludeRegExp: RegExp) => { 56 | return (req, res, next) => { 57 | const path = req.originalUrl || req.url; 58 | if (path.match(excludeRegExp)) { 59 | return next(); 60 | } 61 | 62 | httpRequestsInFlight.inc(); 63 | onFinished(res, () => { 64 | httpRequestsInFlight.dec(); 65 | }); 66 | 67 | next(); 68 | }; 69 | }; 70 | -------------------------------------------------------------------------------- /src/service/middlewares.ts: -------------------------------------------------------------------------------- 1 | import express = require('express'); 2 | import * as boom from '@hapi/boom'; 3 | import { ImageRenderOptions } from '../types'; 4 | import { SecurityConfig, isAuthTokenValid } from '../config/security'; 5 | import { RateLimiterAbstract } from 'rate-limiter-flexible'; 6 | 7 | export const asyncMiddleware = (fn) => (req, res, next) => { 8 | Promise.resolve(fn(req, res, next)).catch((err) => { 9 | if (!err.isBoom) { 10 | return next(boom.badImplementation(err)); 11 | } 12 | next(err); 13 | }); 14 | }; 15 | 16 | export const trustedUrlMiddleware = ( 17 | req: express.Request, 18 | res: express.Response, 19 | next: express.NextFunction 20 | ) => { 21 | const queryUrl = req.query.url; 22 | 23 | if (queryUrl && !(queryUrl.startsWith('http://') || queryUrl.startsWith('https://'))) { 24 | return next(boom.forbidden('Forbidden query url protocol')); 25 | } 26 | 
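// Only http:// and https:// targets are forwarded to the browser: a request such as
// /render?url=file:///etc/passwd is rejected with 403 Forbidden before any page is opened.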
27 | next(); 28 | }; 29 | 30 | export const authTokenMiddleware = (config: SecurityConfig) => { 31 | return (req: express.Request, res: express.Response, next: express.NextFunction) => { 32 | const headerToken = req.header('X-Auth-Token'); 33 | if (headerToken === undefined || !isAuthTokenValid(config, headerToken)) { 34 | return next(boom.unauthorized('Unauthorized request')); 35 | } 36 | 37 | next(); 38 | }; 39 | }; 40 | 41 | export const rateLimiterMiddleware = (rateLimiter: RateLimiterAbstract) => { 42 | return async (req: express.Request, res: express.Response, next: express.NextFunction) => { 43 | const rateLimiterKey = req.header('X-Tenant-ID') || req.ip; 44 | if (rateLimiterKey === undefined) { 45 | return next(boom.badRequest('Missing X-Tenant-ID header to use rate limiter')); 46 | } 47 | 48 | try { 49 | await rateLimiter.consume(rateLimiterKey); 50 | next(); 51 | } catch (err) { 52 | res.set('Retry-After', String(Math.ceil(err.msBeforeNext / 1000))); 53 | res.status(429).send('Too Many Requests'); 54 | } 55 | }; 56 | }; 57 | -------------------------------------------------------------------------------- /src/service/ratelimiter.ts: -------------------------------------------------------------------------------- 1 | import { RateLimiterRedis, RateLimiterMemory } from 'rate-limiter-flexible'; 2 | import { Redis } from 'ioredis'; 3 | 4 | import { RateLimiterConfig } from './config'; 5 | import { Logger } from '../logger'; 6 | 7 | export const setupRateLimiter = (config: RateLimiterConfig, log: Logger) => { 8 | let rateLimiter; 9 | 10 | if (config.redisHost && config.redisPort) { 11 | const redisClient = new Redis({ 12 | host: config.redisHost, 13 | port: config.redisPort, 14 | }); 15 | 16 | rateLimiter = new RateLimiterRedis({ 17 | storeClient: redisClient, 18 | keyPrefix: 'rate-limit', 19 | points: config.requestsPerSecond, // Maximum number of requests 20 | duration: 1, // per second 21 | }); 22 | 23 | log.info('Rate limiter enabled using Redis', 'requestsPerSecond', config.requestsPerSecond); 24 | } else { 25 | // Fallback to in-memory storage 26 | rateLimiter = new RateLimiterMemory({ 27 | points: config.requestsPerSecond, 28 | duration: 1, 29 | }); 30 | 31 | log.info('Rate limiter enabled using in-memory storage', 'requestsPerSecond', config.requestsPerSecond); 32 | } 33 | 34 | return rateLimiter; 35 | }; 36 | -------------------------------------------------------------------------------- /src/tracing.ts: -------------------------------------------------------------------------------- 1 | import { NodeSDK } from '@opentelemetry/sdk-node'; 2 | import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node'; 3 | import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'; 4 | import { ATTR_SERVICE_NAME } from '@opentelemetry/semantic-conventions'; 5 | import { Resource } from '@opentelemetry/resources'; 6 | import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api'; 7 | 8 | import { Logger } from './logger'; 9 | import { getConfig } from './config/config'; 10 | 11 | const config = getConfig(); 12 | let sdk; 13 | if (config.rendering.tracing.url) { 14 | sdk = initTracing(config.rendering.tracing.url, config.rendering.verboseLogging); 15 | } 16 | 17 | function initTracing(exporterURL: string, verboseLogging: boolean = false) { 18 | if (verboseLogging) { 19 | diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG); 20 | } 21 | 22 | const traceExporter = new OTLPTraceExporter({ 23 | url: exporterURL, 24 | }); 25 | 26 | 
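// exporterURL comes from config.rendering.tracing.url and is expected to be an OTLP/HTTP
// collector endpoint, for example http://localhost:4318/v1/traces (the default OTLP/HTTP path).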
return new NodeSDK({ 27 | resource: new Resource({ 28 | [ATTR_SERVICE_NAME]: config.rendering.tracing.serviceName || 'grafana-image-renderer', 29 | }), 30 | traceExporter, 31 | instrumentations: [ 32 | getNodeAutoInstrumentations({ 33 | // only instrument fs if it is part of another trace 34 | '@opentelemetry/instrumentation-fs': { 35 | requireParentSpan: true, 36 | }, 37 | }), 38 | ], 39 | }); 40 | } 41 | 42 | export function startTracing(log: Logger) { 43 | sdk.start(); 44 | log.info('Starting tracing'); 45 | 46 | process.on('SIGTERM', () => { 47 | sdk 48 | .shutdown() 49 | .then(() => log.debug('Tracing terminated')) 50 | .catch((error) => log.error('Error terminating tracing', 'err', error)) 51 | .finally(() => process.exit(0)); 52 | }); 53 | } 54 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | export interface HTTPHeaders { 2 | 'Accept-Language'?: string; 3 | [header: string]: string | undefined; 4 | } 5 | 6 | // Common options for CSV and Images 7 | export interface RenderOptions { 8 | url: string; 9 | filePath: string; 10 | timeout: number; // seconds 11 | renderKey: string; 12 | domain: string; 13 | timezone?: string; 14 | encoding?: string; 15 | headers?: HTTPHeaders; 16 | } 17 | 18 | export interface ImageRenderOptions extends RenderOptions { 19 | width: string | number; 20 | height: string | number; 21 | deviceScaleFactor?: string | number; 22 | scrollDelay?: number; 23 | 24 | // Runtime options derived from the input 25 | fullPageImage?: boolean; 26 | scaleImage?: number; 27 | } 28 | -------------------------------------------------------------------------------- /tests/reporter.js: -------------------------------------------------------------------------------- 1 | const { DefaultReporter } = require('@jest/reporters') 2 | 3 | class Reporter extends DefaultReporter 4 | { 5 | constructor() 6 | { 7 | super(...arguments) 8 | } 9 | 10 | printTestFileHeader(_testPath, config, result) 11 | { 12 | const console = result.console 13 | 14 | if(result.numFailingTests === 0 && !result.testExecError) 15 | { 16 | result.console = null 17 | } 18 | 19 | super.printTestFileHeader(...arguments) 20 | 21 | result.console = console 22 | } 23 | } 24 | 25 | module.exports = Reporter -------------------------------------------------------------------------------- /tests/testdata/error.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/grafana/grafana-image-renderer/dda7d51c4fd3a8ff0544cc12c6b05debe47da41e/tests/testdata/error.png -------------------------------------------------------------------------------- /tests/testdata/full-page-screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/grafana/grafana-image-renderer/dda7d51c4fd3a8ff0544cc12c6b05debe47da41e/tests/testdata/full-page-screenshot.png -------------------------------------------------------------------------------- /tests/testdata/graph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/grafana/grafana-image-renderer/dda7d51c4fd3a8ff0544cc12c6b05debe47da41e/tests/testdata/graph.png -------------------------------------------------------------------------------- /tests/testdata/table.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/grafana/grafana-image-renderer/dda7d51c4fd3a8ff0544cc12c6b05debe47da41e/tests/testdata/table.png -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "moduleResolution": "node", 4 | "target": "es2021", 5 | "module": "commonjs", 6 | "outDir": "build", 7 | "sourceMap": true, 8 | "strictNullChecks": true, 9 | "allowSyntheticDefaultImports": true, 10 | "typeRoots": [ 11 | "node_modules/@types" 12 | ], 13 | "rootDir": "src" 14 | }, 15 | "include": [ 16 | "./src/*.ts", 17 | "src/config/config.ts" 18 | ], 19 | "exclude": [ 20 | "node_modules" 21 | ] 22 | } --------------------------------------------------------------------------------
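The HTTP mode described by src/service/http-server.ts, src/service/config.ts and the integration test above boils down to a single authenticated GET request that returns a PNG. The sketch below is a minimal, illustrative client for that flow, assuming the service runs locally with the defaults from defaultServiceConfig (port 8081, auth token '-'); the dashboard URL and renderKey values are placeholders, not values taken from this repository.

// render-client.ts — illustrative sketch only; the panel URL and renderKey below are placeholders.
import { writeFile } from 'fs/promises';

async function renderPanel(): Promise<void> {
  const params = new URLSearchParams({
    url: 'http://localhost:3000/d-solo/<dashboard-uid>?panelId=1', // must be http(s), see trustedUrlMiddleware
    width: '500',
    height: '300',
    deviceScaleFactor: '1',
    timeout: '30',
    renderKey: '<render-key>', // JWT that Grafana normally issues for the render user
    domain: 'localhost',
  });

  // X-Auth-Token must match service.security.authToken ('-' by default).
  const response = await fetch(`http://localhost:8081/render?${params.toString()}`, {
    headers: { 'X-Auth-Token': '-' },
  });

  if (!response.ok) {
    throw new Error(`Render failed: ${response.status} ${await response.text()}`);
  }

  // The endpoint sends the rendered screenshot back as image/png.
  await writeFile('panel.png', Buffer.from(await response.arrayBuffer()));
}

renderPanel().catch((err) => {
  console.error(err);
  process.exit(1);
});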