├── .babelrc
├── .env.template
├── .eslintrc.yml
├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
└── workflows
│ └── integrate.yml
├── .gitignore
├── .husky
├── .gitignore
└── pre-commit
├── CODE_OF_CONDUCT.md
├── LICENSE
├── README.md
├── __test__
├── MetricsCard.test.tsx
└── server.test.ts
├── assets
└── franzView_logo.png
├── configs
├── jmx_exporter
│ ├── jmx_prometheus_javaagent-0.16.1.jar
│ ├── kafka.yml
│ └── zookeeper.yml
└── prometheus
│ ├── prometheus.example.yml
│ ├── prometheus.template.yml
│ └── prometheus.yml
├── docker-compose-kafka-only.yml
├── docker-compose-kafka-prom.yml
├── docker-compose-prom-only.yml
├── jest.config.js
├── package-lock.json
├── package.json
├── public
└── index.html
├── src
├── client
│ ├── App.tsx
│ ├── Layout
│ │ └── Layout.tsx
│ ├── components
│ │ ├── AddTopic.tsx
│ │ ├── CPUUsage.tsx
│ │ ├── ConfirmationDialog.tsx
│ │ ├── ConsumerCard.tsx
│ │ ├── EditableField.tsx
│ │ ├── Header.tsx
│ │ ├── MetricsCard.tsx
│ │ ├── PopoverMoreInfo.tsx
│ │ ├── RealTimeLineChart.tsx
│ │ ├── ReassignPartitions.tsx
│ │ ├── Searchbar.tsx
│ │ ├── Sidebar.tsx
│ │ ├── Title.tsx
│ │ ├── TopicGrid.tsx
│ │ └── listItems.tsx
│ ├── index.html
│ ├── index.tsx
│ ├── models
│ │ ├── queries.tsx
│ │ └── typeKeyMap.tsx
│ ├── pages
│ │ ├── Brokers.tsx
│ │ ├── Dashboard.tsx
│ │ ├── Topics.tsx
│ │ └── TopicsList.tsx
│ └── utils
│ │ └── validate.ts
└── server
│ ├── graphql
│ ├── datasources
│ │ ├── brokerAdmin.ts
│ │ ├── models
│ │ │ └── promQueries.ts
│ │ └── prometheusAPI.ts
│ ├── resolvers.ts
│ └── typeDefs.ts
│ ├── kafka
│ └── kafka.ts
│ └── server.ts
├── tsconfig.json
├── types
└── types.d.ts
└── webpack.config.js
/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": [
3 | [
4 | "@babel/preset-env",
5 | {
6 | "modules": false
7 | }
8 | ],
9 | "@babel/preset-react"
10 | ]
11 | }
12 |
--------------------------------------------------------------------------------
/.env.template:
--------------------------------------------------------------------------------
1 | # Update KAKFA_BROKER with the url and port for your kafka broker or brokers
2 | # ex. localhost:9092 or localhost:9092,localhost:9093...
3 | KAKFA_BROKER=exampleHost:9092
4 | # Update PROMETHEUS_URL with the url and port for Prometheus server
5 | PROMETHEUS_URL=http://exampleProm:9090
--------------------------------------------------------------------------------
/.eslintrc.yml:
--------------------------------------------------------------------------------
1 | env:
2 | browser: true
3 | es2021: true
4 | node: true
5 | jest: true
6 | extends:
7 | - eslint:recommended
8 | - plugin:react/recommended
9 | - plugin:@typescript-eslint/recommended
10 | parser: "@typescript-eslint/parser"
11 | parserOptions:
12 | ecmaFeatures:
13 | jsx: true
14 | ecmaVersion: latest
15 | sourceType: module
16 | plugins:
17 | - react
18 | - "@typescript-eslint"
19 | - "testing-library"
20 | - "jest-dom"
21 | # overrides:
22 | # - files:
23 | # - "**/__tests__/**/*.[jt]s?(x)"
24 | # - extends:
25 | # - "plugin:testing-library/react"
26 | rules: {
27 | "@typescript-eslint/no-inferrable-types": "off",
28 | "react/react-in-jsx-scope": "off",
29 | "react/jsx-filename-extension":
30 | [1, { "extensions": [".js", ".jsx", ".ts", ".tsx"] }],
31 | # "testing-library/await-async-query": "error",
32 | # "testing-library/no-await-sync-query": "error",
33 | # "testing-library/no-debugging-utils": "warn",
34 | # "testing-library/no-dom-import": "off",
35 | # "jest-dom/prefer-checked": "error",
36 | # "jest-dom/prefer-enabled-disabled": "error",
37 | # "jest-dom/prefer-required": "error",
38 | # "jest-dom/prefer-to-have-attribute": "error",
39 | }
40 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/workflows/integrate.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | - dev
8 | pull_request:
9 | branches:
10 | - main
11 | - dev
12 |
13 |
14 | jobs:
15 | build:
16 |
17 | runs-on: macos-latest
18 | environment: dev
19 |
20 | strategy:
21 | matrix:
22 | node-version: [14.x, 16.x, 17.x]
23 | # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
24 |
25 | steps:
26 | - uses: actions/checkout@v3
27 | - name: create env file
28 | run: |
29 | touch .env
30 | echo KAKFA_BROKER=${{ secrets.BROKER }} >> .env
31 | echo PROMETHEUS_URL=${{ secrets.PROMETHEUS_URL }} >> .env
32 | cat .env
33 | - name: Use Node.js ${{ matrix.node-version }}
34 | uses: actions/setup-node@v3
35 | with:
36 | node-version: ${{ matrix.node-version }}
37 | cache: 'npm'
38 | - run: npm ci
39 | - run: npm run build --if-present
40 | - run: npm test --if-present
41 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | public/report.html
3 | build/
4 | # Logs
5 | logs
6 | *.log
7 | npm-debug.log*
8 | yarn-debug.log*
9 | yarn-error.log*
10 | lerna-debug.log*
11 |
12 | # Diagnostic reports (https://nodejs.org/api/report.html)
13 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
14 |
15 | # Runtime data
16 | pids
17 | *.pid
18 | *.seed
19 | *.pid.lock
20 |
21 | # Directory for instrumented libs generated by jscoverage/JSCover
22 | lib-cov
23 |
24 | # Coverage directory used by tools like istanbul
25 | coverage
26 | *.lcov
27 |
28 | # nyc test coverage
29 | .nyc_output
30 |
31 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
32 | .grunt
33 |
34 | # Bower dependency directory (https://bower.io/)
35 | bower_components
36 |
37 | # node-waf configuration
38 | .lock-wscript
39 |
40 | # Compiled binary addons (https://nodejs.org/api/addons.html)
41 | build/Release
42 |
43 | # Dependency directories
44 | node_modules/
45 | jspm_packages/
46 |
47 | # TypeScript v1 declaration files
48 | typings/
49 |
50 | # TypeScript cache
51 | *.tsbuildinfo
52 |
53 | # Optional npm cache directory
54 | .npm
55 |
56 | # Optional eslint cache
57 | .eslintcache
58 |
59 | # Microbundle cache
60 | .rpt2_cache/
61 | .rts2_cache_cjs/
62 | .rts2_cache_es/
63 | .rts2_cache_umd/
64 |
65 | # Optional REPL history
66 | .node_repl_history
67 |
68 | # Output of 'npm pack'
69 | *.tgz
70 |
71 | # Yarn Integrity file
72 | .yarn-integrity
73 |
74 | # dotenv environment variables file
75 | .env
76 | .env.test
77 |
78 | # parcel-bundler cache (https://parceljs.org/)
79 | .cache
80 |
81 | # Next.js build output
82 | .next
83 |
84 | # Nuxt.js build / generate output
85 | .nuxt
86 | dist
87 |
88 | # Gatsby files
89 | .cache/
90 | # Comment in the public line in if your project uses Gatsby and *not* Next.js
91 | # https://nextjs.org/blog/next-9-1#public-directory-support
92 | # public
93 |
94 | # vuepress build output
95 | .vuepress/dist
96 |
97 | # Serverless directories
98 | .serverless/
99 |
100 | # FuseBox cache
101 | .fusebox/
102 |
103 | # DynamoDB Local files
104 | .dynamodb/
105 |
106 | # TernJS port file
107 | .tern-port
108 |
109 | # My notes on the process of building this app.
110 | journal.md
111 |
112 | ./public/report.html
113 | bundle.js.map
114 | bundle.js
115 | bundle.js.LICENSE.txt
--------------------------------------------------------------------------------
/.husky/.gitignore:
--------------------------------------------------------------------------------
1 | _
2 |
--------------------------------------------------------------------------------
/.husky/pre-commit:
--------------------------------------------------------------------------------
#!/bin/sh
# Husky pre-commit hook: source husky's shell helpers, then run
# lint-staged so only the files staged for this commit are linted
# and formatted before the commit is created.
. "$(dirname "$0")/_/husky.sh"

npx lint-staged
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | franzview@gmail.com.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 OSLabs Beta
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
FranzView
6 |
An open-source Kafka monitoring and management tool built with JavaScript developers in mind!
7 |
8 |
9 |
10 |
11 |
12 | franzview.com
13 |
14 |
15 | ## Table of Contents
16 |
17 | 1. [About the Project](#about-the-project)
18 | - [Built With](#built-with)
19 | 1. [Getting Started](#getting-started)
20 | - [Requirements](#requirements)
21 | - [Installation](#installation)
22 | - [When you're ready to use FranzView](#when-youre-ready-to-use-franzview)
23 | 1. [Contributors](#contributors)
24 | 1. [Roadmap](#roadmap)
25 | 1. [Prometheus Server and Demo Cluster](#prometheus-server-and-demo-cluster)
26 | 1. [License](#license)
27 |
28 | ## About the Project
29 |
30 | FranzView is an open-source web application to help small teams with monitoring and management of Apache Kafka clusters. With FranzView you can monitor key metrics related to broker and topic performance and take actions around them. Through the UI you are able to:
31 |
32 | - Monitor key performance metrics in real time by broker or topic and diagnose any issues through different views
33 | - Create and delete topics within a cluster
34 | - Reassign partition replicas to support with load balancing, change replication factor for a topic, and solve for underreplication issues
35 |
36 | These features are supported by a GraphQL API for improved performance, for abstraction of PromQL queries, and is easily extendable based on the nuances of your cluster.
37 |
38 | ### Built With
39 |
40 | - [TypeScript](https://www.typescriptlang.org/)
41 | - [Apollo GraphQL](https://www.apollographql.com/)
42 | - [React](https://reactjs.org/)
43 | - [Material-UI](https://mui.com/)
44 | - [Chart.js](https://www.chartjs.org/docs/latest/)
45 | - [Jest](https://jestjs.io/)
46 | - [Prometheus](https://prometheus.io/)
47 | - [KafkaJS](https://kafka.js.org/)
48 |
49 | ## Getting Started
50 |
51 | ### Requirements
52 |
53 | Before starting setup you'll need to take the following steps:
54 |
55 | - Have node installed. FranzView is tested to work on Node 14+.
56 | - If you'd like to use our demo cluster, make sure you have Docker Desktop and Docker Compose installed and then check the [demo instructions](#prometheus-server-and-demo-cluster).
57 | - Set up [JMX exporter](https://github.com/prometheus/jmx_exporter) on your cluster. You can find the configuration files and a copy of the JMX exporter jar file in the `configs/jmx_exporter` folder in this repo.
58 | 1. If you're starting your Kafka cluster from the CLI you can set up JMX exporter following these commands:
59 | ```
60 | export KAFKA_OPTS='-javaagent:{PATH_TO_JMX_EXPORTER}/jmx-exporter.jar={PORT}:{PATH_TO_JMX_EXPORTER_KAFKA.yml}/kafka.yml'
61 | ```
62 | 2. Launch or restart your broker as you normally would.
63 | - Have a Prometheus metric server set up with targets configured for each of your brokers. You should use the `prometheus.template.yml` as a template.
64 |
65 | Please check the docker-compose files in this repo as examples or to spin up a demo cluster.
66 |
67 | ### Installation
68 |
69 | 1. Clone down this repository:
70 |
71 | ```
72 | git clone https://github.com/oslabs-beta/franz
73 | ```
74 |
75 | 2. Create a `.env` file using the template in the `.env.template` file to set the environment variables.
76 | 3. Run the following command in the FranzView root directory to install all dependencies:
77 |
78 | ```
79 | npm install
80 | ```
81 |
82 | 4. Build your version of Franzview:
83 |
84 | ```
85 | npm run build
86 | ```
87 |
88 | ### When you're ready to use FranzView
89 |
90 | 1. Start the server:
91 |
92 | ```
93 | npm start
94 | ```
95 |
96 | 2. FranzView defaults to running on port 3000. So simply go to http://localhost:3000, and voila! You can view your metrics and start managing your Kafka cluster!
97 |
98 | ## Contributors
99 |
100 | - Chris Dunleavy | [GitHub](https://github.com/christopherdunleavy) | [Linkedin](https://www.linkedin.com/in/christopher-dunleavy-web-dev)
101 | - Ryan Hastie | [GitHub](https://github.com/rbhastie) | [Linkedin](https://www.linkedin.com/in/ryan-hastie)
102 | - Jonathan Haviv | [GitHub](https://github.com/jonathanhaviv) | [Linkedin](https://www.linkedin.com/in/jonathanhaviv)
103 | - Rachelle Maiorca | [GitHub](https://github.com/rmaiorca) | [Linkedin](https://www.linkedin.com/in/rmaiorca)
104 | - Patrick Reid | [GitHub](https://github.com/flyingwolf1701) | [Linkedin](https://www.linkedin.com/in/patrickjreid)
105 |
106 | ## Roadmap
107 |
108 | Franzview is in early stages, but we wanted to get it in the hands of developers as soon as possible to be able to start incorporating user feedback immediately. Here are features we're working on bringing to FranzView in the near future:
109 |
110 | - Additional filtering options for topics and to filter data by time
111 | - The option to auto-deploy a Prometheus server if one isn't passed in
112 | - Additional authentication support for Kafka Clusters
113 | - Log exploration to support with troubleshooting
114 | - Consumer metrics to monitor consumer performance and make improvements
115 | - Frontend querying tools so you can query data that is important to your team
116 |
117 | If you don't see a feature that you're looking for listed above, find any bugs, or have any other suggestions, please feel free to [open an issue](https://github.com/oslabs-beta/franz/issues) and our team will work with you to get it implemented!
118 |
119 | Also if you create a custom implementation of FranzView we'd love to see how you're using it!
120 |
121 | ## Prometheus Server and Demo Cluster
122 |
123 | We have a few different docker-compose files depending on your needs.
124 |
125 | - If you just need a Kafka cluster (this will spin up a cluster with one zookeeper instance and three brokers: [localhost:9092](localhost:9092), [localhost:9093](localhost:9093), [localhost:9094](localhost:9094)):
126 | ```
127 | docker-compose -f docker-compose-kafka-only.yml up -d
128 | ```
129 | - If you just need a Prometheus server:
130 | 1. Create a `prometheus.yml` file from the template `prometheus.template.yml`
131 | 1. Save it in the `configs/prometheus` folder
132 | 1. Run the following command to spin up a Prometheus server running at http://localhost:9090:
133 | ```
134 | docker-compose -f docker-compose-prom-only.yml up -d
135 | ```
136 | - If you want to spin up a Prometheus server + Kafka cluster:
137 | 1. We already have a Prometheus config set up, so don't worry about it!
138 | 1. Run the following command to spin up a Prometheus server running at http://localhost:9090 and 3 brokers ([localhost:9092](localhost:9092), [localhost:9093](localhost:9093), [localhost:9094](localhost:9094)):
139 | ```
140 | docker-compose -f docker-compose-kafka-prom.yml up -d
141 | ```
142 |
143 | ## License
144 |
145 | This product is licensed under the MIT License without restriction.
146 |
--------------------------------------------------------------------------------
/__test__/MetricsCard.test.tsx:
--------------------------------------------------------------------------------
1 | /**
2 | * @jest-environment jsdom
3 | */
4 |
5 | import React from "react";
6 | import { render, screen } from "@testing-library/react";
7 |
8 | import MetricsCard from "../src/client/components/MetricsCard";
9 |
10 | describe("MetricsCard", () => {
11 | test("renders MetricsCard component", () => {
12 | render(
13 |
18 | );
19 |
20 | screen.debug();
21 | });
22 | });
23 |
--------------------------------------------------------------------------------
/__test__/server.test.ts:
--------------------------------------------------------------------------------
1 | import request from "supertest";
2 | import appServer from "../src/server/server";
3 | import crypto from "node:crypto";
4 | import { admin } from "../src/server/kafka/kafka";
5 |
// Base URL of the locally running app; supertest issues real HTTP
// requests against it in the REST tests below.
const server = "http://localhost:3000";

beforeAll(async () => {
  // `appServer` and `admin` are exported as promises; resolve them once
  // and stash the results on `global` so every suite can reuse the same
  // Apollo test server and Kafka admin client.
  global.testServer = await appServer;
  global.admin = await admin;
});
12 |
afterAll(async () => {
  // Teardown order matters: disconnect the Kafka admin client first,
  // then stop the server, so Jest can exit without open handles.
  await global.admin.disconnect();
  await global.testServer.stop();
});
17 |
18 | describe("REST Server", () => {
19 | describe("404s for non-existant routes", () => {
20 | it("Bad POST Request", () => {
21 | return request(server).post("/badRoute").expect(404);
22 | });
23 |
24 | it("Bad PUT Request", () => {
25 | return request(server).put("/badRoute").expect(404);
26 | });
27 |
28 | it("Bad DELETE Request", () => {
29 | return request(server).delete("/badRoute").expect(404);
30 | });
31 | });
32 | });
33 |
34 | describe("GraphQL Queries", () => {
35 | describe("Cluster Queries", () => {
36 | it("A query for the cluster type can return the active controller count which is an object with a time field and number.", async () => {
37 | const result = await global.testServer.executeOperation({
38 | query: `query Cluster {
39 | cluster {
40 | activeControllerCount {
41 | count: metric
42 | time
43 | }
44 | }
45 | }`,
46 | });
47 |
48 | expect(result.errors).toBeUndefined();
49 | expect(result.data.cluster).toHaveProperty("activeControllerCount");
50 | expect(result.data.cluster.activeControllerCount).toEqual(
51 | expect.objectContaining({
52 | count: expect.any(Number),
53 | time: expect.any(String),
54 | })
55 | );
56 | });
57 |
58 | it("A query for the cluster type can return the list of brokers in the cluster.", async () => {
59 | const result = await global.testServer.executeOperation({
60 | query: `query Cluster {
61 | cluster {
62 | brokers {
63 | brokerHost
64 | brokerId
65 | brokerPort
66 | cpuUsage {
67 | cpuUsage: metric
68 | time
69 | }
70 | numberUnderReplicatedPartitions {
71 | underReplicatedPartitions: metric
72 | time
73 | }
74 | }
75 | }
76 | }`,
77 | });
78 |
79 | expect(Array.isArray(result.data.cluster.brokers)).toBeTruthy();
80 | expect(result.data.cluster.brokers).toEqual(
81 | expect.arrayContaining([
82 | expect.objectContaining({
83 | brokerId: expect.any(Number),
84 | brokerPort: expect.any(Number),
85 | brokerHost: expect.any(String),
86 | cpuUsage: expect.objectContaining({
87 | cpuUsage: expect.any(Number),
88 | time: expect.any(String),
89 | }),
90 | numberUnderReplicatedPartitions: expect.objectContaining({
91 | underReplicatedPartitions: expect.any(Number),
92 | time: expect.any(String),
93 | }),
94 | }),
95 | ])
96 | );
97 | });
98 |
99 | it("A query for the cluster type can return information about which broker is the active controller.", async () => {
100 | const result = await global.testServer.executeOperation({
101 | query: `query Cluster {
102 | cluster {
103 | brokers {
104 | brokerHost
105 | brokerId
106 | brokerPort
107 | cpuUsage {
108 | cpuUsage:metric
109 | time
110 | }
111 | numberUnderReplicatedPartitions {
112 | underReplicatedPartitions: metric
113 | time
114 | }
115 | }
116 | }
117 | }`,
118 | });
119 |
120 | expect(Array.isArray(result.data.cluster.brokers)).toBeTruthy();
121 | expect(result.data.cluster.brokers).toEqual(
122 | expect.arrayContaining([
123 | expect.objectContaining({
124 | brokerId: expect.any(Number),
125 | brokerPort: expect.any(Number),
126 | brokerHost: expect.any(String),
127 | cpuUsage: expect.objectContaining({
128 | cpuUsage: expect.any(Number),
129 | time: expect.any(String),
130 | }),
131 | numberUnderReplicatedPartitions: expect.objectContaining({
132 | underReplicatedPartitions: expect.any(Number),
133 | time: expect.any(String),
134 | }),
135 | }),
136 | ])
137 | );
138 | });
139 |
140 | it("A query for the cluster type can return the offline partition count which is an object with a time field and number.", async () => {
141 | const result = await global.testServer.executeOperation({
142 | query: `query Cluster {
143 | cluster {
144 | offlinePartitionCount {
145 | count: metric
146 | time
147 | }
148 | }
149 | }`,
150 | });
151 |
152 | expect(result.errors).toBeUndefined();
153 | expect(result.data.cluster).toHaveProperty("offlinePartitionCount");
154 | expect(result.data.cluster.offlinePartitionCount).toEqual(
155 | expect.objectContaining({
156 | count: expect.any(Number),
157 | time: expect.any(String),
158 | })
159 | );
160 | });
161 |
162 | it("The cluster type can be queried to return a boolean if a topic can be delete.", async () => {
163 | const result = await global.testServer.executeOperation({
164 | query: `query Cluster {
165 | cluster {
166 | deleteTopic
167 | }
168 | }`,
169 | });
170 |
171 | expect(result.errors).toBeUndefined();
172 | expect(typeof result.data.cluster.deleteTopic).toBe("boolean");
173 | });
174 | });
175 |
  // Queries that target a single broker (by id) or the full broker list.
  describe("Broker Queries", () => {
    it("A query for a valid broker will have fields: brokerId: Int!, brokerPort: Int!, brokerHost: String!, brokerCpuUsage: BrokerCpuUsage, numberUnderReplicatedPartitions.", async () => {
      const result = await global.testServer.executeOperation({
        query: `query Broker($brokerId: Int!) {
          broker(brokerId: $brokerId) {
            cpuUsage {
              cpuUsage: metric
              time
            }
            numberUnderReplicatedPartitions {
              underReplicatedPartitions: metric
              time
            }
            brokerHost
            brokerPort
            brokerId
          }
        }`,
        // assumes a broker with id 1 exists in the test cluster — TODO confirm
        variables: { brokerId: 1 },
      });

      expect(result.errors).toBeUndefined();
      expect(typeof result.data.broker.brokerId).toBe("number");
      expect(typeof result.data.broker.brokerHost).toBe("string");
      expect(typeof result.data.broker.brokerPort).toBe("number");
      expect(typeof result.data.broker.cpuUsage.cpuUsage).toBe("number");
      expect(typeof result.data.broker.cpuUsage.time).toBe("string");
      expect(
        typeof result.data.broker.numberUnderReplicatedPartitions
          .underReplicatedPartitions
      ).toBe("number");
      expect(
        typeof result.data.broker.numberUnderReplicatedPartitions.time
      ).toBe("string");
    });

    it("A query for brokers will be an array of brokers", async () => {
      const result = await global.testServer.executeOperation({
        query: `query Brokers {
          brokers {
            brokerHost
            brokerId
            brokerPort
            cpuUsage {
              cpuUsage:metric
              time
            }
            numberUnderReplicatedPartitions {
              underReplicatedPartitions: metric
              time
            }
          }
        }`,
      });

      // Shape check only: every element must carry ids, host/port, and
      // timestamped cpu / under-replicated-partition metrics.
      expect(Array.isArray(result.data.brokers)).toBeTruthy();
      expect(result.data.brokers).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            brokerId: expect.any(Number),
            brokerPort: expect.any(Number),
            brokerHost: expect.any(String),
            cpuUsage: expect.objectContaining({
              cpuUsage: expect.any(Number),
              time: expect.any(String),
            }),
            numberUnderReplicatedPartitions: expect.objectContaining({
              underReplicatedPartitions: expect.any(Number),
              time: expect.any(String),
            }),
          }),
        ])
      );
    });

    it("A query for broker can return a field disk usage which is an object with a time field and number.", async () => {
      const result = await global.testServer.executeOperation({
        query: `query Broker($brokerId: Int!) {
          broker(brokerId: $brokerId) {
            JVMMemoryUsage {
              JVMMemoryUsage: metric
              time
            }
          }
        }`,
        variables: {
          brokerId: 1,
        },
      });
      expect(result.errors).toBeUndefined();
      expect(result.data.broker).toHaveProperty("JVMMemoryUsage");
      expect(result.data.broker.JVMMemoryUsage).toEqual(
        expect.objectContaining({
          JVMMemoryUsage: expect.any(Number),
          time: expect.any(String),
        })
      );
    });
  });
275 | });
276 |
// Integration tests for the GraphQL mutations. They run against the live test
// cluster wired up in global setup (global.testServer is the Apollo test
// server, global.admin is a kafkajs Admin client), so every test creates a
// uniquely-named topic to stay independent of pre-existing cluster state.
describe("GraphQL Mutations", () => {
  describe("Delete Topic", () => {
    // Shared between the beforeEach hook and the tests below.
    let topicName;
    beforeEach(async () => {
      // Create a fresh topic per test; the UUID suffix avoids collisions
      // between tests and between repeated runs.
      topicName = `test-topic-${crypto.randomUUID()}`;
      await global.testServer.executeOperation({
        query: `mutation AddTopic($name: String!) {
      addTopic(name: $name) {
        name
      }
    }`,
        variables: {
          name: topicName,
        },
      });
    });

    it("The delete topic mutation returns the topic that was deleted.", async () => {
      const result = await global.testServer.executeOperation({
        query: `mutation DeleteTopic($name: String!) {
      deleteTopic(name: $name) {
        name
      }
    }`,
        variables: { name: topicName },
      });

      expect(result.errors).toBeUndefined();
      // The mutation echoes back the deleted topic's name.
      expect(result.data).toEqual({
        deleteTopic: {
          name: topicName,
        },
      });
    });

    it("Deleting a topic removes it from the cluster and it can no longer be found in the cluster.", async () => {
      await global.testServer.executeOperation({
        query: `mutation DeleteTopic($name: String!) {
      deleteTopic(name: $name) {
        name
      }
    }`,
        variables: { name: topicName },
      });

      // Silence console noise emitted while querying a topic that no longer
      // exists (jest.config clearMocks restores these between tests).
      jest.spyOn(console, "warn").mockImplementation(() => {
        return;
      });
      jest.spyOn(console, "log").mockImplementation(() => {
        return;
      });
      const response = await global.testServer.executeOperation({
        query: `query topic($name: String!) {
      topic(name: $name) {
        name
      }
    }`,

        variables: { name: topicName },
      });

      // A missing topic resolves to null rather than a GraphQL error.
      expect(response.errors).toBeUndefined();
      expect(response.data.topic).toBeNull();
    });
  });

  describe("Add Topic", () => {
    // Set inside each test; read by the afterEach cleanup below.
    let topicName;
    afterEach(async () => {
      // Best-effort cleanup so created topics don't leak into other suites.
      await global.testServer.executeOperation({
        query: `mutation DeleteTopic($name: String!) {
      deleteTopic(name: $name) {
        name
      }
    }`,
        variables: {
          name: topicName,
        },
      });
    });

    it("The add topic mutation returns the topic that was created.", async () => {
      topicName = `test-topic-${crypto.randomUUID()}`;
      const result = await global.testServer.executeOperation({
        query: `mutation AddTopic($name: String!) {
      addTopic(name: $name) {
        name
      }
    }`,
        variables: { name: topicName },
      });

      expect(result.errors).toBeUndefined();
      expect(result.data).toEqual({
        addTopic: {
          name: topicName,
        },
      });
    });

    it("Adding a topic allows for the topic to be found in the cluster.", async () => {
      topicName = `test-topic-${crypto.randomUUID()}`;
      // Only $name is supplied; the optional variables exercise the schema's
      // nullable arguments and fall back to server-side defaults.
      const result = await global.testServer.executeOperation({
        query: `mutation AddTopic($name: String!, $replicationFactor: Int, $numPartitions: Int, $configEntries: [ConfigEntry]) {
      addTopic(name: $name, replicationFactor: $replicationFactor, numPartitions: $numPartitions, configEntries: $configEntries) {
        name
        numPartitions
      }
    }`,
        variables: { name: topicName },
      });

      // Round-trip: the topic created above must be queryable by name.
      const response = await global.testServer.executeOperation({
        query: `query topic($name: String!) {
      topic(name: $name) {
        name
      }
    }`,

        variables: {
          name: topicName,
        },
      });

      expect(result.errors).toBeUndefined();
      expect(response.data.topic.name).toBe(result.data.addTopic.name);
    });
  });

  describe("Reassign Partitions", () => {
    let topicName;
    beforeAll(async () => {
      // Create the topic directly through the kafkajs admin client with a
      // fixed replica assignment so the reassignment below is deterministic.
      topicName = `test-topic-${crypto.randomUUID()}`;
      return await global.admin.createTopics({
        topics: [
          {
            topic: topicName,
            replicaAssignment: [{ partition: 0, replicas: [1, 0] }],
          },
        ],
      });
    });

    afterAll(async () => {
      // NOTE(review): jest.setTimeout here changes the default timeout for
      // subsequently *defined* tests, not this afterAll hook itself — it
      // likely does not do what was intended; confirm and consider passing a
      // timeout to afterAll instead.
      jest.setTimeout(10000);
      return await global.admin.deleteTopics({
        topics: [topicName],
      });
    });

    it("Returns ongoing partition reassignment", async () => {
      // Move partition 0 from brokers [1, 0] to [3, 4].
      // NOTE(review): assumes brokers 3 and 4 exist in the test cluster; the
      // bundled docker-compose files only define broker ids 1-3 — confirm
      // which cluster CI runs against.
      const result = await global.testServer.executeOperation({
        query: `mutation ReassignPartitions($topics: [PartitionReassignment]) {
      reassignPartitions(topics: $topics) {
        name
        partitions {
          partition
          replicas
          addingReplicas
          removingReplicas
        }
      }
    }`,
        variables: {
          topics: [
            {
              topic: topicName,
              partitionAssignment: [
                {
                  partition: 0,
                  replicas: [3, 4],
                },
              ],
            },
          ],
        },
      });

      expect(result.errors).toBeUndefined();
      // While a reassignment is in flight the reported replica set is the
      // union of the target replicas and the ones being removed.
      expect(
        result.data.reassignPartitions.filter(
          (topic) => topic.name === topicName
        )
      ).toEqual([
        {
          name: topicName,
          partitions: [
            {
              partition: 0,
              replicas: [3, 4, 1, 0],
              addingReplicas: [3, 4],
              removingReplicas: [1, 0],
            },
          ],
        },
      ]);
    });
  });
});
476 |
--------------------------------------------------------------------------------
/assets/franzView_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oslabs-beta/franzView/29b099915ce94ea50b6f528e26a77ef21dbc1ccb/assets/franzView_logo.png
--------------------------------------------------------------------------------
/configs/jmx_exporter/jmx_prometheus_javaagent-0.16.1.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oslabs-beta/franzView/29b099915ce94ea50b6f528e26a77ef21dbc1ccb/configs/jmx_exporter/jmx_prometheus_javaagent-0.16.1.jar
--------------------------------------------------------------------------------
/configs/jmx_exporter/kafka.yml:
--------------------------------------------------------------------------------
1 | ---
2 | startDelaySeconds: 120
3 | lowercaseOutputName: true
4 | lowercaseOutputLabelNames: true
5 | cacheRules: true
6 |
7 | whitelistObjectNames:
8 | - java.lang:*
9 | - kafka.cluster:*
10 | - kafka.controller:*
11 | - kafka.log:*
12 | - kafka.server:type=app-info,id=*
13 | - kafka.server:type=KafkaServer,name=BrokerState
14 | - kafka.server:type=KafkaRequestHandlerPool,*
15 | - kafka.server:type=BrokerTopicMetrics,*
16 | - kafka.server:type=DelayedFetchMetrics,*
17 | - kafka.server:type=DelayedOperationPurgatory,*
18 | - kafka.server:type=FetcherLagMetrics,*
19 | - kafka.server:type=FetcherStats,*
20 | - kafka.server:type=Request,*
21 | - kafka.server:type=Fetch,*
22 | - kafka.server:type=Produce,*
23 | - kafka.server:type=ReplicaManager,*
24 | - kafka.server:type=ReplicaFetcherManager,*
25 | - kafka.server:type=SessionExpireListener,*
26 | - kafka.server:type=controller-channel-metrics,*
27 | - kafka.server:type=socket-server-metrics,*
28 | - kafka.network:type=RequestChannel,*
29 | - kafka.network:type=Processor,*
30 | - kafka.network:type=SocketServer,*
31 | - kafka.network:type=RequestMetrics,*
32 | - kafka.network:type=RequestMetrics,name=*,request=OffsetCommit,version=*
33 | - kafka.network:type=RequestMetrics,name=*,request=FetchConsumer,version=*
34 | - kafka.network:type=RequestMetrics,name=*,request=FetchFollower,version=*
35 | - kafka.network:type=RequestMetrics,name=*,request=Produce,version=*
36 | - kafka.network:type=RequestMetrics,name=*,request=Metadata,version=*
37 | - kafka.coordinator.group:*
38 |
39 | rules:
40 | # This is by far the biggest contributor to the number of sheer metrics being produced.
41 | # Always keep it on the top for the case of probability when so many metrics will hit the first condition and exit.
42 | # "kafka.cluster:type=*, name=*, topic=*, partition=*"
43 | # "kafka.log:type=*,name=*, topic=*, partition=*"
44 | - pattern: kafka.(\w+)<>Value
45 | name: kafka_$1_$2_$3
46 | type: GAUGE
47 | labels:
48 | topic: "$4"
49 | partition: "$5"
50 | # "kafka.server:type=*,name=*, client-id=*, topic=*, partition=*"
51 | - pattern: kafka.server<>Value
52 | name: kafka_server_$1_$2
53 | type: GAUGE
54 | labels:
55 | clientId: "$3"
56 | topic: "$4"
57 | partition: "$5"
58 | - pattern: kafka.server<>StartTimeMs
59 | name: kafka_server_$1_$2
60 | type: GAUGE
61 | labels:
62 | brokerId: "$2"
63 | - pattern: kafka.server<>Value
64 | name: kafka_server_$1_$2
65 | type: GAUGE
66 | labels:
67 | clientId: "$3"
68 | broker: "$4:$5"
69 | # "kafka.network:type=*, name=*, request=*, error=*"
70 | # "kafka.network:type=*, name=*, request=*, version=*"
71 | - pattern: kafka.(\w+)<>(Count|Value)
72 | name: kafka_$1_$2_$3
73 | labels:
74 | "$4": "$5"
75 | "$6": "$7"
76 | - pattern: kafka.(\w+)<>(\d+)thPercentile
77 | name: kafka_$1_$2_$3
78 | type: GAUGE
79 | labels:
80 | "$4": "$5"
81 | "$6": "$7"
82 | quantile: "0.$8"
83 | # "kafka.rest:type=*, topic=*, partition=*, client-id=*"
84 | # "kafka.rest:type=*, cipher=*, protocol=*, client-id=*"
85 | - pattern: kafka.(\w+)<>Value
86 | name: kafka_$1_$2
87 | labels:
88 | "$3": "$4"
89 | "$5": "$6"
90 | "$7": "$8"
91 | # Count and Value
92 | # "kafka.server:type=*, name=*, topic=*"
93 | # "kafka.server:type=*, name=*, clientId=*"
94 | # "kafka.server:type=*, name=*, delayedOperation=*"
95 | # "kafka.server:type=*, name=*, fetcherType=*"
96 | # "kafka.network:type=*, name=*, networkProcessor=*"
97 | # "kafka.network:type=*, name=*, processor=*"
98 | # "kafka.network:type=*, name=*, request=*"
99 | # "kafka.network:type=*, name=*, listener=*"
100 | # "kafka.log:type=*, name=*, logDirectory=*"
101 | # "kafka.log:type=*, name=*, op=*"
102 | # "kafka.rest:type=*, node-id=*, client-id=*"
103 | - pattern: kafka.(\w+)<>(Count|Value)
104 | name: kafka_$1_$2_$3
105 | labels:
106 | "$4": "$5"
107 | # "kafka.consumer:type=*, topic=*, client-id=*"
108 | # "kafka.producer:type=*, topic=*, client-id=*"
109 | # "kafka.rest:type=*, topic=*, client-id=*"
110 | # "kafka.server:type=*, broker-id=*, fetcher-id=*"
111 | # "kafka.server:type=*, listener=*, networkProcessor=*"
112 | - pattern: kafka.(\w+)<>(Count|Value)
113 | name: kafka_$1_$2
114 | labels:
115 | "$3": "$4"
116 | "$5": "$6"
117 | # "kafka.network:type=*, name=*"
118 | # "kafka.server:type=*, name=*"
119 | # "kafka.controller:type=*, name=*"
120 | # "kafka.databalancer:type=*, name=*"
121 | # "kafka.log:type=*, name=*"
122 | # "kafka.utils:type=*, name=*"
123 | - pattern: kafka.(\w+)<>(Count|Value)
124 | name: kafka_$1_$2_$3
125 | # "kafka.producer:type=*, client-id=*"
126 | # "kafka.producer:type=*, id=*"
127 | # "kafka.rest:type=*, client-id=*"
128 | # "kafka.rest:type=*, http-status-code=*"
129 | # "kafka.server:type=*, BrokerId=*"
130 | # "kafka.server:type=*, listener=*"
131 | # "kafka.server:type=*, id=*"
132 | - pattern: kafka.(\w+)<>Value
133 | name: kafka_$1_$2
134 | labels:
135 | "$3": "$4"
136 |
137 | - pattern: kafka.server<>OneMinuteRate
138 | name: kafka_server_kafkarequesthandlerpool_requesthandleravgidlepercent_total
139 | type: GAUGE
140 | # "kafka.server:type=*, listener=*, networkProcessor=*, clientSoftwareName=*, clientSoftwareVersion=*"
141 | - pattern: kafka.server<>connections
142 | name: kafka_server_socketservermetrics_connections
143 | type: GAUGE
144 | labels:
145 | client_software_name: "$1"
146 | client_software_version: "$2"
147 | listener: "$3"
148 | network_processor: "$4"
149 | - pattern: "kafka.server<>(.+):"
150 | name: kafka_server_socketservermetrics_$3
151 | type: GAUGE
152 | labels:
153 | listener: "$1"
154 | network_processor: "$2"
155 | # "kafka.coordinator.group:type=*, name=*"
156 | # "kafka.coordinator.transaction:type=*, name=*"
157 | - pattern: kafka.coordinator.(\w+)<>(Count|Value)
158 | name: kafka_coordinator_$1_$2_$3
159 | # Percentile
160 | - pattern: kafka.(\w+)<>(\d+)thPercentile
161 | name: kafka_$1_$2_$3
162 | type: GAUGE
163 | labels:
164 | "$4": "$5"
165 | quantile: "0.$6"
166 | - pattern: kafka.(\w+)<>(\d+)thPercentile
167 | name: kafka_$1_$2_$3
168 | type: GAUGE
169 | labels:
170 | quantile: "0.$4"
171 |
172 | # Quotas
173 | - pattern : 'kafka.server<>(.+):'
174 | name: kafka_server_$1_$4
175 | type: GAUGE
176 | labels:
177 | user: "$2"
178 | client-id: "$3"
179 |
180 | - pattern : 'kafka.server<>(.+):'
181 | name: kafka_server_$1_$3
182 | type: GAUGE
183 | labels:
184 | user: "$2"
185 |
186 | - pattern : 'kafka.server<>(.+):'
187 | name: kafka_server_$1_$3
188 | type: GAUGE
189 | labels:
190 | client-id: "$2"
191 |
--------------------------------------------------------------------------------
/configs/jmx_exporter/zookeeper.yml:
--------------------------------------------------------------------------------
rules:
  # replicated Zookeeper
  # NOTE(review): the MBean name fragments inside <...> were stripped by the
  # text extraction; the patterns below are restored from the canonical
  # jmx_exporter ZooKeeper example config, whose capture-group numbering
  # ($2, $3, $4, $4_$5) and labels match the surviving fragments exactly.
  - pattern: "org.apache.ZooKeeperService<name0=ReplicatedServer_id(\\d+)><>(\\w+)"
    name: "zookeeper_$2"
    type: GAUGE
  - pattern: "org.apache.ZooKeeperService<name0=ReplicatedServer_id(\\d+), name1=replica.(\\d+)><>(\\w+)"
    name: "zookeeper_$3"
    type: GAUGE
    labels:
      replicaId: "$2"
  - pattern: "org.apache.ZooKeeperService<name0=ReplicatedServer_id(\\d+), name1=replica.(\\d+), name2=(\\w+)><>(Packets\\w+)"
    name: "zookeeper_$4"
    type: COUNTER
    labels:
      replicaId: "$2"
      memberType: "$3"
  - pattern: "org.apache.ZooKeeperService<name0=ReplicatedServer_id(\\d+), name1=replica.(\\d+), name2=(\\w+)><>(\\w+)"
    name: "zookeeper_$4"
    type: GAUGE
    labels:
      replicaId: "$2"
      memberType: "$3"
  - pattern: "org.apache.ZooKeeperService<name0=ReplicatedServer_id(\\d+), name1=replica.(\\d+), name2=(\\w+), name3=(\\w+)><>(\\w+)"
    name: "zookeeper_$4_$5"
    type: GAUGE
    labels:
      replicaId: "$2"
      memberType: "$3"
  # standalone Zookeeper
  - pattern: "org.apache.ZooKeeperService<name0=StandaloneServer_port(\\d+)><>(\\w+)"
    type: GAUGE
    name: "zookeeper_$2"
  - pattern: "org.apache.ZooKeeperService<name0=StandaloneServer_port(-?\\d+)><>(\\w+)"
    type: GAUGE
    name: "zookeeper_$2"
--------------------------------------------------------------------------------
/configs/prometheus/prometheus.example.yml:
--------------------------------------------------------------------------------
1 | # This file is used as an example to run with docker-compose-kafka-prom.yml for demo purposes.
2 | global:
3 | scrape_interval: 10s
4 | evaluation_interval: 10s
5 | scrape_configs:
6 | - job_name: 'kafka'
7 | static_configs:
8 | - targets:
9 | - kafka1:8081
10 | - kafka2:8081
11 | - kafka3:8081
--------------------------------------------------------------------------------
/configs/prometheus/prometheus.template.yml:
--------------------------------------------------------------------------------
1 | global:
2 | scrape_interval: 10s
3 | evaluation_interval: 10s
4 | scrape_configs:
5 | - job_name: 'kafka'
6 | static_configs:
7 | - targets:
8 | # Update the list below with your Kafka listener and the port you set when you added the JMX exporter
9 | - kafka1:8081
10 | - kafka2:8081
11 | - kafka3:8081
12 | - kafka4:8081
13 | - kafka5:8081
--------------------------------------------------------------------------------
/configs/prometheus/prometheus.yml:
--------------------------------------------------------------------------------
1 | # This file is used as an example to run with docker-compose-prom-only.yml for demo purposes.
2 | # Update it with the relevant targets for your cluster
3 | global:
4 | scrape_interval: 10s
5 | evaluation_interval: 10s
6 | scrape_configs:
7 | - job_name: 'kafka'
8 | static_configs:
9 | - targets:
10 | - localhost:8081
11 | - localhost:8082
12 | - localhost:8083
--------------------------------------------------------------------------------
/docker-compose-kafka-only.yml:
--------------------------------------------------------------------------------
1 | version: '2.1'
2 |
3 | services:
4 | zoo1:
5 | image: confluentinc/cp-zookeeper:7.0.1
6 | hostname: zoo1
7 | container_name: zoo1
8 | ports:
9 | - "2181:2181"
10 | environment:
11 | ZOOKEEPER_CLIENT_PORT: 2181
12 | ZOOKEEPER_SERVER_ID: 1
13 | ZOOKEEPER_SERVERS: zoo1:2888:3888
14 | EXTRA_ARGS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent-0.16.1.jar=8081:/usr/share/jmx_exporter/zookeeper.yml
15 | KAFKA_HEAP_OPTS: "-Xmx512M -Xms512M"
16 | volumes:
17 | - ./configs/jmx_exporter:/usr/share/jmx_exporter/
18 |
19 |
20 | kafka1:
21 | image: confluentinc/cp-kafka:7.0.1
22 | hostname: kafka1
23 | container_name: kafka1
24 | ports:
25 | - "9092:9092"
26 | - "8081:8081"
27 | environment:
28 | KAFKA_ADVERTISED_LISTENERS: LISTENER_INTERNAL://kafka1:19092,LISTENER_EXTERNAL://localhost:9092
29 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_INTERNAL:PLAINTEXT,LISTENER_EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT
30 | KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_INTERNAL
31 | KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
32 | KAFKA_BROKER_ID: 1
33 | KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
34 | KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
35 | KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
36 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2
37 | KAFKA_OPTS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent-0.16.1.jar=8081:/usr/share/jmx_exporter/kafka.yml
38 | KAFKA_HEAP_OPTS: "-Xmx512M -Xms512M"
39 | depends_on:
40 | - zoo1
41 | volumes:
42 | - ./configs/jmx_exporter:/usr/share/jmx_exporter/
43 |
44 | kafka2:
45 | image: confluentinc/cp-kafka:7.0.1
46 | hostname: kafka2
47 | container_name: kafka2
48 | ports:
49 | - "9093:9093"
50 | - "8082:8081"
51 | environment:
52 | KAFKA_ADVERTISED_LISTENERS: LISTENER_INTERNAL://kafka2:19093,LISTENER_EXTERNAL://localhost:9093
53 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_INTERNAL:PLAINTEXT,LISTENER_EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT
54 | KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_INTERNAL
55 | KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
56 | KAFKA_BROKER_ID: 2
57 | KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
58 | KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
59 | KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
60 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2
61 | KAFKA_OPTS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent-0.16.1.jar=8081:/usr/share/jmx_exporter/kafka.yml
62 | KAFKA_HEAP_OPTS: "-Xmx512M -Xms512M"
63 | depends_on:
64 | - zoo1
65 | volumes:
66 | - ./configs/jmx_exporter:/usr/share/jmx_exporter/
67 |
68 |
69 | kafka3:
70 | image: confluentinc/cp-kafka:7.0.1
71 | hostname: kafka3
72 | container_name: kafka3
73 | ports:
74 | - "9094:9094"
75 | - "8083:8081"
76 | environment:
77 | KAFKA_ADVERTISED_LISTENERS: LISTENER_INTERNAL://kafka3:19094,LISTENER_EXTERNAL://localhost:9094
78 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_INTERNAL:PLAINTEXT,LISTENER_EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT
79 | KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_INTERNAL
80 | KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
81 | KAFKA_BROKER_ID: 3
82 | KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
83 | KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
84 | KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
85 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2
86 | KAFKA_OPTS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent-0.16.1.jar=8081:/usr/share/jmx_exporter/kafka.yml
87 | KAFKA_HEAP_OPTS: "-Xmx512M -Xms512M"
88 | depends_on:
89 | - zoo1
90 | volumes:
91 | - ./configs/jmx_exporter:/usr/share/jmx_exporter/
92 |
--------------------------------------------------------------------------------
/docker-compose-kafka-prom.yml:
--------------------------------------------------------------------------------
1 | version: '2.1'
2 |
3 | services:
4 | zoo1:
5 | image: confluentinc/cp-zookeeper:7.0.1
6 | hostname: zoo1
7 | container_name: zoo1
8 | ports:
9 | - "2181:2181"
10 | environment:
11 | ZOOKEEPER_CLIENT_PORT: 2181
12 | ZOOKEEPER_SERVER_ID: 1
13 | ZOOKEEPER_SERVERS: zoo1:2888:3888
14 | EXTRA_ARGS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent-0.16.1.jar=8081:/usr/share/jmx_exporter/zookeeper.yml
15 | KAFKA_HEAP_OPTS: "-Xmx512M -Xms512M"
16 | volumes:
17 | - ./configs/jmx_exporter:/usr/share/jmx_exporter/
18 |
19 |
20 | kafka1:
21 | image: confluentinc/cp-kafka:7.0.1
22 | hostname: kafka1
23 | container_name: kafka1
24 | ports:
25 | - "9092:9092"
26 | - "8081:8081"
27 | environment:
28 | KAFKA_ADVERTISED_LISTENERS: LISTENER_INTERNAL://kafka1:19092,LISTENER_EXTERNAL://localhost:9092
29 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_INTERNAL:PLAINTEXT,LISTENER_EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT
30 | KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_INTERNAL
31 | KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
32 | KAFKA_BROKER_ID: 1
33 | KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
34 | KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
35 | KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
36 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2
37 | KAFKA_OPTS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent-0.16.1.jar=8081:/usr/share/jmx_exporter/kafka.yml
38 | KAFKA_HEAP_OPTS: "-Xmx512M -Xms512M"
39 | depends_on:
40 | - zoo1
41 | volumes:
42 | - ./configs/jmx_exporter:/usr/share/jmx_exporter/
43 |
44 | kafka2:
45 | image: confluentinc/cp-kafka:7.0.1
46 | hostname: kafka2
47 | container_name: kafka2
48 | ports:
49 | - "9093:9093"
50 | - "8082:8081"
51 | environment:
52 | KAFKA_ADVERTISED_LISTENERS: LISTENER_INTERNAL://kafka2:19093,LISTENER_EXTERNAL://localhost:9093
53 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_INTERNAL:PLAINTEXT,LISTENER_EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT
54 | KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_INTERNAL
55 | KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
56 | KAFKA_BROKER_ID: 2
57 | KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
58 | KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
59 | KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
60 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2
61 | KAFKA_OPTS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent-0.16.1.jar=8081:/usr/share/jmx_exporter/kafka.yml
62 | KAFKA_HEAP_OPTS: "-Xmx512M -Xms512M"
63 | depends_on:
64 | - zoo1
65 | volumes:
66 | - ./configs/jmx_exporter:/usr/share/jmx_exporter/
67 |
68 |
69 | kafka3:
70 | image: confluentinc/cp-kafka:7.0.1
71 | hostname: kafka3
72 | container_name: kafka3
73 | ports:
74 | - "9094:9094"
75 | - "8083:8081"
76 | environment:
77 | KAFKA_ADVERTISED_LISTENERS: LISTENER_INTERNAL://kafka3:19094,LISTENER_EXTERNAL://localhost:9094
78 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_INTERNAL:PLAINTEXT,LISTENER_EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT
79 | KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_INTERNAL
80 | KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
81 | KAFKA_BROKER_ID: 3
82 | KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
83 | KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
84 | KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
85 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2
86 | KAFKA_OPTS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent-0.16.1.jar=8081:/usr/share/jmx_exporter/kafka.yml
87 | KAFKA_HEAP_OPTS: "-Xmx512M -Xms512M"
88 | depends_on:
89 | - zoo1
90 | volumes:
91 | - ./configs/jmx_exporter:/usr/share/jmx_exporter/
92 |
93 | prometheus:
94 | image: prom/prometheus
95 | container_name: prometheus
96 | ports:
97 | - 9090:9090
98 | volumes:
99 | - ./configs/prometheus/prometheus.example.yml:/etc/prometheus/prometheus.yml
100 |
--------------------------------------------------------------------------------
/docker-compose-prom-only.yml:
--------------------------------------------------------------------------------
1 | version: '2.1'
2 |
3 | services:
4 | prometheus:
5 | image: prom/prometheus
6 | container_name: prometheus
7 | ports:
8 | - 9090:9090
9 | volumes:
10 | - ./configs/prometheus:/etc/prometheus
--------------------------------------------------------------------------------
/jest.config.js:
--------------------------------------------------------------------------------
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
// Jest configuration: TypeScript tests compiled by ts-jest, run in Node.
module.exports = {
  preset: "ts-jest",
  testEnvironment: "node", // server-side default; DOM-based tests opt in per file
  clearMocks: true, // automatically reset mock state between every test
  collectCoverage: true,
  coverageDirectory: "coverage",
  coveragePathIgnorePatterns: ["/node_modules/"],
  coverageProvider: "v8", // use V8's built-in coverage instead of babel instrumentation
  moduleFileExtensions: [
    "js",
    "mjs",
    "cjs",
    "jsx",
    "ts",
    "tsx",
    "json",
    "node",
  ],
  // NOTE(review): the repo's test folder is named `__test__` (singular), which
  // the first pattern (`__tests__`) does not match; its files are picked up by
  // the `*.test.*` suffix pattern instead — confirm this is intentional.
  testMatch: ["**/__tests__/**/*.[jt]s?(x)", "**/?(*.)+(spec|test).[tj]s?(x)"],
  testPathIgnorePatterns: ["/node_modules/"],
  verbose: true,
};
24 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "franz",
3 | "version": "1.0.0",
4 | "description": "Kafka Manager",
5 | "main": "index.js",
6 | "types": "types/index.d.ts",
7 | "scripts": {
8 | "prod": "npm run build && npm run start",
9 | "build": "cross-env NODE_ENV=production webpack",
10 | "start": "cross-env NODE_ENV=production ts-node src/server/server.ts",
11 | "prepare": "husky install",
12 | "dev": "concurrently \"cross-env NODE_ENV=development webpack serve --open\" \"cross-env NODE_ENV=development nodemon src/server/server.ts\"",
13 | "test": "jest",
14 | "windev": "concurrently \"cross-env NODE_ENV=development webpack-dev-server --open --hot\" \"nodemon src/server/server.ts\""
15 | },
16 | "repository": {
17 | "type": "git",
18 | "url": "git+https://github.com/oslabs-beta/franz.git"
19 | },
20 | "author": "Rachelle Maiorca, Ryan Hastie, Chris Dunleavy, Patrick Reid, Jonathan Haviv",
21 | "license": "MIT",
22 | "bugs": {
23 | "url": "https://github.com/oslabs-beta/franz/issues"
24 | },
25 | "homepage": "https://github.com/oslabs-beta/franz#readme",
26 | "devDependencies": {
27 | "@babel/core": "^7.17.10",
28 | "@babel/preset-env": "^7.17.10",
29 | "@babel/preset-react": "^7.16.7",
30 | "@testing-library/react": "^12.1.5",
31 | "@types/express": "^4.17.13",
32 | "@types/jest": "^27.5.2",
33 | "@types/react": "^17.0.1",
34 | "@types/react-router": "^5.1.18",
35 | "@types/react-router-dom": "^5.3.3",
36 | "@types/supertest": "^2.0.12",
37 | "@typescript-eslint/eslint-plugin": "^5.23.0",
38 | "@typescript-eslint/parser": "^5.23.0",
39 | "babel-jest": "^28.1.0",
40 | "babel-loader": "^8.2.5",
41 | "concurrently": "^7.2.1",
42 | "cross-env": "^7.0.3",
43 | "css-loader": "^6.7.1",
44 | "eslint": "^8.15.0",
45 | "eslint-config-prettier": "^8.5.0",
46 | "eslint-plugin-jest-dom": "^4.0.2",
47 | "eslint-plugin-react": "^7.29.4",
48 | "eslint-plugin-testing-library": "^5.5.1",
49 | "file-loader": "^6.2.0",
50 | "html-webpack-harddisk-plugin": "^2.0.0",
51 | "html-webpack-plugin": "^5.5.0",
52 | "husky": "^8.0.1",
53 | "jest": "^28.1.0",
54 | "jest-environment-jsdom": "^28.1.1",
55 | "lint-staged": "^12.4.1",
56 | "nodemon": "^2.0.16",
57 | "prettier": "^2.6.2",
58 | "source-map-loader": "^3.0.1",
59 | "style-loader": "^3.3.1",
60 | "supertest": "^6.2.3",
61 | "ts-jest": "^28.0.3",
62 | "ts-loader": "^9.3.0",
63 | "ts-node": "^10.8.0",
64 | "typescript": "^4.6.4",
65 | "webpack": "^5.72.1",
66 | "webpack-bundle-analyzer": "^4.5.0",
67 | "webpack-cli": "^4.9.2",
68 | "webpack-dev-server": "^4.9.0"
69 | },
70 | "dependencies": {
71 | "@apollo/client": "3.5.4",
72 | "@emotion/react": "^11.9.0",
73 | "@emotion/styled": "^11.9.0",
74 | "@fortawesome/fontawesome-svg-core": "^6.1.1",
75 | "@fortawesome/free-regular-svg-icons": "^6.1.1",
76 | "@fortawesome/free-solid-svg-icons": "^6.1.1",
77 | "@fortawesome/react-fontawesome": "^0.1.18",
78 | "@mui/icons-material": "^5.8.0",
79 | "@mui/material": "^5.8.1",
80 | "@mui/x-data-grid": "^5.12.0",
81 | "apollo-datasource-rest": "^3.6.0",
82 | "apollo-server-core": "^3.7.0",
83 | "apollo-server-express": "^3.7.0",
84 | "chart.js": "^3.8.0",
85 | "chartjs-adapter-luxon": "^1.1.0",
86 | "chartjs-plugin-streaming": "^2.0.0",
87 | "dotenv": "^16.0.1",
88 | "express": "^4.18.1",
89 | "graphql": "^16.5.0",
90 | "kafkajs": "^2.2.0-beta.2",
91 | "luxon": "^2.4.0",
92 | "react": "^17.0.2",
93 | "react-chartjs-2": "^4.1.0",
94 | "react-dom": "^17.0.2",
95 | "react-router": "^6.3.0",
96 | "react-router-dom": "^6.3.0",
97 | "ts-node": "^10.8.0"
98 | },
99 | "lint-staged": {
100 | "*.{js,ts,jsx,tsx}": "eslint --cache --fix",
101 | "*.{js,css,md,ts,tsx,jsx}": "prettier --write"
102 | },
103 | "optionalDependencies": {
104 | "fsevents": "^2.3.2"
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/public/index.html:
--------------------------------------------------------------------------------
1 | Franz
--------------------------------------------------------------------------------
/src/client/App.tsx:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import { createTheme, ThemeProvider } from "@mui/material/styles";
3 | import Dashboard from "./pages/Dashboard";
4 | import Brokers from "./pages/Brokers";
5 | import Topics from "./pages/Topics";
6 | import AddTopic from "./components/AddTopic";
7 | import TopicsList from "./pages/TopicsList";
8 | import { BrowserRouter, Routes, Route } from "react-router-dom";
9 | import { ReassignPartitions } from "./components/ReassignPartitions";
10 | import { ApolloClient, InMemoryCache, ApolloProvider } from "@apollo/client";
11 | import { BatchHttpLink } from "@apollo/client/link/batch-http";
12 | import { Chart } from "chart.js";
13 | import { Layout } from "./Layout/Layout";
14 | // Create a batch link to have reduce network requests needed to query data
const link = new BatchHttpLink({
  uri: "/graphql", // same-origin GraphQL endpoint served by the Express server
  batchMax: 6, // at most 6 operations per batched HTTP request
  batchInterval: 20, // wait up to 20ms to collect operations into a batch
  batchDebounce: true, // restart the interval on each new operation
});

// Apollo client with cache normalization keyed to match the schema:
// brokers are identified by brokerId; Cluster is a singleton (no key fields).
const client = new ApolloClient({
  link,
  cache: new InMemoryCache({
    typePolicies: {
      Broker: {
        keyFields: ["brokerId"],
        merge: true, // merge incoming Broker fields into the cached object
        fields: {},
      },
      Cluster: {
        keyFields: [], // singleton: all Cluster results normalize to one entry
      },
    },
  }),
});

// App-wide MUI dark theme; `info` is the accent color used by charts/cards.
const darkTheme = createTheme({
  palette: {
    mode: "dark",
    info: {
      main: "#9d5ee1",
    },
  },
});

// Keep Chart.js global styling in sync with the MUI theme.
Chart.defaults.color = darkTheme.palette.text.primary;
Chart.defaults.borderColor = darkTheme.palette.divider;
49 |
50 | //`http://localhost:${process?.env.PORT || 3000}/graphql`,
51 |
52 | const App: React.FC = () => {
53 | return (
54 |
55 |
56 |
57 |
58 |
59 | } />
60 | } />
61 | } />
62 | } />
63 | } />
64 | }
67 | />
68 |
72 | nothing here!
73 |
74 | }
75 | />
76 |
77 |
78 |
79 |
80 |
81 | );
82 | };
83 |
84 | export default App;
85 |
--------------------------------------------------------------------------------
/src/client/Layout/Layout.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import { styled, useTheme, Theme, CSSObject } from "@mui/material/styles";
3 | import Box from "@mui/material/Box";
4 | import MuiDrawer from "@mui/material/Drawer";
5 | import MuiAppBar, { AppBarProps as MuiAppBarProps } from "@mui/material/AppBar";
6 | import Toolbar from "@mui/material/Toolbar";
7 | import List from "@mui/material/List";
8 | import CssBaseline from "@mui/material/CssBaseline";
9 | import Typography from "@mui/material/Typography";
10 | import Divider from "@mui/material/Divider";
11 | import IconButton from "@mui/material/IconButton";
12 | import MenuIcon from "@mui/icons-material/Menu";
13 | import ChevronLeftIcon from "@mui/icons-material/ChevronLeft";
14 | import ChevronRightIcon from "@mui/icons-material/ChevronRight";
15 | import ListItem from "@mui/material/ListItem";
16 | import ListItemButton from "@mui/material/ListItemButton";
17 | import ListItemIcon from "@mui/material/ListItemIcon";
18 | import ListItemText from "@mui/material/ListItemText";
19 | import Badge from "@mui/material/Badge";
20 | import NotificationsIcon from "@mui/icons-material/Notifications";
21 | import DashboardIcon from "@mui/icons-material/Dashboard";
22 | import DynamicFeed from "@mui/icons-material/DynamicFeed";
23 | import Topic from "@mui/icons-material/Topic";
24 | import Link from "@mui/material/Link";
25 | import AddBoxIcon from "@mui/icons-material/AddBox";
26 | import { Link as RouterLink } from "react-router-dom";
27 | import ListAltIcon from "@mui/icons-material/ListAlt";
28 |
// Width, in px, of the navigation drawer when expanded.
const drawerWidth = 240;

// Footer copyright line ("Copyright © OSLabs Beta <year>.") rendered at the
// bottom of the layout; `props` are forwarded to the (stripped) Typography.
// NOTE(review): JSX tags were stripped in this dump; only text children remain.
function Copyright(props: any) {
  return (

      {"Copyright © "}

        OSLabs Beta
      {" "}
      {new Date().getFullYear()}
      {"."}

  );
}
48 |
// Primary sidebar navigation entries.
// NOTE(review): the icon JSX elements were stripped in this dump ("icon: ,");
// restore them from version control.
const menuItems = [
  {
    text: "Home",
    icon: ,
    link: "/",
  },
  {
    text: "Broker",
    icon: ,
    link: "/brokers",
  },
  {
    text: "Topics",
    icon: ,
    link: "/topics",
  },
];

// Secondary sidebar entries for topic-management actions.
const secondaryMenuItems = [
  {
    text: "Manage Topics",
    icon: ,
    link: "/topicslist",
  },
  {
    text: "Create a Topic",
    icon: ,
    link: "/addtopic",
  },
];
79 |
80 | const openedMixin = (theme: Theme): CSSObject => ({
81 | width: drawerWidth,
82 | transition: theme.transitions.create("width", {
83 | easing: theme.transitions.easing.sharp,
84 | duration: theme.transitions.duration.enteringScreen,
85 | }),
86 | overflowX: "hidden",
87 | });
88 |
89 | const closedMixin = (theme: Theme): CSSObject => ({
90 | transition: theme.transitions.create("width", {
91 | easing: theme.transitions.easing.sharp,
92 | duration: theme.transitions.duration.leavingScreen,
93 | }),
94 | overflowX: "hidden",
95 | width: `calc(${theme.spacing(7)} + 1px)`,
96 | [theme.breakpoints.up("sm")]: {
97 | width: `calc(${theme.spacing(8)} + 1px)`,
98 | },
99 | });
100 |
// Spacer matching the AppBar height (via theme.mixins.toolbar) that
// right-aligns its content — it hosts the drawer's close chevron.
const DrawerHeader = styled("div")(({ theme }) => ({
  display: "flex",
  alignItems: "center",
  justifyContent: "flex-end",
  padding: theme.spacing(0, 1),
  // necessary for content to be below app bar
  ...theme.mixins.toolbar,
}));
109 |
110 | interface AppBarProps extends MuiAppBarProps {
111 | open?: boolean;
112 | }
113 |
114 | const AppBar = styled(MuiAppBar, {
115 | shouldForwardProp: (prop) => prop !== "open",
116 | })(({ theme, open }) => ({
117 | zIndex: theme.zIndex.drawer + 1,
118 | transition: theme.transitions.create(["width", "margin"], {
119 | easing: theme.transitions.easing.sharp,
120 | duration: theme.transitions.duration.leavingScreen,
121 | }),
122 | ...(open && {
123 | marginLeft: drawerWidth,
124 | width: `calc(100% - ${drawerWidth}px)`,
125 | transition: theme.transitions.create(["width", "margin"], {
126 | easing: theme.transitions.easing.sharp,
127 | duration: theme.transitions.duration.enteringScreen,
128 | }),
129 | }),
130 | }));
131 |
132 | const Drawer = styled(MuiDrawer, {
133 | shouldForwardProp: (prop) => prop !== "open",
134 | })(({ theme, open }) => ({
135 | width: drawerWidth,
136 | flexShrink: 0,
137 | whiteSpace: "nowrap",
138 | boxSizing: "border-box",
139 | ...(open && {
140 | ...openedMixin(theme),
141 | "& .MuiDrawer-paper": openedMixin(theme),
142 | }),
143 | ...(!open && {
144 | ...closedMixin(theme),
145 | "& .MuiDrawer-paper": closedMixin(theme),
146 | }),
147 | }));
148 |
// Application chrome: persistent AppBar plus collapsible mini-variant Drawer
// (primary and secondary nav lists) wrapping the routed page content passed
// as `children`. The drawer starts collapsed and is toggled via the menu /
// chevron buttons.
// NOTE(review): most JSX element tags were stripped by the extraction that
// produced this dump — only expression children survive below. Recover the
// markup from version control before editing this return value.
export function Layout({ children }: { children: React.ReactNode }) {
  const theme = useTheme();
  // Drawer open/closed state; starts collapsed.
  const [open, setOpen] = React.useState(false);

  const handleDrawerOpen = () => {
    setOpen(true);
  };

  const handleDrawerClose = () => {
    setOpen(false);
  };

  return (




        FranzView



        {theme.direction === "rtl" ? (

        ) : (

        )}



        {menuItems.map((el) => (


            {el.icon}


        ))}


        {secondaryMenuItems.map((el) => (


            {el.icon}


        ))}



            theme.palette.mode === "light"
              ? theme.palette.grey[100]
              : theme.palette.grey[900],
          flexGrow: 1,
          overflow: "auto",
        }}
      >
        {children}



  );
}
285 |
--------------------------------------------------------------------------------
/src/client/components/AddTopic.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState } from "react";
2 | import Container from "@mui/material/Container";
3 | import Grid from "@mui/material/Grid";
4 | import TextField from "@mui/material/TextField";
5 | import Button from "@mui/material/Button";
6 | import Box from "@mui/material/Box";
7 | import { useMutation, useQuery } from "@apollo/client";
8 | import { ADD_TOPIC, CORE_ALL_BROKERS_QUERY } from "../models/queries";
9 | import { useNavigate } from "react-router-dom";
10 |
// Form page for creating a Kafka topic: validates the name and replication
// factor, fires the ADD_TOPIC mutation, then redirects to the topics list.
// NOTE(review): the JSX element tags in the return value were stripped in
// this dump; restore them from version control before editing the markup.
function AddTopic() {
  // Controlled form fields plus per-field validity flags.
  const [topicName, setTopicName] = useState("");
  const [topicNameInvalid, setTopicNameInvalid] = useState(false);
  const [replicationFactor, setReplicationFactor] = useState("");
  const [replicationFactorInvalid, setReplicationFactorInvalid] =
    useState(false);
  const [numPartitions, setNumPartitions] = useState("");
  // Broker list is fetched to cap the replication factor at the broker count.
  const brokers = useQuery(CORE_ALL_BROKERS_QUERY, {
    fetchPolicy: "cache-and-network",
  });
  const [addTopic, { loading, error }] = useMutation(ADD_TOPIC);
  const navigate = useNavigate();

  const onSubmit = (e) => {
    e.preventDefault();

    let invalidSubmission = false;

    // Topic name is required.
    if (topicName === "") {
      setTopicNameInvalid(true);
      invalidSubmission = true;
    }

    // Replication factor cannot exceed the number of brokers.
    // NOTE(review): brokers.data is undefined until the query resolves
    // (cache-and-network with a cold cache), so this line can throw —
    // guard with brokers.data?.brokers before shipping.
    if (Number(replicationFactor) > brokers.data.brokers.length) {
      setReplicationFactorInvalid(true);
      invalidSubmission = true;
    }

    if (invalidSubmission) return;

    // Normalize the name (spaces -> dashes, lowercase). A value of -1 asks
    // the server to fall back to its default for that numeric field.
    addTopic({
      variables: {
        name: topicName.replaceAll(" ", "-").toLowerCase(),
        replicationFactor:
          Number(replicationFactor) <= 0 ? -1 : Number(replicationFactor),
        numPartitions: Number(numPartitions) <= 0 ? -1 : Number(numPartitions),
      },
    });

    // NOTE(review): `loading`/`error` are the values captured at render
    // time, not the outcome of the addTopic call above (the mutation is
    // async), so this block runs — and navigates — before the request
    // settles. Await the mutation or use onCompleted/onError instead.
    if (!loading && !error) {
      setTopicNameInvalid(false);
      setReplicationFactorInvalid(false);
      setTopicName("");
      setReplicationFactor("");
      setNumPartitions("");
      navigate("/topicslist", { replace: true });
    }
  };

  return (
    <>

        {loading}
        Create a Topic
         onSubmit(e)}
        >

             setTopicName(e.target.value)}
            />



             setReplicationFactor(e.target.value)}
            />


             setNumPartitions(e.target.value)}
            />


              Submit



    >
  );
}

export default AddTopic;
137 |
--------------------------------------------------------------------------------
/src/client/components/CPUUsage.tsx:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import {
3 | Chart as ChartJS,
4 | CategoryScale,
5 | LinearScale,
6 | PointElement,
7 | LineElement,
8 | Title,
9 | Tooltip,
10 | Legend,
11 | } from "chart.js";
12 | import { Line } from "react-chartjs-2";
13 |
14 | // import type { ChartData, ChartOptions } from "chart.js";
15 | // interface LineProps {
16 | // options: ChartOptions<"line">;
17 | // data: ChartData<"line">;
18 | // }
19 |
// Register the Chart.js pieces this static line chart needs.
ChartJS.register(
  CategoryScale,
  LinearScale,
  PointElement,
  LineElement,
  Title,
  Tooltip,
  Legend
);

// Static chart options: responsive layout, legend on top, fixed title.
const options = {
  responsive: true,
  // scales: {
  //   x: { type: "time" },
  //   time: { unit: "seconds" },
  // },
  plugins: {
    legend: {
      position: "top" as const,
    },
    title: {
      display: true,
      text: "CPU Usage",
    },
  },
};
46 |
// Fixed x-axis labels for the mock chart.
const labels = [0, 1, 2, 3, 4, 5];

// Placeholder dataset: one random series per broker. This chart is a mock —
// it is not wired to live metrics (note Broker 1 and Broker 3 share the same
// border color).
const data = {
  labels,
  datasets: [
    {
      label: "Broker 1",
      data: labels.map(() => Math.floor(Math.random() * 1000)),
      borderColor: "rgb(255, 99, 132)",
      backgroundColor: "rgba(255, 99, 132, 0.5)",
    },
    {
      label: "Broker 2",
      data: labels.map(() => Math.floor(Math.random() * 1000)),
      borderColor: "rgb(53, 162, 235)",
      backgroundColor: "rgba(53, 162, 235, 0.5)",
    },
    {
      label: "Broker 3",
      data: labels.map(() => Math.floor(Math.random() * 1000)),
      borderColor: "rgb(255, 99, 132)",
      backgroundColor: "rgba(75, 192, 192, 0.5)",
    },
  ],
};

// NOTE(review): the returned JSX element was stripped in this dump
// (presumably <Line options={options} data={data} />); restore it from
// version control — as written, this renders nothing.
export default function Chart() {
  return ;
}
76 |
--------------------------------------------------------------------------------
/src/client/components/ConfirmationDialog.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState } from "react";
2 | import Button from "@mui/material/Button";
3 | import TextField from "@mui/material/TextField";
4 | import Dialog from "@mui/material/Dialog";
5 | import DialogActions from "@mui/material/DialogActions";
6 | import DialogContent from "@mui/material/DialogContent";
7 | import DialogContentText from "@mui/material/DialogContentText";
8 | import DialogTitle from "@mui/material/DialogTitle";
9 | import { validate } from "../utils/validate";
10 | import { DialogProps } from "../../../types/types";
11 | import { useMutation } from "@apollo/client";
12 |
// Generic confirm dialog: the user must type a value that passes the
// `control` validation before the supplied GraphQL mutation (`actions`) is
// fired with `args` as its variables; `update` runs on completion.
// NOTE(review): the JSX element tags in the return value were stripped in
// this dump; restore them from version control before editing the markup.
export default function ConfirmationDialog({
  title,
  content,
  label,
  actions,
  control,
  args,
  variant,
  color,
  cta,
  disabled,
  update,
}: DialogProps) {
  // Dialog-local state: typed confirmation text, validation failure flag,
  // and whether the dialog is open.
  const [value, setValue] = useState("");
  const [formError, setFormError] = useState(false);
  const [open, setOpen] = useState(false);
  const [mutation, { loading, error }] = useMutation(actions);

  const handleOpen = () => {
    setOpen(true);
  };

  const handleClose = () => {
    setOpen(false);
  };

  // Fire the mutation only when the typed value passes the `control` check;
  // otherwise flag the form as invalid.
  const handleSubmit = () => {
    if (validate(value, control)) {
      mutation({
        variables: {
          ...args,
        },
        onCompleted: () => {
          update();
        },
      });
      // NOTE(review): `loading`/`error` are render-time values, not the
      // outcome of the mutation fired above, so the dialog resets and
      // closes before the request settles; move this into
      // onCompleted/onError for the intended behavior.
      if (!loading && !error) {
        setValue("");
        setFormError(false);
        handleClose();
      }
    } else {
      setFormError(true);
    }
  };

  return (


      {cta}


      {title}

        {content}
         setValue(e.target.value)}
        />


          Cancel

          {cta}




  );
}
95 |
--------------------------------------------------------------------------------
/src/client/components/ConsumerCard.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import Accordion from "@mui/material/Accordion";
3 | import AccordionSummary from "@mui/material/AccordionSummary";
4 | import AccordionDetails from "@mui/material/AccordionDetails";
5 | import Typography from "@mui/material/Typography";
6 | import Box from "@mui/material/Box";
7 | import Title from "./Title";
8 | import ExpandMoreIcon from "@mui/icons-material/ExpandMore";
9 | import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
10 | import { faHeartPulse } from "@fortawesome/free-solid-svg-icons";
11 |
// Static placeholder card: three hard-coded consumer accordions with lorem
// ipsum bodies — not wired to live consumer-group data yet.
// NOTE(review): the JSX element tags were stripped in this dump; only text
// children and a few surviving attributes remain below.
export default function ConsumerCard() {
  return (

    Consumers and Consumer Groups

      }
        aria-controls="panel1a-content"
        id="panel1a-header"
      >


          Consumer id: 1


        Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse
        malesuada lacus ex, sit amet blandit leo lobortis eget.



      }
        aria-controls="panel2a-content"
        id="panel2a-header"
      >


          Consumer id: 2


        Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse
        malesuada lacus ex, sit amet blandit leo lobortis eget.



      }
        aria-controls="panel3a-content"
        id="panel3a-header"
      >


          Consumer id: 3


        Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse
        malesuada lacus ex, sit amet blandit leo lobortis eget.



  );
}
73 |
--------------------------------------------------------------------------------
/src/client/components/EditableField.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState } from "react";
2 | import Button from "@mui/material/Button";
3 | import TextField from "@mui/material/TextField";
4 |
// Props for EditableField: the initial display string, callbacks invoked
// when the value is saved (`update`) or reverted (`revert`), and the row id
// passed back to those callbacks.
interface EditFieldProps {
  initial: string;
  update: (...args: unknown[]) => void;
  revert: (...args: unknown[]) => void;
  id: number | string;
}

// Inline-editable text cell (used for partition replica lists): shows the
// value with an Edit button; while editing, shows a TextField with
// Save/Cancel; once changed, a Revert button restores the original.
// NOTE(review): the JSX element tags in the return value were stripped in
// this dump; restore them from version control before editing the markup.
function EditableField({ initial, update, revert, id }: EditFieldProps) {
  const [editing, setEditing] = useState(false);
  // Whether a save has diverged from `initial` (enables the Revert button).
  const [changed, setChanged] = useState(false);
  const [draft, setDraft] = useState(initial);

  // Commit the draft: parse the comma-separated broker ids and hand them to
  // the parent via `update`, then leave edit mode.
  const save = () => {
    // NOTE(review): .replace(" ", "") strips only the FIRST space — likely
    // .replaceAll was intended (it is used elsewhere in this codebase).
    if (draft.replace(" ", "") !== initial) {
      setChanged(true);
      // NOTE(review): `new Number(...)` creates boxed Number OBJECTS
      // (typeof "object"), not primitives; strict comparisons and GraphQL
      // serialization downstream may misbehave — plain Number(replica) is
      // almost certainly intended.
      const replicas = draft.split(",").map((replica) => new Number(replica));
      update(id, replicas);
    }
    return setEditing(false);
  };

  // Discard the draft, clear the changed flag, and tell the parent to drop
  // any proposed assignment for this row.
  const cancel = () => {
    setDraft(initial);
    setChanged(false);
    revert(id);
    if (editing) setEditing(false);
    return;
  };

  return (
    <>
      {!editing && (
        <>
          {draft}
           setEditing(true)}
            variant="contained"
            color="success"
            size="small"
            sx={{
              color: "#F8F0E3",
              mx: 1,
              fontWeight: "bold",
              width: "100px",
              verticalAlign: "baseline",
            }}
          >
            Edit

          {changed && (

            Revert

          )}
        >
      )}
      {editing && (

           setDraft(e.target.value)}
            margin="none"
            inputProps={{
              style: { fontSize: ".875rem", height: "1em" },
            }}
          />

            Save


            Cancel


      )}
    >
  );
}

export { EditableField };
118 |
--------------------------------------------------------------------------------
/src/client/components/Header.tsx:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import { styled } from "@mui/material/styles";
3 | import MuiAppBar, { AppBarProps as MuiAppBarProps } from "@mui/material/AppBar";
4 | import Toolbar from "@mui/material/Toolbar";
5 | import Typography from "@mui/material/Typography";
6 | import IconButton from "@mui/material/IconButton";
7 | import Badge from "@mui/material/Badge";
8 | import MenuIcon from "@mui/icons-material/Menu";
9 | import NotificationsIcon from "@mui/icons-material/Notifications";
10 |
11 | interface AppBarProps extends MuiAppBarProps {
12 | open?: boolean;
13 | }
14 |
15 | const drawerWidth = 240;
16 |
17 | const AppBar = styled(MuiAppBar, {
18 | shouldForwardProp: (prop) => prop !== "open",
19 | })(({ theme, open }) => ({
20 | zIndex: theme.zIndex.drawer + 1,
21 | transition: theme.transitions.create(["width", "margin"], {
22 | easing: theme.transitions.easing.sharp,
23 | duration: theme.transitions.duration.leavingScreen,
24 | }),
25 | ...(open && {
26 | marginLeft: drawerWidth,
27 | width: `calc(100% - ${drawerWidth}px)`,
28 | transition: theme.transitions.create(["width", "margin"], {
29 | easing: theme.transitions.easing.sharp,
30 | duration: theme.transitions.duration.enteringScreen,
31 | }),
32 | }),
33 | }));
34 |
// Static top bar ("Franz" title, menu button, notifications badge).
// NOTE(review): the JSX element tags were stripped in this dump — only the
// "Franz" text child remains. Restore the markup from version control.
const Header = () => {
  return (




        Franz








  );
};

export default Header;
74 |
--------------------------------------------------------------------------------
/src/client/components/MetricsCard.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import Typography from "@mui/material/Typography";
3 | import Title from "./Title";
4 | import Box from "@mui/material/Box";
5 | import ListItemIcon from "@mui/material/ListItemIcon";
6 |
7 | import { useQuery } from "@apollo/client";
8 |
9 | import { MetricsCardProps } from "../../../types/types";
10 |
// Depth-first search through a (possibly nested) plain object for the first
// property whose key equals `string`; returns that property's value, or
// undefined when no such key exists anywhere in the tree.
const keySearch = (obj, string) => {
  // Guard: nothing to search in primitives or null (typeof null === "object").
  if (obj === null || typeof obj !== "object") return undefined;
  for (const key in obj) {
    if (key === string) {
      return obj[key];
    }
    // Recurse into nested objects, but keep scanning the remaining sibling
    // keys when the subtree does not contain the target. (The previous
    // version returned the first recursive result unconditionally, so any
    // key listed after the first object-valued key was never examined.)
    if (typeof obj[key] === "object" && obj[key] !== null) {
      const found = keySearch(obj[key], string);
      if (found !== undefined) return found;
    }
  }
  return undefined;
};
21 |
// Dashboard stat card: renders a title, a value, a description, and an icon.
// When `query` is provided, the value is fetched via Apollo and extracted
// from the response by `searchingFor`; otherwise the `value` prop is shown.
// NOTE(review): the JSX element tags in the return value were stripped in
// this dump; restore them from version control before editing the markup.
const MetricsCard = ({
  value,
  title,
  description,
  icon,
  query,
  variables,
  searchingFor,
}: MetricsCardProps) => {
  // NOTE(review): calling useQuery inside a conditional violates the Rules
  // of Hooks — if `query` ever differs between truthy/falsy across renders,
  // React's hook state desynchronizes. Hoist the hook and use its `skip`
  // option instead.
  if (query) {
    const { loading, data } = useQuery(query, { ...variables });
    value = loading ? "Loading..." : keySearch(data, searchingFor);
  }

  return (

      {title}

        {value}



        {description}

      {icon}


  );
};

export default MetricsCard;
53 |
--------------------------------------------------------------------------------
/src/client/components/PopoverMoreInfo.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import Popover from "@mui/material/Popover";
3 | import Typography from "@mui/material/Typography";
4 | import Button from "@mui/material/Button";
5 |
6 | import { MoreInfoProps } from "../../../types/types";
7 |
// Info button that toggles a popover containing `content`, anchored to the
// clicked button. `icon` is the button's visible child.
// NOTE(review): the JSX element tags (and useState/MouseEvent type
// parameters) were stripped in this dump; restore from version control.
export default function MoreInfo({ icon, content }: MoreInfoProps) {
  // The popover anchors to whichever button was clicked; null means closed.
  const [anchorEl, setAnchorEl] = React.useState(
    null
  );

  const handleClick = (event: React.MouseEvent) => {
    setAnchorEl(event.currentTarget);
  };

  const handleClose = () => {
    setAnchorEl(null);
  };

  // Open whenever an anchor is set; the id is only assigned while open
  // (used for aria-describedby wiring).
  const open = Boolean(anchorEl);
  const id = open ? "simple-popover" : undefined;

  return (


      {icon}

      {content}


  );
}
44 |
--------------------------------------------------------------------------------
/src/client/components/RealTimeLineChart.tsx:
--------------------------------------------------------------------------------
1 | import React, { useEffect, useState, useRef, useMemo } from "react";
2 | import {
3 | Chart as ChartJS,
4 | CategoryScale,
5 | LinearScale,
6 | PointElement,
7 | LineElement,
8 | Title,
9 | Tooltip,
10 | Legend,
11 | TimeScale,
12 | ChartOptions,
13 | } from "chart.js";
14 | import "chartjs-adapter-luxon";
15 | import { Line } from "react-chartjs-2";
16 | import { GqlChartProps } from "../../../types/types";
17 | import { useQuery } from "@apollo/client";
18 | import ChartStreaming from "chartjs-plugin-streaming";
19 |
// Register the Chart.js pieces the streaming chart needs, including the
// "realtime" scale contributed by chartjs-plugin-streaming.
ChartJS.register(
  CategoryScale,
  LinearScale,
  PointElement,
  LineElement,
  Title,
  Tooltip,
  Legend,
  TimeScale,
  ChartStreaming
);

// Live-updating line chart fed by a GraphQL time-series query.
//
//   query        - GraphQL document whose result contains `resource` -> series[]
//   metric       - key of the numeric field inside each series data point
//   duration     - visible window, in minutes
//   step         - sampling step forwarded to the backend
//   pollInterval - refresh cadence, in seconds
//   title/xAxisLabel/yAxisLabel - chart labels
//   resource     - top-level field name in the query result
//   label        - series field used to build the legend label
//   args         - extra query variables merged into every (re)fetch
export default function RealTimeLineChart({
  query,
  metric,
  duration,
  step,
  pollInterval,
  title,
  xAxisLabel,
  yAxisLabel,
  resource,
  label,
  args,
}: GqlChartProps) {
  // End of the most recently fetched window; each refresh queries from here.
  const timeNow = useRef(new Date());
  // True once the initial dataset has been seeded into state.
  const loaded = useRef(false);
  const chartRef = useRef(null);
  const [chartData, setChartData] = useState({
    labels: [],
    datasets: [],
  });

  // Per-series line colors.
  // NOTE(review): only five entries — a sixth series would index past the
  // end and get an undefined color.
  const colors = ["00f5d4", "00bbf9", "9b5de5", "f15bb5", "93c8f7"];

  const options: ChartOptions<"line"> = {
    responsive: true,
    // Points are objects; map `time` to x and the metric field to y.
    parsing: {
      xAxisKey: "time",
      yAxisKey: metric,
    },
    plugins: {
      legend: {
        position: "top" as const,
      },
      title: {
        display: true,
        text: title,
      },
      streaming: {
        duration: duration * 60000,
        delay: pollInterval * 1000,
        refresh: pollInterval * 1000,
        // On each tick, fetch only the window since the previous tick and
        // append the new points to the matching datasets.
        onRefresh: (chart) => {
          const variables = {
            start: timeNow.current.toString(),
            end: new Date().toString(),
            step: step,
            ...args,
          };
          // Advance the window cursor to this fetch's end time.
          timeNow.current = new Date(variables.end);
          refetch({ ...variables }).then((result) => {
            if (loaded.current) {
              result.data[resource].forEach((series, index) => {
                series[`${metric}`].forEach((point) => {
                  chart.data.datasets[index].data.push(point);
                });
              });
            }

            // "quiet" update: redraw without animation/interaction reset.
            chart.update("quiet");
          });
        },
      },
    },
    scales: {
      xAxes: {
        title: {
          display: xAxisLabel ? true : false,
          text: xAxisLabel,
        },
        type: "realtime",
        time: {
          unit: "minute",
          parser: (label: string) => new Date(label).getTime(),
          stepSize: 0.5,
          displayFormats: {
            minute: "HH:mm:ss",
          },
        },
        adapters: {
          date: {
            local: "en-us",
            setZone: true,
          },
        },
        ticks: {
          autoSkip: false,
          maxRotation: 45,
          minRotation: 45,
        },
      },
      yAxes: {
        title: {
          display: yAxisLabel ? true : false,
          text: yAxisLabel,
        },
      },
    },
  };

  // Initial fetch: pull twice the visible window so the chart starts full.
  const { loading, data, refetch } = useQuery(query, {
    variables: {
      start: new Date(
        timeNow.current.valueOf() - duration * 60000 * 2
      ).toString(),
      end: timeNow.current.toString(),
      step: step,
      ...args,
    },
    fetchPolicy: "network-only",
    nextFetchPolicy: "network-only",
    notifyOnNetworkStatusChange: true,
  });

  // Seed chart state once the first query resolves; skipped on later
  // loading flips once `loaded` is set.
  useEffect(() => {
    if (loading || loaded.current) return;
    const datasets = [];
    const labels = [];
    data[resource].forEach((series, index) => {
      const seriesData: any = {};
      seriesData.label = `${resource}: ${series[label]}`;
      seriesData.backgroundColor = `#${colors[index]}`;
      seriesData.borderColor = seriesData.backgroundColor;
      seriesData.pointRadius = 0;
      seriesData.tension = 0.2;

      seriesData.data = series[`${metric}`];

      datasets.push(seriesData);
    });

    setChartData({
      labels,
      datasets,
    });

    // The cleanup flips `loaded` the next time this effect re-runs (i.e. on
    // the next loading change, since notifyOnNetworkStatusChange is on), so
    // subsequent refreshes append via onRefresh instead of reseeding.
    return () => (loaded.current = true);
  }, [loading]);

  // A new set of query args means a different target: reseed from scratch.
  useEffect(() => {
    loaded.current = false;
  }, [args]);

  // Memoize so the Line only remounts when the seeded data changes, not on
  // every poll. NOTE(review): the <Line .../> element tag was stripped in
  // this dump; restore it from version control.
  return (
    <>
      {useMemo(() => {
        return loading && !loaded.current ? (
          Loading...
        ) : (

        );
      }, [chartData])}
    >
  );
}
186 |
--------------------------------------------------------------------------------
/src/client/components/ReassignPartitions.tsx:
--------------------------------------------------------------------------------
1 | import React, { useEffect, useState } from "react";
2 | import { useMutation, useQuery } from "@apollo/client";
3 | import { useParams } from "react-router";
4 | import { TOPIC_QUERY, REASSIGN_PARTITIONS } from "../models/queries";
5 | import Container from "@mui/material/Container";
6 | import Table from "@mui/material/Table";
7 | import TableBody from "@mui/material/TableBody";
8 | import TableCell from "@mui/material/TableCell";
9 | import TableContainer from "@mui/material/TableContainer";
10 | import TableHead from "@mui/material/TableHead";
11 | import TableRow from "@mui/material/TableRow";
12 | import Paper from "@mui/material/Paper";
13 | import { EditableField } from "./EditableField";
14 | import Button from "@mui/material/Button";
15 | import { useNavigate } from "react-router";
16 |
// Page for proposing new replica assignments for a topic's partitions:
// loads the topic's current partition/replica layout, lets each row be
// edited inline (EditableField), and submits the accumulated proposals via
// the REASSIGN_PARTITIONS mutation.
// NOTE(review): the table JSX element tags were stripped in this dump;
// restore them from version control before editing the markup.
function ReassignPartitions() {
  // Topic name comes from the route (/topics/:topicName/...).
  const { topicName } = useParams();
  // `initial` mirrors the server's current assignment; `proposed` collects
  // the user's pending changes, one entry per edited partition.
  const [initial, setInitial] = useState([]);
  const [proposed, setProposed] = useState([]);
  const navigate = useNavigate();
  const { data, loading } = useQuery(TOPIC_QUERY, {
    variables: {
      name: topicName,
    },
  });

  const [errorMessage, setErrorMessage] = useState("");

  const [reassignPartitions, { error }] = useMutation(REASSIGN_PARTITIONS);

  // Flatten the fetched topic into table rows sorted by partition id.
  useEffect(() => {
    if (loading) return;
    else {
      const { partitions } = data.topic;
      const rows = partitions
        .map(({ partitionId, replicas, leader }) => {
          return {
            partitionId,
            replicas: replicas.map((broker) => broker.brokerId),
            leader: leader.brokerId,
          };
        })
        .sort((a, b) => a.partitionId - b.partitionId);
      setInitial(rows);
      return;
    }
  }, [data]);

  // Record a changed replica list for one partition (called by the row's
  // EditableField on save).
  const newAssignment = (partition: number, replicas: [number]): void => {
    const replicaAssignment = {
      partition,
      replicas,
    };

    return setProposed([...proposed, replicaAssignment]);
  };

  // Drop the pending proposal for one partition (called on revert/cancel).
  const deleteAssignment = (partition: number): void => {
    const draftProposed = proposed;

    return setProposed(
      draftProposed.filter(
        (replicaAssignment) => replicaAssignment.partition !== partition
      )
    );
  };

  // Submit all pending proposals, then navigate back to the topic page.
  const save = async () => {
    if (proposed.length < 1)
      return setErrorMessage("There are no changed assignments.");
    await reassignPartitions({
      variables: {
        topics: [
          {
            topic: topicName,
            partitionAssignment: proposed,
          },
        ],
      },
    });

    // NOTE(review): `error` here is the value closed over at render time,
    // not the outcome of the await above — a failed mutation still falls
    // through and navigates. Wrap the call in try/catch (or inspect the
    // mutate result) instead. Also note the "reassigment" typo in the
    // user-facing message.
    if (error)
      return setErrorMessage("There was an issue starting the reassigment.");
    else setErrorMessage("");

    navigate("../", { replace: true });
  };

  return (
    <>

      Reassign Partitions for topic: {topicName}
      {errorMessage !== "" && Error: {errorMessage}
      }



            Partition ID
            Leader
            Replicas


          {!loading &&
            initial.map((row) => {
              return (

                  {row.partitionId}
                  {row.leader}




              );
            })}



        Save

       navigate(-1)}
        color="error"
        variant="contained"
        sx={{ color: "#F8F0E3", m: 1, fontWeight: "bold", width: "100px" }}
      >
        Cancel


    >
  );
}

export { ReassignPartitions };
151 |
--------------------------------------------------------------------------------
/src/client/components/Searchbar.tsx:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import TextField from "@mui/material/TextField";
3 | import Autocomplete from "@mui/material/Autocomplete";
4 | import SearchIcon from "@mui/icons-material/Search";
5 | import { IconButton, InputAdornment } from "@mui/material";
6 | import { DocumentNode } from "graphql";
7 | import { useQuery } from "@apollo/client";
8 | import { keyMap } from "../models/typeKeyMap";
// Props for SearchBar: the GraphQL query to list options, the result field
// to search within, and the setter used to push the chosen id up to the
// parent as a filter.
// NOTE(review): the React.Dispatch/Array type parameters were stripped in
// this dump; restore them from version control.
export interface searchProps {
  query: DocumentNode;
  searchingFor: string;
  setFilter: React.Dispatch>;
}

// Autocomplete search box: builds "key: value" option labels from every
// non-__typename field of each result item, keying each option by the field
// named in keyMap[searchingFor]; selecting an option calls setFilter with
// that id.
// NOTE(review): the Autocomplete/TextField JSX tags were stripped in this
// dump; only the callback bodies survive below.
function SearchBar({ query, searchingFor, setFilter }: searchProps) {
  const { loading, data } = useQuery(query);
  return (
    {
        let option = "";
        let value;
        for (const key of Object.keys(item)) {
          if (key === "__typename") continue;
          option += `${key}: ${item[key]} `;
          if (key === keyMap[searchingFor]) value = item[key];
        }
        option = option.trimEnd();
        return { label: option, id: value };
      })
    }
    isOptionEqualToValue={(
      option: any | Array,
      value: any | Array
    ) => {
      return option.id === value.id;
    }}
    onChange={(event, value: any | Array) => {
      setFilter([value.id]);
      return value;
    }}
    renderInput={(params) => (





        ),
      }}
      />
    )}
    />
  );
}

export default SearchBar;
64 |
--------------------------------------------------------------------------------
/src/client/components/Sidebar.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import { styled } from "@mui/material/styles";
3 | import MuiDrawer from "@mui/material/Drawer";
4 | import Toolbar from "@mui/material/Toolbar";
5 | import List from "@mui/material/List";
6 | import Divider from "@mui/material/Divider";
7 | import IconButton from "@mui/material/IconButton";
8 | import ChevronLeftIcon from "@mui/icons-material/ChevronLeft";
9 |
10 | import { mainListItems, secondaryListItems } from "./listItems";
11 |
12 | const drawerWidth = 240;
13 |
14 | const Drawer = styled(MuiDrawer, {
15 | shouldForwardProp: (prop) => prop !== "open",
16 | })(({ theme, open }) => ({
17 | "& .MuiDrawer-paper": {
18 | position: "relative",
19 | whiteSpace: "nowrap",
20 | width: drawerWidth,
21 | transition: theme.transitions.create("width", {
22 | easing: theme.transitions.easing.sharp,
23 | duration: theme.transitions.duration.enteringScreen,
24 | }),
25 | boxSizing: "border-box",
26 | ...(!open && {
27 | overflowX: "hidden",
28 | transition: theme.transitions.create("width", {
29 | easing: theme.transitions.easing.sharp,
30 | duration: theme.transitions.duration.leavingScreen,
31 | }),
32 | width: theme.spacing(7),
33 | [theme.breakpoints.up("sm")]: {
34 | width: theme.spacing(9),
35 | },
36 | }),
37 | },
38 | }));
39 |
// Sidebar drawer with the primary and secondary nav lists; starts open and
// is toggled via the chevron button.
// NOTE(review): despite living in Sidebar.tsx, this component is named (and
// default-exported as) `Header` — consider renaming for clarity. The JSX
// element tags were stripped in this dump; only the list expressions remain.
const Header = () => {
  const [open, setOpen] = React.useState(true);
  const toggleDrawer = () => {
    setOpen(!open);
  };

  return (




      {mainListItems}

      {secondaryListItems}


  );
};

export default Header;
71 |
--------------------------------------------------------------------------------
/src/client/components/Title.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import Typography from "@mui/material/Typography";
3 |
// Props for Title: the heading content to render.
interface TitleProps {
  children?: React.ReactNode;
}

// Section heading used across dashboard cards.
// NOTE(review): the Typography element tags were stripped in this dump;
// restore them from version control.
export default function Title(props: TitleProps) {
  return (

      {props.children}

  );
}
15 |
--------------------------------------------------------------------------------
/src/client/components/TopicGrid.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import { useState } from "react";
3 | import { DataGrid, GridToolbar, GridColDef } from "@mui/x-data-grid";
4 | import Title from "./Title";
5 | import { TOPIC_DATAGRID_QUERY, DELETE_TOPIC } from "../models/queries";
6 | import { useQuery } from "@apollo/client";
7 | import ConfirmationDialog from "./ConfirmationDialog";
8 | import Button from "@mui/material/Button";
9 | import { Navigate, useNavigate } from "react-router-dom";
10 |
11 | // onQueryCallback with use query
12 |
13 | // data grid schema
14 | // const columns: GridColDef[] = [
15 | // { field: "topic", headerName: "Topic", width: 150 },
16 | // {
17 | // field: "partitionNum",
18 | // headerName: "Number of Partitions",
19 | // type: "number",
20 | // width: 150,
21 | // },
22 | // {
23 | // field: "partitionRep",
24 | // headerName: "Replicas per partition",
25 | // type: "number",
26 | // width: 150,
27 | // },
28 | // {
29 | // field: "underMinISR",
30 | // headerName: "Under Min ISR",
31 | // type: "number",
32 | // width: 150,
33 | // },
34 | // { field: "logSize", headerName: "Log Size(GB)", type: "number", width: 120 },
35 | // {
36 | // field: "delete",
37 | // headerName: "",
38 | // filterable: false,
39 | // align: "center",
40 | // width: 180,
41 | // renderCell: (params) => {
42 | // return params.value ? (
43 | //
57 | // ) : (
58 | //
72 | // );
73 | // },
74 | // },
75 | // {
76 | // field: "reassignPartitions",
77 | // headerName: "",
78 | // filterable: false,
79 | // align: "center",
80 | // width: 180,
81 | // },
82 | // ];
83 |
// Props accepted by the topics data grid.
interface TopicGridProps {
  title?: string; // optional heading rendered above the grid
  rowCount: number; // initial page size; also drives the grid's pixel height
}

// Data grid listing every Kafka topic with partition/replica/ISR/log-size
// columns plus per-row Delete and Reassign-Partitions actions.
// NOTE(review): the JSX inside return(...) and both renderCell(...) bodies
// was stripped by the text extraction; only loose expressions remain, so the
// code is reproduced verbatim rather than restyled.
export default function TopicGrid({ title, rowCount }: TopicGridProps) {
  // Rows derived from the TOPIC_DATAGRID_QUERY result.
  const [rowData, setRowData] = useState([]);
  const [pageSize, setPageSize] = useState(rowCount);
  const navigate = useNavigate();
  const { loading, data, refetch } = useQuery(TOPIC_DATAGRID_QUERY);

  // Re-shape the GraphQL payload into DataGrid rows whenever data changes.
  // NOTE(review): dependency array omits `loading` and `refetch`; confirm
  // this is intentional before relying on stale-closure behavior.
  React.useEffect(() => {
    if (loading) return;
    else {
      const newRowData = data.topics.map((item, index) => {
        return {
          id: index,
          topic: item.name,
          partitionNum: item.numPartitions,
          partitionRep: item.totalReplicas,
          // NOTE(review): the column is labeled "Under Min ISR" but this is
          // computed as totalIsrs - totalReplicas — verify intended metric.
          underMinISR: `${item.totalIsrs - item.totalReplicas}`,
          // Whether the cluster allows topic deletion (drives the button).
          delete: data.cluster.deleteTopic,
          logSize: item.logSize,
          update: refetch,
        };
      });

      setRowData(newRowData);
    }
  }, [data]);

  // Column definitions for the MUI DataGrid.
  const columns: GridColDef[] = [
    { field: "topic", headerName: "Topic", width: 150 },
    {
      field: "partitionNum",
      headerName: "Number of Partitions",
      type: "number",
      width: 150,
    },
    {
      field: "partitionRep",
      headerName: "Replicas per partition",
      type: "number",
      width: 150,
    },
    {
      field: "underMinISR",
      headerName: "Under Min ISR",
      type: "number",
      width: 150,
    },
    {
      field: "logSize",
      headerName: "Log Size(GB)",
      type: "number",
      width: 120,
    },
    {
      field: "delete",
      headerName: "",
      filterable: false,
      align: "center",
      width: 180,
      // Renders the delete control only when deletion is enabled
      // (params.value carries cluster.deleteTopic). JSX lost in extraction.
      renderCell: (params) => {
        return params.value ? (

        ) : (

        );
      },
    },
    {
      field: "reassignPartitions",
      headerName: "",
      filterable: false,
      align: "center",
      width: 200,
      // Button navigating to the reassignment page for this row's topic.
      renderCell: (params) => {
        return (
           navigate(`../reassign/${params.row.topic}`)}
          >
          REASSIGN PARTITIONS

        );
      },
    },
  ];

  return (

    {title &&
    {title} }
    {loading ? (

    Loading...
    ) : (

      setPageSize(pageSize)}
      rowsPerPageOptions={[5, 10, 25, 50]}
      style={{ height: 52 * rowCount + 147 }}
      />

    )}

  );
}
221 |
--------------------------------------------------------------------------------
/src/client/components/listItems.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import ListItemButton from "@mui/material/ListItemButton";
3 | import ListItemIcon from "@mui/material/ListItemIcon";
4 | import ListItemText from "@mui/material/ListItemText";
5 | import ListSubheader from "@mui/material/ListSubheader";
6 | import DashboardIcon from "@mui/icons-material/Dashboard";
7 | import DynamicFeed from "@mui/icons-material/DynamicFeed";
8 | import Login from "@mui/icons-material/Login";
9 | import Logout from "@mui/icons-material/Logout";
10 | import Topic from "@mui/icons-material/Topic";
11 | import { BugReport } from "@mui/icons-material";
12 |
// Primary sidebar navigation entries (Dashboard, Brokers, Topics, ... —
// judging from the icon imports above).
// NOTE(review): the entire JSX tree (ListItemButton/ListItemIcon/
// ListItemText elements) was stripped by the text extraction and is not
// recoverable from this dump; code left untouched.
export const mainListItems = (

);

// Secondary sidebar section rendered under a "More Features" subheader.
// NOTE(review): JSX stripped by extraction, as above.
export const secondaryListItems = (

    More Features

);
61 |
--------------------------------------------------------------------------------
/src/client/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | FranzView
6 |
7 |
8 |
9 |
10 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/src/client/index.tsx:
--------------------------------------------------------------------------------
import React from "react";
import ReactDOM from "react-dom";
import App from "./App";

// Mount the application into the #root element of index.html.
// NOTE(review): the JSX child passed to render() was stripped by the text
// extraction — presumably <App /> (possibly wrapped in StrictMode); confirm
// against the original file.
ReactDOM.render(

  ,
  document.getElementById("root")
);
11 |
--------------------------------------------------------------------------------
/src/client/models/queries.tsx:
--------------------------------------------------------------------------------
import { gql } from "@apollo/client";

// GraphQL operations used by the client. Aliases of the form
// `count: metric` rename the server's generic `metric` field to the name
// each component expects.

// Cluster-health counters for the dashboard metric cards.
export const DASHBOARD_CARD_METRICS_QUERY = gql`
  query Cluster {
    cluster {
      activeControllerCount {
        count: metric
      }
      offlinePartitionCount {
        count: metric
      }
      numberUnderReplicatedPartitions {
        underReplicatedPartitions: metric
      }
    }
  }
`;

//originally broker_metric_query
// Rows for the topics DataGrid plus whether topic deletion is enabled.
export const TOPIC_DATAGRID_QUERY = gql`
  query Topics {
    topics {
      name
      numPartitions
      totalReplicas
      totalIsrs
      logSize
    }
    cluster {
      deleteTopic
    }
  }
`;

// Under-min-ISR count and per-topic log sizes for the Topics page cards.
export const TOPIC_PAGE_QUERY = gql`
  query Cluster {
    cluster {
      underMinIsr {
        metric
      }
    }
    topics {
      logSize
    }
  }
`;

// Single topic with its partition leaders and replica placement.
export const TOPIC_QUERY = gql`
  query Topic($name: String!) {
    topic(name: $name) {
      name
      partitions {
        partitionId
        leader {
          brokerId
        }
        replicas {
          brokerId
        }
      }
    }
  }
`;

//Add additional query for metrics on broker page only

// Per-broker CPU usage time series over [start, end] at `step` resolution.
export const ALL_BROKER_CPU_USAGE = gql`
  query BrokersCPUUsage($start: String, $end: String, $step: String) {
    broker: brokers(start: $start, end: $end, step: $step) {
      brokerId
      cpuUsage: cpuUsageOverTime {
        cpuUsage: metric
        time
      }
    }
  }
`;

// Per-broker JVM heap usage time series.
export const ALL_BROKER_DISK_USAGE = gql`
  query BrokersJVMMemoryUsage($start: String, $end: String, $step: String) {
    broker: brokers(start: $start, end: $end, step: $step) {
      brokerId
      JVMMemoryUsage: JVMMemoryUsageOverTime {
        JVMMemoryUsage: metric
        time
      }
    }
  }
`;

// Shared broker identity fields, spread into broker queries below.
export const BROKER_FRAGMENT = gql`
  fragment CoreBrokerFields on Broker {
    brokerId
    brokerPort
    brokerHost
  }
`;

// All brokers with identity fields only.
export const CORE_ALL_BROKERS_QUERY = gql`
  ${BROKER_FRAGMENT}
  query Brokers {
    brokers {
      ...CoreBrokerFields
    }
  }
`;

// Produce/consumer/follower request total-time metrics per broker.
export const ALL_BROKERS_TIME_MS = gql`
  ${BROKER_FRAGMENT}
  query BrokerTimeMs {
    brokers {
      produceTotalTimeMs {
        totalTimeMs: metric
        time
      }
      consumerTotalTimeMs {
        totalTimeMs: metric
        time
      }
      followerTotalTimeMs {
        totalTimeMs: metric
        time
      }
      ...CoreBrokerFields
    }
  }
`;

// Average total time (ms) for one request type, optionally per broker set.
export const AVERAGE_TOTALTIMEMS = gql`
  query totalTimeMs($request: String!, $brokerIds: [Int]) {
    totalTimeMs(request: $request, brokerIds: $brokerIds) {
      totalTimeMs: metric
      time
    }
  }
`;

// Per-topic bytes-in/sec series, optionally filtered to broker ids.
export const BYTES_IN_PER_SECOND = gql`
  query BytesInPerSecondOverTime(
    $start: String!
    $end: String!
    $step: String!
    $brokerIds: [Int]
  ) {
    topic: bytesInPerSecondOverTime(
      start: $start
      end: $end
      step: $step
      brokerIds: $brokerIds
    ) {
      topic
      bytesInPerSecond: values {
        time
        bytesInPerSecond: metric
      }
    }
  }
`;

// Per-topic bytes-out/sec series, optionally filtered to broker ids.
export const BYTES_OUT_PER_SECOND = gql`
  query BytesOutPerSecondOverTime(
    $start: String!
    $end: String!
    $step: String!
    $brokerIds: [Int]
  ) {
    topic: bytesOutPerSecondOverTime(
      start: $start
      end: $end
      step: $step
      brokerIds: $brokerIds
    ) {
      topic
      bytesOutPerSecond: values {
        time
        bytesOutPerSecond: metric
      }
    }
  }
`;

// Per-topic messages-in/sec series.
export const MESSAGES_IN_PER_SEC = gql`
  query MessagesInPerSec(
    $start: String!
    $end: String!
    $step: String!
    $brokerIds: [Int]
  ) {
    topic: messagesInPerSec(
      start: $start
      end: $end
      step: $step
      brokerIds: $brokerIds
    ) {
      topic
      messagesInPerSecond: values {
        time
        messagesInPerSecond: metric
      }
    }
  }
`;

// Create a topic; replicationFactor/numPartitions fall back server-side.
export const ADD_TOPIC = gql`
  mutation AddTopic(
    $name: String!
    $replicationFactor: Int
    $numPartitions: Int
  ) {
    addTopic(
      name: $name
      replicationFactor: $replicationFactor
      numPartitions: $numPartitions
    ) {
      name
    }
  }
`;

// Delete a topic by name.
export const DELETE_TOPIC = gql`
  mutation DeleteTopic($name: String!) {
    deleteTopic(name: $name) {
      name
    }
  }
`;

// Submit partition reassignments for one or more topics.
export const REASSIGN_PARTITIONS = gql`
  mutation ReassignPartitions($topics: [PartitionReassignment]) {
    reassignPartitions(topics: $topics) {
      name
    }
  }
`;

// Per-topic under-min-ISR series.
export const UNDERMIN_ISR = gql`
  query UnderMinIsr(
    $start: String!
    $end: String!
    $step: String!
    $brokerIds: [Int]
  ) {
    topic: underMinIsr(
      start: $start
      end: $end
      step: $step
      brokerIds: $brokerIds
    ) {
      topic
      underMinIsr: values {
        time
        underMinIsr: metric
      }
    }
  }
`;

// Per-topic under-replicated-partitions series.
export const UNDERREPLICATED_PARTITIONS = gql`
  query UnderreplicatedPartitions(
    $start: String!
    $end: String!
    $step: String!
    $brokerIds: [Int]
  ) {
    topic: underreplicatedPartitions(
      start: $start
      end: $end
      step: $step
      brokerIds: $brokerIds
    ) {
      topic
      underreplicatedPartitions: values {
        time
        underreplicatedPartitions: metric
      }
    }
  }
`;

// Per-topic log-size series.
export const TOTAL_LOG_SIZE = gql`
  query LogSize(
    $start: String!
    $end: String!
    $step: String!
    $brokerIds: [Int]
  ) {
    topic: logSize(
      start: $start
      end: $end
      step: $step
      brokerIds: $brokerIds
    ) {
      topic
      logSize: values {
        time
        logSize: metric
      }
    }
  }
`;
301 |
--------------------------------------------------------------------------------
/src/client/models/typeKeyMap.tsx:
--------------------------------------------------------------------------------
// Maps a GraphQL collection name to the field that uniquely identifies an
// item in that collection.
export const keyMap = {
  brokers: "brokerId",
  topics: "name",
};
5 |
--------------------------------------------------------------------------------
/src/client/pages/Brokers.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState } from "react";
2 | import Container from "@mui/material/Container";
3 | import SearchBar from "../components/Searchbar";
4 | import { CORE_ALL_BROKERS_QUERY } from "../models/queries";
5 | import Grid from "@mui/material/Grid";
6 | import Paper from "@mui/material/Paper";
7 | import MetricsCard from "../components/MetricsCard";
8 | import { useQuery } from "@apollo/client";
9 | import RealTimeLineChart from "../components/RealTimeLineChart";
10 | import {
11 | BYTES_IN_PER_SECOND,
12 | BYTES_OUT_PER_SECOND,
13 | AVERAGE_TOTALTIMEMS,
14 | } from "../models/queries";
15 |
16 | //Move real-line charts to top of page
// Brokers page: broker search bar, bytes in/out charts, and median
// total-time metric cards for Produce / FetchConsumer / FetchFollower.
// NOTE(review): the JSX inside return(...) was stripped by the text
// extraction; only stray attribute fragments remain, so the code is left
// untouched.
const Brokers = () => {
  // Broker-id filter driven by the SearchBar; empty array means "all".
  const [filter, setFilter] = useState([]);

  // Median total time (ms) for Produce requests, polled every 20s.
  const produce = useQuery(AVERAGE_TOTALTIMEMS, {
    variables: {
      request: "Produce",
      brokerIds: filter.length > 0 ? filter : null,
    },
    pollInterval: 20000,
  });

  // Median total time (ms) for consumer fetch requests.
  const consumer = useQuery(AVERAGE_TOTALTIMEMS, {
    variables: {
      request: "FetchConsumer",
      brokerIds: filter.length > 0 ? filter : null,
    },
    pollInterval: 20000,
  });

  // Median total time (ms) for follower fetch requests.
  const follower = useQuery(AVERAGE_TOTALTIMEMS, {
    variables: {
      request: "FetchFollower",
      brokerIds: filter.length > 0 ? filter : null,
    },
    pollInterval: 20000,
  });

  return (
    <>

      Brokers

      {/* Bytes in per second chart */}

       0 ? filter : null }}
      />

      {/* BYTES OUT PER SEC LINE CHART */}

       0 ? filter : null }}
      />

      {/* Metric card 1 - Reduce request */}

      {/* Metrics Card 2 - Consumer Request*/}

      {/* Metrics Card 3 - Follower Request */}

    </>
  );
};

export default Brokers;
180 |
--------------------------------------------------------------------------------
/src/client/pages/Dashboard.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import Container from "@mui/material/Container";
3 | import Grid from "@mui/material/Grid";
4 | import Paper from "@mui/material/Paper";
5 | import RealTimeLineChart from "../components/RealTimeLineChart";
6 | import MetricsCard from "../components/MetricsCard";
7 | import TopicGrid from "../components/TopicGrid";
8 | import { MonitorHeartTwoTone } from "@mui/icons-material";
9 | import MoreInfo from "../components/PopoverMoreInfo";
10 |
11 | import {
12 | ALL_BROKER_CPU_USAGE,
13 | DASHBOARD_CARD_METRICS_QUERY,
14 | ALL_BROKER_DISK_USAGE,
15 | } from "../models/queries";
16 | import { useQuery } from "@apollo/client";
17 |
// Dashboard content: CPU/JVM usage charts, cluster-health metric cards,
// and the topics grid.
// NOTE(review): the JSX inside return(...) was stripped by the text
// extraction; only attribute fragments and popover strings remain, so the
// code is left untouched.
function DashboardContent() {
  // Cluster health counters (active controller, offline partitions,
  // under-replicated partitions), polled every 60s.
  const { loading, data } = useQuery(DASHBOARD_CARD_METRICS_QUERY, {
    pollInterval: 60000,
  });

  return (
    <>

      {/* Chart */}

      {/* Chart 2 */}

      {/* Metrics Card */}

      }
      content="This metric should be 0 in a healthy cluster. If a broker becomes unavailable, this metric will increase sharply. Any non-zero value lets the developer know that there is potentially something wrong with the cluster and action is warranted."
      />
      }
      />

      {/* Metrics Card 2 */}

      }
      content="If this value is 0, there is a high potential for lost data. If this value is greater than 1 and the higher value persists for more than a minute (when active controllers may be switching between brokers) the cluster may be suffering from 'split brain.' Start troubleshooting!"
      />
      }
      query={DASHBOARD_CARD_METRICS_QUERY}
      searchingFor="count"
      variables={{ pollInterval: 60000 }}
      />

      {/* Metrics Card 3 */}

      }
      />

      {/* Broker Component */}

    </>
  );
}

// Page wrapper for the dashboard route.
// NOTE(review): the returned JSX element (presumably <DashboardContent />)
// was stripped by the extraction.
export default function Dashboard() {
  return ;
}
174 |
--------------------------------------------------------------------------------
/src/client/pages/Topics.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState } from "react";
2 | import Container from "@mui/material/Container";
3 | // import SearchBar from "../components/Searchbar";
4 | // import { CORE_ALL_BROKERS_QUERY } from "../models/queries";
5 | import Grid from "@mui/material/Grid";
6 | import Paper from "@mui/material/Paper";
7 | import MetricsCard from "../components/MetricsCard";
8 | import { useQuery } from "@apollo/client";
9 | import RealTimeLineChart from "../components/RealTimeLineChart";
10 | import TopicGrid from "../components/TopicGrid";
11 | import {
12 | BYTES_IN_PER_SECOND,
13 | BYTES_OUT_PER_SECOND,
14 | // AVERAGE_TOTALTIMEMS,
15 | DASHBOARD_CARD_METRICS_QUERY,
16 | TOPIC_PAGE_QUERY,
17 | MESSAGES_IN_PER_SEC,
18 | // TOTAL_LOG_SIZE,
19 | // TOPIC_DATAGRID_QUERY,
20 | } from "../models/queries";
21 |
// Topics page: throughput charts (bytes in/out, messages in), topic-health
// metric cards, and the topics data grid.
// NOTE(review): the JSX inside return(...) was stripped by the text
// extraction; only stray fragments remain, so the code is left untouched.
const Topics = () => {
  const [filter, setFilter] = useState([]);

  // Metric cards -
  // Cluster counters, polled every 20s.
  // NOTE(review): `request`/`brokerIds` are not variables declared by
  // DASHBOARD_CARD_METRICS_QUERY — looks like copy/paste leftovers; confirm.
  const counts = useQuery(DASHBOARD_CARD_METRICS_QUERY, {
    variables: {
      request: "FetchUnderRep",
      brokerIds: filter.length > 0 ? filter : null,
    },
    pollInterval: 20000,
  });

  // Under-min-ISR count and per-topic log sizes for the cards below.
  const topicCardQuery = useQuery(TOPIC_PAGE_QUERY, {
    variables: {
      request: "FetchUnderRep",
      brokerIds: filter.length > 0 ? filter : null,
    },
    pollInterval: 20000,
  });

  return (
    <>

      {/*Bytes in per second chart*/}

       0 ? filter : null }}
      />

      {/* BYTES OUT PER SEC LINE CHART */}

       0 ? filter : null }}
      />

      {/* MESSAGES IN PER SEC LINE CHART */}

       0 ? filter : null }}
      />

      {/* Cards */}

      {/* UNDERREPLICATED PARTITIONS CARD */}

      {/* UNDER MIN ISR CARD */}

      {/* TOTAL LOG SIZE */}

       {
        return (acc += val.logSize);
      }, 0)
      .toFixed(2)
      }
      title="Total Log Size"
      description="Shown in GB."
      />

      {/* Datagrid */}

    </>

  // charts
  // Bytes in
  // Average
  // per topic when clicked
  // bytes out
  // Average
  // per topic when clicked

  // card
  // under replicated partitions
  // Total undermin ISR
  // total log

  // data grid of topics
  // replace ISR per partition with undermin ISR
  );
};

export default Topics;
233 |
--------------------------------------------------------------------------------
/src/client/pages/TopicsList.tsx:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import TopicGrid from "../components/TopicGrid";
3 | import Container from "@mui/material/Container";
4 | import Button from "@mui/material/Button";
5 |
// Topics list page: heading, an "Add Topic" action button, and the topics
// grid.
// NOTE(review): the JSX inside return(...) was stripped by the text
// extraction; only the text content remains, so the code is left untouched.
const TopicsList = () => {
  return (
    <>

      Topics

      Add Topic

    </>
  );
};

export default TopicsList;
29 |
--------------------------------------------------------------------------------
/src/client/utils/validate.ts:
--------------------------------------------------------------------------------
1 | export const validate = (test: string, control: string): boolean => {
2 | return test === control;
3 | };
4 |
--------------------------------------------------------------------------------
/src/server/graphql/datasources/brokerAdmin.ts:
--------------------------------------------------------------------------------
1 | import { admin } from "../../kafka/kafka";
2 | import {
3 | ConfigResourceTypes,
4 | PartitionReassignment,
5 | OngoingTopicReassignment,
6 | ITopicConfig,
7 | } from "kafkajs";
8 | import { Cluster, Broker, ConfigEntries } from "../../../../types/types";
9 |
10 | export async function getClusterInfo(): Promise {
11 | try {
12 | const info = await admin.describeCluster();
13 | const brokers: Broker[] = [];
14 | for (let i = 0; i < info.brokers.length; i++) {
15 | brokers.push({
16 | brokerId: info.brokers[i].nodeId,
17 | brokerPort: info.brokers[i].port,
18 | brokerHost: info.brokers[i].host,
19 | });
20 | }
21 |
22 | const cluster: Cluster = {
23 | brokers,
24 | activeController: brokers.filter(
25 | (broker) => broker.brokerId === info.controller
26 | )[0],
27 | };
28 |
29 | return cluster;
30 | } catch (error) {
31 | console.log(error);
32 | }
33 | }
34 |
35 | export async function getSingleTopic(name: string) {
36 | try {
37 | const topic = await admin
38 | .fetchTopicMetadata({ topics: [name] })
39 | .then((topics) => topics.topics[0]);
40 |
41 | return topic;
42 | } catch (error) {
43 | console.log(`Kafka Admin Error getting single topic ${name}: ${error}`);
44 | }
45 | }
46 |
47 | export async function getAllTopics() {
48 | try {
49 | const names = await admin.listTopics();
50 | const topics = await admin.fetchTopicMetadata({ topics: names });
51 |
52 | return topics.topics;
53 | } catch (error) {
54 | console.log(`Kafka Admin Error getting single topic: ${error}`);
55 | }
56 | }
57 |
58 | export async function createTopic(
59 | topic: string,
60 | replicationFactor: number,
61 | numPartitions: number,
62 | configEntries: ConfigEntries[]
63 | ) {
64 | const topicConfig: ITopicConfig = {
65 | topic,
66 | replicationFactor,
67 | numPartitions,
68 | };
69 |
70 | if (configEntries) topicConfig.configEntries = configEntries;
71 |
72 | try {
73 | const topicCreated = await admin.createTopics({ topics: [topicConfig] });
74 | if (topicCreated) {
75 | const topics = await admin.fetchTopicMetadata({ topics: [topic] });
76 | return topics.topics[0];
77 | }
78 | } catch (error) {
79 | console.warn(`Error when creating topic: ${topic}. Error: ${error}`);
80 | }
81 | }
82 |
83 | export async function canDelete() {
84 | try {
85 | const cluster = await admin.describeCluster();
86 | const canDelete = await admin.describeConfigs({
87 | includeSynonyms: true,
88 | resources: [
89 | {
90 | type: ConfigResourceTypes.BROKER,
91 | name: cluster.brokers[0].nodeId.toString(),
92 | configNames: ["delete.topic.enable"],
93 | },
94 | ],
95 | });
96 |
97 | return canDelete.resources[0].configEntries[0].configValue === "true";
98 | } catch (error) {
99 | console.log(error);
100 | return error;
101 | }
102 | }
103 |
104 | export async function deleteTopic(topic: string) {
105 | try {
106 | if (!(await canDelete()))
107 | throw "Delete topic is not enabled on this cluster.";
108 | const topicToDelete = await getSingleTopic(topic);
109 | await admin.deleteTopics({ topics: [topic] });
110 | return topicToDelete;
111 | } catch (error) {
112 | console.log(error);
113 | return error;
114 | }
115 | }
116 |
/**
 * Submit partition reassignments, then report the reassignments currently
 * in flight.
 * NOTE(review): the generic on the Promise return type appears stripped by
 * the extraction (presumably Promise<OngoingTopicReassignment[]>, given the
 * kafkajs import) — confirm against the original file.
 * On failure, logs a warning and returns the error object (existing
 * contract).
 */
export async function reassignPartitions(
  topics: PartitionReassignment[]
): Promise {
  try {
    await admin.alterPartitionReassignments({ topics });
    const result = await admin.listPartitionReassignments({});

    return result.topics;
  } catch (error) {
    console.warn(`Error occured reassigning partitions: ${error}`);
    return error;
  }
}
130 |
--------------------------------------------------------------------------------
/src/server/graphql/datasources/models/promQueries.ts:
--------------------------------------------------------------------------------
1 | import { PromQuery } from "../../../../../types/types";
2 |
/* Broker Queries */
// In every query below, the literal string "filter" is a placeholder that
// PrometheusAPI replaces at request time with either a broker-instance
// regex alternation or ".*".

// Per-broker process CPU usage as a percentage (1m rate).
export const BROKER_CPU_USAGE: PromQuery = {
  name: "Broker CPU Usage",
  query:
    'rate(process_cpu_seconds_total{job="kafka", instance=~"filter"}[1m])*100',
  type: "broker",
};

// JVM heap used / committed, per instance, as a percentage.
export const JVM_MEMORY_USAGE: PromQuery = {
  name: "JVM Memory Usage",
  query:
    '(sum(avg_over_time(jvm_memory_bytes_used{area="heap", job!="zookeeper", instance=~"filter"}[1m]))by(application,instance)/sum(avg_over_time(jvm_memory_bytes_committed{area="heap", job!="zookeeper", instance=~"filter"}[1m]))by(application,instance))*100',
  type: "broker",
};

// Bytes produced into each topic per second (60s rate), summed by topic.
export const BYTES_IN_PER_SEC: PromQuery = {
  name: "Bytes in Per Sec (Brokers)",
  query:
    'sum(rate(kafka_server_brokertopicmetrics_bytesinpersec{topic!="", instance=~"filter"}[60s]))by(topic)',
  type: "broker",
};

// Bytes consumed from each topic per second (60s rate), summed by topic.
export const BYTES_OUT_PER_SEC: PromQuery = {
  name: "Bytes out Per Sec (Brokers)",
  query:
    'sum(rate(kafka_server_brokertopicmetrics_bytesoutpersec{topic!="", instance=~"filter"}[60s]))by(topic)',
  type: "broker",
};

/* This is can be a query on the cluster or broker, and uses type broker to filter correctly */
export const TOTAL_UNDER_REPLICATED_PARTITIONS: PromQuery = {
  name: "Under Replicated Partitions",
  query:
    'sum(kafka_server_replicamanager_underreplicatedpartitions{instance=~"filter"})',
  type: "broker",
};

/* This can be altered to be topic based */
export const MESSAGES_IN_PER_SEC: PromQuery = {
  name: "Message in per Second",
  query:
    'sum(rate(kafka_server_brokertopicmetrics_messagesinpersec{topic!="", instance=~"filter"}[60s]))by(topic)',
  type: "broker",
};

/* Topic Queries */
// Replica count summed per topic; the ">0" drops deleted/empty series.
export const GET_TOTAL_REPLICAS: PromQuery = {
  name: "Get Total Replicas",
  query:
    '(sum(kafka_cluster_partition_replicascount{topic=~"filter"})by(topic))>0',
  type: "topic",
};

// Replica count summed per broker instance.
export const REPLICAS_PER_BROKER: PromQuery = {
  name: "Replicas per broker",
  query:
    '(sum(kafka_cluster_partition_replicascount{topic=~"filter"})by(instance))>0',
  type: "topic",
};

// In-sync replica count summed per topic.
export const TOTAL_ISRS: PromQuery = {
  name: "Total ISRs",
  query:
    '(sum(kafka_cluster_partition_insyncreplicascount{topic=~"filter"})by(topic))',
  type: "topic",
};

// On-disk log size summed per topic.
export const LOG_SIZE: PromQuery = {
  name: "Log Size",
  query: '(sum(kafka_log_log_size{topic=~"filter"})by(topic))',
  type: "topic",
};

/* Cluster Queries */
// Should always sum to exactly 1 in a healthy cluster.
export const GET_ACTIVE_CONTROLLER_COUNT: PromQuery = {
  name: "Active Controller Count",
  query: "sum(kafka_controller_kafkacontroller_activecontrollercount)",
  type: "cluster",
};

// Partitions with no available leader; should be 0.
export const OFFLINE_PARTITION_COUNT: PromQuery = {
  name: "Offline Partition Count",
  query: "sum(kafka_controller_kafkacontroller_offlinepartitionscount)",
  type: "cluster",
};

/* This query can be used with a filter on topics */
export const UNDER_MIN_ISR: PromQuery = {
  name: "Under Min ISR",
  query: 'sum(kafka_cluster_partition_underminisr{topic=~"filter"})',
  type: "cluster",
};
95 |
--------------------------------------------------------------------------------
/src/server/graphql/datasources/prometheusAPI.ts:
--------------------------------------------------------------------------------
1 | import "dotenv/config";
2 | import { RESTDataSource } from "apollo-datasource-rest";
3 | /**
4 | * TODO: Create a way for a user to provide their Prometheus URL
5 | * TODO: Map prometheus instance to brokerId
6 | */
7 |
// Intermediate class declaring the broker-mapping state PrometheusAPI
// layers on top of Apollo's RESTDataSource.
class PromAPI extends RESTDataSource {
  brokerMap: any; // prometheus instance address -> kafka broker id
  reverseMap: any; // kafka broker id -> prometheus instance address
  mapped: boolean; // true once the maps have been populated
}
13 |
14 | class PrometheusAPI extends PromAPI {
15 | constructor(baseURL: string = process.env.PROMETHEUS_URL) {
16 | super();
17 | this.baseURL = baseURL;
18 | this.brokerMap = {};
19 | this.reverseMap = {};
20 | this.mapped = false;
21 | }
22 |
23 | async mapBrokers() {
24 | if (this.mapped === true) return true;
25 | const query = "query={brokerid!=''}";
26 | try {
27 | const result = await this.get(`api/v1/query?${query}`);
28 |
29 | result.data.result.forEach((broker) => {
30 | this.brokerMap[broker.metric.instance] = broker.metric.brokerid;
31 | this.reverseMap[broker.metric.brokerid] = broker.metric.instance;
32 | });
33 |
34 | this.mapped = true;
35 | return true;
36 | } catch (error) {
37 | console.log(`Error with mapping brokers. Error: ${error}.`);
38 | return false;
39 | }
40 | }
41 |
42 | async queryData(query, filter?) {
43 | let queryString = `query=${query.query}`;
44 | const regex = /filter/g;
45 |
46 | try {
47 | if (filter && filter.length >= 1) {
48 | if (query.type === "broker") filter = await this.filter(filter);
49 | queryString = queryString.replace(regex, filter);
50 | } else {
51 | queryString = queryString.replace(regex, ".*");
52 | }
53 |
54 | const result = await this.get(`api/v1/query?${queryString}`);
55 | const data = result.data.result;
56 |
57 | return await this.formatResponse(data);
58 | } catch (error) {
59 | console.log(`Error occured with ${query.name}.
60 | Error: ${error}
61 | Query: ${queryString}`);
62 | }
63 | }
64 |
65 | async queryDataRange(query, start, end, step, filter?) {
66 | let queryString = `query=${query.query}`;
67 | const unixStart = Math.round(new Date(start).getTime() / 1000);
68 | const unixEnd = Math.round(new Date(end).getTime() / 1000);
69 | const regex = /filter/g;
70 | try {
71 | if (!unixStart || !unixEnd || isNaN(unixStart) || isNaN(unixEnd))
72 | throw "Date input incorrect";
73 |
74 | if (filter && filter.length >= 1) {
75 | if (query.type === "broker") filter = await this.filter(filter);
76 | queryString = queryString.replace(regex, filter);
77 | } else {
78 | queryString = queryString.replace(regex, ".*");
79 | }
80 |
81 | queryString += `&start=${unixStart}&end=${unixEnd}&step=${step}`;
82 | const result = await this.get(`api/v1/query_range?${queryString}`);
83 | const data = result.data.result;
84 |
85 | return await this.formatResponseSeries(data);
86 | } catch (error) {
87 | console.log(`Error occured with ${query.name}.
88 | Error: ${error}
89 | Query: ${queryString}`);
90 | }
91 | }
92 |
93 | async getMedianTotalTimeMs(requestType, filter) {
94 | const query = `query=kafka_network_requestmetrics_totaltimems{request=~"${requestType}", quantile=~"0.50"${
95 | filter ? `,instance=~"${await this.filter(filter)}"` : ""
96 | }}`;
97 | const result = await this.get(`api/v1/query?${query}`);
98 | const data = result.data.result;
99 |
100 | return this.formatResponse(data);
101 | }
102 |
103 | async getAvgTotalTimeMs(requestType, filter) {
104 | const query = `query=avg(kafka_network_requestmetrics_totaltimems{request=~"${requestType}", quantile=~"0.50"${
105 | filter ? `,instance=~"${await this.filter(filter)}"` : ""
106 | }})by(quantile)`;
107 | const result = await this.get(`api/v1/query?${query}`);
108 | const data = result.data.result;
109 | console.log(query);
110 | return this.formatResponse(data);
111 | }
112 |
113 | // eslint-disable-next-line @typescript-eslint/no-explicit-any
114 | async formatResponse(data: any[]) {
115 | if (!this.mapped) await this.mapBrokers();
116 |
117 | const formattedData = [];
118 | data.forEach((result) => {
119 | const obj = {
120 | time: new Date(result.value[0] * 1000).toString(),
121 | resource: result.metric.instance,
122 | brokerId: Number(this.brokerMap[result.metric.instance]),
123 | topic: result.metric.topic,
124 | };
125 | obj["metric"] = Number(result.value[1]);
126 | formattedData.push(obj);
127 | });
128 |
129 | return formattedData;
130 | }
131 |
132 | // eslint-disable-next-line @typescript-eslint/no-explicit-any
133 | async formatResponseSeries(data: any[]) {
134 | if (!this.mapped) await this.mapBrokers();
135 |
136 | const formattedData = [];
137 | data.forEach((result) => {
138 | const obj = {
139 | resource: result.metric.instance,
140 | brokerId: Number(this.brokerMap[result.metric.instance]),
141 | values: [],
142 | topic: result.metric.topic,
143 | };
144 | result.values.forEach((value) => {
145 | const point = {
146 | time: new Date(value[0] * 1000).toLocaleString("en-US", {
147 | timeStyle: "long",
148 | dateStyle: "short",
149 | hour12: false,
150 | }),
151 | };
152 | point["metric"] = Number(value[1]).toFixed(2);
153 | obj.values.push(point);
154 | });
155 | formattedData.push(obj);
156 | });
157 |
158 | return formattedData;
159 | }
160 |
161 | async filter(brokers: number[]) {
162 | if (!this.mapped) await this.mapBrokers();
163 |
164 | let filter = "";
165 | brokers.forEach((broker) => (filter += `${this.reverseMap[broker]}|`));
166 | return filter;
167 | }
168 | }
169 |
170 | export default PrometheusAPI;
171 |
--------------------------------------------------------------------------------
/src/server/graphql/resolvers.ts:
--------------------------------------------------------------------------------
1 | import * as brokerData from "./datasources/brokerAdmin";
2 | import { Broker, Cluster, Count } from "../../../types/types";
3 | import { OngoingTopicReassignment } from "kafkajs";
4 | import {
5 | BROKER_CPU_USAGE,
6 | BYTES_IN_PER_SEC,
7 | BYTES_OUT_PER_SEC,
8 | GET_ACTIVE_CONTROLLER_COUNT,
9 | GET_TOTAL_REPLICAS,
10 | JVM_MEMORY_USAGE,
11 | LOG_SIZE,
12 | MESSAGES_IN_PER_SEC,
13 | OFFLINE_PARTITION_COUNT,
14 | REPLICAS_PER_BROKER,
15 | TOTAL_ISRS,
16 | TOTAL_UNDER_REPLICATED_PARTITIONS,
17 | UNDER_MIN_ISR,
18 | } from "./datasources/models/promQueries";
19 |
20 | const resolvers = {
21 | Broker: {
22 | bytesInPerSecondOverTime: async (
23 | parent,
24 | args,
25 | { dataSources }
26 | ): Promise => {
27 | try {
28 | const brokerBytesInPerSecond =
29 | await dataSources.prometheusAPI.queryDataRange(
30 | BYTES_IN_PER_SEC,
31 | parent.start,
32 | parent.end,
33 | parent.step,
34 | [parent.brokerId]
35 | );
36 |
37 | console.log(brokerBytesInPerSecond);
38 |
39 | return brokerBytesInPerSecond;
40 | } catch (error) {
41 | console.log(
42 | `An error occured with Query Broker Bytes In Per Second Over Time: ${error}`
43 | );
44 | }
45 | },
46 |
47 | bytesOutPerSecondOverTime: async (
48 | parent,
49 | args,
50 | { dataSources }
51 | ): Promise => {
52 | try {
53 | const brokerBytesOutPerSecond =
54 | await dataSources.prometheusAPI.queryDataRange(
55 | BYTES_OUT_PER_SEC,
56 | parent.start,
57 | parent.end,
58 | parent.step,
59 | [parent.brokerId]
60 | );
61 |
62 | return brokerBytesOutPerSecond;
63 | } catch (error) {
64 | console.log(
65 | `An error occured with Query Broker Bytes In Per Second Over Time: ${error}`
66 | );
67 | }
68 | },
69 |
70 | cpuUsage: async (parent, args, { dataSources }): Promise => {
71 | try {
72 | const [brokerCpu] = await dataSources.prometheusAPI.queryData(
73 | BROKER_CPU_USAGE,
74 | [parent.brokerId]
75 | );
76 |
77 | return brokerCpu;
78 | } catch (error) {
79 | console.log(`An error occured with Query Broker CPU Usage: ${error}`);
80 | }
81 | },
82 |
83 | cpuUsageOverTime: async (
84 | parent,
85 | args,
86 | { dataSources }
87 | ): Promise => {
88 | try {
89 | const [brokerCpu] = await dataSources.prometheusAPI.queryDataRange(
90 | BROKER_CPU_USAGE,
91 | parent.start,
92 | parent.end,
93 | parent.step,
94 | [parent.brokerId]
95 | );
96 |
97 | return brokerCpu.values;
98 | } catch (error) {
99 | console.log(
100 | `An error occured with Query Broker CPU Usage Over Time: ${error}`
101 | );
102 | }
103 | },
104 |
105 | JVMMemoryUsageOverTime: async (
106 | parent,
107 | args,
108 | { dataSources }
109 | ): Promise => {
110 | try {
111 | const [brokerJVMMemoryUsage] =
112 | await dataSources.prometheusAPI.queryDataRange(
113 | JVM_MEMORY_USAGE,
114 | parent.start,
115 | parent.end,
116 | parent.step,
117 | [parent.brokerId]
118 | );
119 |
120 | return brokerJVMMemoryUsage.values;
121 | } catch (error) {
122 | console.log(
123 | `An error occured with Query Broker Disk Usage Over Time: ${error}`
124 | );
125 | }
126 | },
127 |
128 | JVMMemoryUsage: async (parent, args, { dataSources }): Promise => {
129 | try {
130 | const [brokerJVMMemoryUsage] =
131 | await dataSources.prometheusAPI.queryData(JVM_MEMORY_USAGE, [
132 | parent.brokerId,
133 | ]);
134 |
135 | return brokerJVMMemoryUsage;
136 | } catch (error) {
137 | console.log(
138 | `An error has occured with Query Broker Disk Usage: ${error}`
139 | );
140 | }
141 | },
142 |
143 | numberUnderReplicatedPartitions: async (
144 | parent,
145 | args,
146 | { dataSources }
147 | ): Promise => {
148 | try {
149 | const [totalUnderReplicatedPartitions] =
150 | await dataSources.prometheusAPI.queryData(
151 | TOTAL_UNDER_REPLICATED_PARTITIONS,
152 | [parent.brokerId]
153 | );
154 |
155 | return totalUnderReplicatedPartitions;
156 | } catch (error) {
157 | console.log(
158 | `An error occured with Query Broker numberUnderReplicatedPartitions: ${error}`
159 | );
160 | }
161 | },
162 |
163 | produceTotalTimeMs: async (
164 | parent,
165 | args,
166 | { dataSources }
167 | ): Promise => {
168 | try {
169 | const totalProduceTimeMS =
170 | await dataSources.prometheusAPI.getMedianTotalTimeMs("Produce");
171 | const produceTotalTimeMs = totalProduceTimeMS.filter(
172 | (elem) => elem.brokerId === parent.brokerId
173 | )[0];
174 | return produceTotalTimeMs;
175 | } catch (error) {
176 | console.log(
177 | `An error has occured with Query Produce Total Time MS: ${error}`
178 | );
179 | }
180 | },
181 |
182 | consumerTotalTimeMs: async (
183 | parent,
184 | args,
185 | { dataSources }
186 | ): Promise => {
187 | try {
188 | const totalConsumerTotalTimeMs =
189 | await dataSources.prometheusAPI.getMedianTotalTimeMs("FetchConsumer");
190 | const consumerTotalTimeMs = totalConsumerTotalTimeMs.filter(
191 | (elem) => elem.brokerId === parent.brokerId
192 | )[0];
193 |
194 | return consumerTotalTimeMs;
195 | } catch (error) {
196 | console.log(
197 | `An error has occured with Query Consumer Total Time MS: ${error}`
198 | );
199 | }
200 | },
201 |
202 | followerTotalTimeMs: async (
203 | parent,
204 | args,
205 | { dataSources }
206 | ): Promise => {
207 | try {
208 | const totalFollowerTotalTimeMs =
209 | await dataSources.prometheusAPI.getMedianTotalTimeMs("FetchFollower");
210 | const followerTotalTimeMs = totalFollowerTotalTimeMs.filter(
211 | (elem) => elem.brokerId === parent.brokerId
212 | )[0];
213 | return followerTotalTimeMs;
214 | } catch (error) {
215 | console.log(
216 | `An error has occured with Query Follower Total Time MS: ${error}`
217 | );
218 | }
219 | },
220 | },
221 |
222 | Cluster: {
223 | activeControllerCount: async (
224 | parent,
225 | args,
226 | { dataSources }
227 | ): Promise => {
228 | const [activeControllerCount] = await dataSources.prometheusAPI.queryData(
229 | GET_ACTIVE_CONTROLLER_COUNT
230 | );
231 |
232 | return activeControllerCount;
233 | },
234 |
235 | offlinePartitionCount: async (
236 | parent,
237 | args,
238 | { dataSources }
239 | ): Promise => {
240 | const [offlinePartitionCount] = await dataSources.prometheusAPI.queryData(
241 | OFFLINE_PARTITION_COUNT
242 | );
243 |
244 | return offlinePartitionCount;
245 | },
246 |
247 | underMinIsr: async (parent, args, { dataSources }): Promise => {
248 | const [underMinIsr] = await dataSources.prometheusAPI.queryData(
249 | UNDER_MIN_ISR
250 | );
251 |
252 | return underMinIsr;
253 | },
254 |
255 | numberUnderReplicatedPartitions: async (
256 | parent,
257 | args,
258 | { dataSources }
259 | ): Promise => {
260 | const [underReplicatedPartitions] =
261 | await dataSources.prometheusAPI.queryData(
262 | TOTAL_UNDER_REPLICATED_PARTITIONS
263 | );
264 |
265 | return underReplicatedPartitions;
266 | },
267 |
268 | deleteTopic: async () => {
269 | return await brokerData.canDelete();
270 | },
271 | },
272 |
273 | Topic: {
274 | numPartitions: (parent): number => {
275 | return parent.partitions.length;
276 | },
277 |
278 | totalReplicas: async ({ name }, args, { dataSources }): Promise => {
279 | const metric = await dataSources.prometheusAPI.queryData(
280 | GET_TOTAL_REPLICAS,
281 | name
282 | );
283 | if (metric.length === 0) {
284 | return metric.reduce(
285 | (prev, current) => prev + current.metric.length,
286 | 0
287 | );
288 | }
289 |
290 | return metric[0].metric;
291 | },
292 |
293 | totalIsrs: async ({ name }, args, { dataSources }): Promise => {
294 | const metric = await dataSources.prometheusAPI.queryData(
295 | TOTAL_ISRS,
296 | name
297 | );
298 | if (metric.length === 0) {
299 | return metric.reduce(
300 | (prev, current) => prev + current.metric.length,
301 | 0
302 | );
303 | }
304 | return metric[0].metric;
305 | },
306 |
307 | brokersWithReplicas: async (
308 | { name },
309 | args,
310 | { dataSources }
311 | ): Promise => {
312 | const metric = await dataSources.prometheusAPI.queryData(
313 | REPLICAS_PER_BROKER,
314 | name
315 | );
316 | const brokersWithReplicas: number[] = [];
317 | metric.forEach((result) => brokersWithReplicas.push(result.brokerId));
318 |
319 | return brokersWithReplicas;
320 | },
321 |
322 | logSize: async ({ name }, args, { dataSources }): Promise => {
323 | const metric = await dataSources.prometheusAPI.queryData(LOG_SIZE, name);
324 | const logSizeGB = Number((metric[0].metric / 1000000000).toFixed(2));
325 |
326 | return logSizeGB;
327 | },
328 | },
329 |
330 | Partition: {
331 | leader: (parent) => {
332 | parent.leader = { brokerId: parent.leader };
333 | return parent.leader;
334 | },
335 |
336 | replicas: (parent) => {
337 | return parent.replicas.map(
338 | (replica) => (replica = { brokerId: replica })
339 | );
340 | },
341 |
342 | isr: (parent) => {
343 | if (parent.isr.length === 0) return null;
344 | return parent.isr.map((replica) => (replica = { brokerId: replica }));
345 | },
346 | },
347 |
348 | Query: {
349 | brokers: async (
350 | parent,
351 | { start, end, step, brokerIds }
352 | ): Promise => {
353 | const clusterInfo = await brokerData.getClusterInfo();
354 | if (start) {
355 | clusterInfo.brokers.forEach((broker) => {
356 | broker.start = start;
357 | broker.end = end;
358 | broker.step = step;
359 | });
360 | }
361 | if (brokerIds) {
362 | clusterInfo.brokers = clusterInfo.brokers.filter((broker) =>
363 | brokerIds.includes(broker.brokerId)
364 | );
365 | }
366 |
367 | return clusterInfo.brokers.sort((a, b) => a.brokerId - b.brokerId);
368 | },
369 |
370 | broker: async (
371 | parent: Broker,
372 | { brokerId, start, end, step }
373 | ): Promise => {
374 | try {
375 | const cluster = await brokerData.getClusterInfo();
376 | const broker = cluster.brokers.filter(
377 | (elem) => elem.brokerId === brokerId
378 | )[0];
379 |
380 | broker.start = start;
381 | broker.end = end;
382 | broker.step = step;
383 |
384 | return broker;
385 | } catch (error) {
386 | console.log(`An error occured with Query Broker: ${error}`);
387 | }
388 | },
389 |
390 | cluster: async (): Promise => {
391 | const clusterInfo = await brokerData.getClusterInfo();
392 | return clusterInfo;
393 | },
394 |
395 | topic: async (parent, { name }): Promise => {
396 | const topic = await brokerData.getSingleTopic(name);
397 |
398 | return topic;
399 | },
400 |
401 | topics: async (): Promise => {
402 | const topics = await brokerData.getAllTopics();
403 |
404 | return topics;
405 | },
406 |
407 | totalTimeMs: async (
408 | parent,
409 | { request, brokerIds },
410 | { dataSources }
411 | ): Promise => {
412 | try {
413 | const totalTimeMs = await dataSources.prometheusAPI.getAvgTotalTimeMs(
414 | request,
415 | brokerIds
416 | );
417 |
418 | return totalTimeMs[0];
419 | } catch (error) {
420 | console.log(`An error has occured with Query Total Time MS: ${error}`);
421 | }
422 | },
423 |
424 | bytesInPerSecondOverTime: async (
425 | parent,
426 | { brokerIds, topics, start, step, end },
427 | { dataSources }
428 | ): Promise => {
429 | try {
430 | let allBytesInPerSecond =
431 | await dataSources.prometheusAPI.queryDataRange(
432 | BYTES_IN_PER_SEC,
433 | start,
434 | end,
435 | step,
436 | brokerIds
437 | );
438 |
439 | if (topics) {
440 | allBytesInPerSecond = allBytesInPerSecond.filter((el) =>
441 | topics.includes(el.topic)
442 | );
443 | }
444 |
445 | return allBytesInPerSecond;
446 | } catch (error) {
447 | console.log(`An error has occured with Query Total Time MS: ${error}`);
448 | }
449 | },
450 |
451 | bytesOutPerSecondOverTime: async (
452 | parent,
453 | { brokerIds, topics, start, step, end },
454 | { dataSources }
455 | ): Promise => {
456 | try {
457 | let allBytesOutPerSecond =
458 | await dataSources.prometheusAPI.queryDataRange(
459 | BYTES_OUT_PER_SEC,
460 | start,
461 | end,
462 | step,
463 | brokerIds
464 | );
465 |
466 | if (topics) {
467 | allBytesOutPerSecond = allBytesOutPerSecond.filter((el) =>
468 | topics.includes(el.topic)
469 | );
470 | }
471 |
472 | return allBytesOutPerSecond;
473 | } catch (error) {
474 | console.log(`An error has occured with Query Total Time MS: ${error}`);
475 | }
476 | },
477 |
478 | messagesInPerSec: async (
479 | parent,
480 | { brokerIds, topics, start, step, end },
481 | { dataSources }
482 | ): Promise => {
483 | try {
484 | let allMessagesInPerSec =
485 | await dataSources.prometheusAPI.queryDataRange(
486 | MESSAGES_IN_PER_SEC,
487 | start,
488 | end,
489 | step,
490 | brokerIds
491 | );
492 |
493 | if (topics) {
494 | allMessagesInPerSec = allMessagesInPerSec.filter((el) =>
495 | topics.includes(el.topic)
496 | );
497 | }
498 |
499 | return allMessagesInPerSec;
500 | } catch (error) {
501 | console.log(
502 | `An error has occured with Query messagesInPerSec: ${error}`
503 | );
504 | }
505 | },
506 | },
507 |
508 | Mutation: {
509 | addTopic: async (
510 | parent,
511 | { name, replicationFactor = -1, numPartitions = -1, configEntries }
512 | ) => {
513 | try {
514 | const topic = await brokerData.createTopic(
515 | name,
516 | replicationFactor,
517 | numPartitions,
518 | configEntries
519 | );
520 | return topic;
521 | } catch (error) {
522 | console.warn(
523 | `Mutation addTopic failed for topic: ${name}. Error: ${error}`
524 | );
525 | }
526 | },
527 |
528 | deleteTopic: async (parent, { name }) => {
529 | try {
530 | const topic = await brokerData.deleteTopic(name);
531 | return topic;
532 | } catch (error) {
533 | console.warn(
534 | `Mutation deleteTopic failed for topic: ${name}. Error: ${error}`
535 | );
536 | return error;
537 | }
538 | },
539 |
540 | reassignPartitions: async (
541 | parent,
542 | { topics }
543 | ): Promise => {
544 | try {
545 | return await brokerData.reassignPartitions(topics);
546 | } catch (error) {
547 | console.warn(
548 | `Mutation reassignPartitions failed for topics: ${topics}. Error: ${error}`
549 | );
550 | return error;
551 | }
552 | },
553 | },
554 | };
555 |
556 | export default resolvers;
557 |
--------------------------------------------------------------------------------
/src/server/graphql/typeDefs.ts:
--------------------------------------------------------------------------------
1 | import { gql } from "apollo-server-express";
2 |
/**
 * GraphQL schema (SDL) for the franzView API.
 *
 * Object types: Cluster (cluster-wide health metrics), Broker (per-broker
 * Prometheus metrics keyed by brokerId), Topic/Partition (Kafka metadata),
 * Metric / TimeSeriesMetric (Prometheus instant and range results), and the
 * OngoingTopicReassignment shapes returned by the reassignment mutation.
 *
 * NOTE(review): Broker declares messagesInPerSec but no Broker-level resolver
 * is visible for it — confirm it is intentional (it may fall through to a
 * default resolver and return null).
 */
export const typeDefs = gql`
  type Cluster {
    activeControllerCount: Metric
    activeController: Broker
    brokers: [Broker]!
    offlinePartitionCount: Metric
    numberUnderReplicatedPartitions: Metric
    deleteTopic: Boolean
    underMinIsr: Metric
  }

  type Broker {
    brokerId: Int!
    brokerPort: Int
    brokerHost: String
    numberUnderReplicatedPartitions: Metric
    cpuUsage: Metric
    JVMMemoryUsage: Metric
    cpuUsageOverTime: [Metric]
    JVMMemoryUsageOverTime: [Metric]
    produceTotalTimeMs: Metric
    consumerTotalTimeMs: Metric
    followerTotalTimeMs: Metric
    bytesInPerSecondOverTime: [TimeSeriesMetric]
    bytesOutPerSecondOverTime: [TimeSeriesMetric]
    messagesInPerSec: [TimeSeriesMetric]
  }

  type Topic {
    name: String!
    numPartitions: Int
    totalReplicas: Int
    totalIsrs: Int
    brokersWithReplicas: [Int]
    logSize: Float
    partitions: [Partition]
  }

  type Partition {
    partitionId: Int!
    leader: Broker
    replicas: [Broker]
    isr: [Broker]
  }

  type TimeSeriesMetric {
    topic: String
    values: [Metric]
  }

  type Metric {
    time: String
    metric: Float
  }

  type OngoingTopicReassignment {
    name: String
    partitions: [OngoingPartitionReassignment]
  }

  type OngoingPartitionReassignment {
    partition: Int
    replicas: [Int]
    addingReplicas: [Int]
    removingReplicas: [Int]
  }

  type Query {
    brokers(
      start: String
      end: String
      step: String
      brokerIds: [Int]
    ): [Broker]!
    broker(brokerId: Int!, start: String, end: String, step: String): Broker
    cluster: Cluster
    topic(name: String!): Topic
    topics(name: [String]): [Topic]
    totalTimeMs(request: String!, brokerIds: [Int]): Metric
    bytesInPerSecondOverTime(
      brokerIds: [Int]
      topics: [String]
      start: String!
      end: String!
      step: String!
    ): [TimeSeriesMetric]
    bytesOutPerSecondOverTime(
      brokerIds: [Int]
      topics: [String]
      start: String!
      end: String!
      step: String!
    ): [TimeSeriesMetric]
    messagesInPerSec(
      brokerIds: [Int]
      topics: [String]
      start: String!
      end: String!
      step: String!
    ): [TimeSeriesMetric]
  }

  input ConfigEntry {
    name: String!
    value: String!
  }

  input ReplicaAssignment {
    partition: Int!
    replicas: [Int]
  }

  input PartitionReassignment {
    topic: String!
    partitionAssignment: [ReplicaAssignment]!
  }

  type Mutation {
    addTopic(
      name: String!
      replicationFactor: Int
      numPartitions: Int
      configEntries: [ConfigEntry]
    ): Topic!
    deleteTopic(name: String!): Topic
    reassignPartitions(
      topics: [PartitionReassignment]
    ): [OngoingTopicReassignment]
  }
`;
133 |
--------------------------------------------------------------------------------
/src/server/kafka/kafka.ts:
--------------------------------------------------------------------------------
1 | import "dotenv/config";
2 | import { Kafka } from "kafkajs";
3 |
4 | const brokers = process.env.KAKFA_BROKER.split(",");
5 | const kafka = new Kafka({
6 | clientId: "franzView-client",
7 | brokers,
8 | });
9 |
10 | const admin = kafka.admin();
11 | const { CONNECT } = admin.events;
12 |
13 | admin.on(CONNECT, () => console.log("Kafka Admin Connected!"));
14 |
15 | async function run() {
16 | return await admin.connect();
17 | }
18 |
19 | run();
20 |
21 | export { admin };
22 |
--------------------------------------------------------------------------------
/src/server/server.ts:
--------------------------------------------------------------------------------
1 | import express from "express";
2 | import path from "path";
3 | import { DefaultErr } from "../../types/types";
4 | import { typeDefs } from "./graphql/typeDefs";
5 | import resolvers from "./graphql/resolvers";
6 | import { ApolloServer } from "apollo-server-express";
7 | import { ApolloServerPluginDrainHttpServer } from "apollo-server-core";
8 | import http from "http";
9 | import PrometheusAPI from "./graphql/datasources/prometheusAPI";
10 |
// Default port as 3000 or PORT defined by a user in an env file.
// (|| also replaces an empty-string PORT with 3000, which is desirable here.)
const PORT: number | string = process.env.PORT || 3000;

// Initialize express server
const app: express.Express = express();

// Parse JSON and form data and pass it to req.body
app.use(express.json());
app.use(express.urlencoded({ extended: true }));

// Serve all compiled files when using production build
// (webpack outDir is "public" — see webpack.config.js / tsconfig.json)
app.use("/", express.static(path.resolve(__dirname, "../../public")));
23 |
24 | async function startApolloServer(typeDefs, resolvers) {
25 | const httpServer = http.createServer(app);
26 | const server = new ApolloServer({
27 | typeDefs,
28 | resolvers,
29 | csrfPrevention: true,
30 | plugins: [ApolloServerPluginDrainHttpServer({ httpServer })],
31 | dataSources: () => {
32 | return {
33 | prometheusAPI: new PrometheusAPI(),
34 | };
35 | },
36 | });
37 |
38 | await server.start();
39 | server.applyMiddleware({ app });
40 |
41 | app.get("/*", (req: express.Request, res: express.Response) => {
42 | res.status(200).sendFile(path.resolve(__dirname, "../client/index.html"));
43 | });
44 |
45 | // Set up 404s for invalid requests
46 | app.use("*", (req: express.Request, res: express.Response) => {
47 | res.status(404).send("There was nothing found at this route.");
48 | });
49 |
50 | // Global error handler
51 | app.use(
52 | (
53 | err: express.Errback,
54 | req: express.Request,
55 | res: express.Response,
56 | // eslint-disable-next-line @typescript-eslint/no-unused-vars,
57 | next: express.NextFunction
58 | // eslint-disable-next-line @typescript-eslint/no-explicit-any
59 | ): express.Response> => {
60 | const defaultErr: DefaultErr = {
61 | log: "Express error handler caught unknown middleware error",
62 | status: 500,
63 | message: { err: "An error occurred" },
64 | };
65 | const errorObj: DefaultErr = Object.assign({}, defaultErr, err);
66 | console.log(errorObj.log);
67 | return res.status(errorObj.status).json(errorObj.message);
68 | }
69 | );
70 |
71 | await new Promise((resolve) =>
72 | httpServer.listen({ port: PORT }, resolve)
73 | );
74 | console.log(
75 | `🚀 Server ready at PORT: ${PORT}. Graphql path at ${server.graphqlPath} 🎉!`
76 | );
77 |
78 | return server;
79 | }
80 |
81 | export default startApolloServer(typeDefs, resolvers);
82 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es6",
4 | "lib": ["dom", "dom.iterable", "esnext"],
5 | "allowJs": true,
6 | "skipLibCheck": true,
7 | "esModuleInterop": true,
8 | "allowSyntheticDefaultImports": true,
9 | "strict": false,
10 | "forceConsistentCasingInFileNames": true,
11 | "noFallthroughCasesInSwitch": true,
12 | "module": "CommonJS",
13 | "moduleResolution": "node",
14 | "resolveJsonModule": true,
15 | "isolatedModules": false,
16 | "jsx": "react",
17 | "outDir": "public",
18 | },
19 | "files": ["./types/types.d.ts"],
20 | "include": [ "src", "src/server", "src/client", "__test__", "types"],
21 | "exclude":["node_modules", "public/**/*"]
22 | }
23 |
--------------------------------------------------------------------------------
/types/types.d.ts:
--------------------------------------------------------------------------------
1 | // import { DocumentNode } from "graphql";
2 | import { OverridableStringUnion } from "@mui/types";
3 |
4 | interface PromQuery {
5 | name: string;
6 | query: string;
7 | type: string;
8 | }
9 | export interface DefaultErr {
10 | log: string;
11 | status: number;
12 | message: Messsage;
13 | }
14 |
15 | export interface Messsage {
16 | err: string;
17 | }
18 |
19 | export interface Broker {
20 | brokerId: number;
21 | brokerPort: number;
22 | brokerHost: string;
23 | brokerCpuUsage?: Count;
24 | start?: string;
25 | end?: string;
26 | step?: string;
27 | }
28 |
29 | export interface BrokerCpuUsage {
30 | cpuUsage: number;
31 | time: string;
32 | }
33 |
34 | export interface JVMMemoryUsage extends Metric {
35 | JVMMemoryUsage: number;
36 | }
37 |
38 | export interface Topic {
39 | name: string;
40 | numPartitions: number;
41 | totalReplicas: number;
42 | totalIsrs: number;
43 | brokersWithReplicas: [number];
44 | logSize: number;
45 | }
46 |
47 | export interface Metric {
48 | time: string;
49 | }
50 |
51 | export interface Count extends Metric {
52 | // purposefully using a quick fix. to get code pushed up before correcting things
53 | metric: number;
54 | }
55 |
56 | export interface TimeSeriesCount {
57 | topic?: string;
58 | values: Count[];
59 | }
60 |
61 | export interface Cluster {
62 | activeController: Broker;
63 | brokers: Broker[];
64 | activeControllerCount?: Count;
65 | offlinePartitionCount?: Count;
66 | underMinIsr?: Count;
67 | logSize?: Count;
68 | }
69 |
70 | export interface UnderReplicatedPartitions {
71 | underReplicatedPartitions: number;
72 | time: string;
73 | }
74 |
75 | export interface GqlChartProps {
76 | query: DocumentNode;
77 | metric: string;
78 | duration: number;
79 | step: string;
80 | pollInterval?: number;
81 | title?: string;
82 | xAxisLabel?: string;
83 | yAxisLabel?: string;
84 | resource?: string;
85 | label?: string;
86 | args?: any;
87 | }
88 |
89 | export interface MetricsCardProps {
90 | value?: string | number;
91 | title: string;
92 | description: string;
93 | icon?: React.ReactNode;
94 | query?: DocumentNode;
95 | variables?: any;
96 | searchingFor?: string;
97 | }
98 |
99 | export interface ConfigEntries {
100 | name: string;
101 | value: string;
102 | }
103 |
104 | export interface DialogProps {
105 | title: string;
106 | content: string;
107 | label: string; //id and label will be same, but label may contain spaces?
108 | actions: DocumentNode;
109 | control: string;
110 | args: any;
111 | variant: OverridableStringUnion<"text" | "outlined" | "contained">;
112 | cta: string;
113 | color: OverridableStringUnion<
114 | | "inherit"
115 | | "primary"
116 | | "secondary"
117 | | "success"
118 | | "error"
119 | | "info"
120 | | "warning"
121 | >;
122 | disabled: boolean;
123 | update: () => Promise;
124 | }
125 | export interface MoreInfoProps {
126 | icon: React.ReactNode;
127 | content: string;
128 | }
129 |
--------------------------------------------------------------------------------
/webpack.config.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable @typescript-eslint/no-var-requires */
2 | // eslint-disable-next-line @typescript-eslint/no-unused-vars
3 | const webpack = require("webpack");
4 | const path = require("path");
5 | const HtmlWebpackPlugin = require("html-webpack-plugin");
6 | const BundleAnalyzerPlugin =
7 | require("webpack-bundle-analyzer").BundleAnalyzerPlugin;
8 |
// Webpack build configuration: bundles the React client from
// src/client/index.tsx into public/bundle.js.
const config = {
  // Driven by NODE_ENV; when unset, webpack falls back to "production"
  // with a warning.
  mode: process.env.NODE_ENV,
  entry: "./src/client/index.tsx",
  output: {
    path: path.resolve(__dirname, "public"),
    filename: "bundle.js",
    clean: true, // wipe public/ before each build
    publicPath: "/",
  },
  optimization: {
    usedExports: true, // enable tree shaking
  },
  module: {
    rules: [
      {
        test: /\.(js|jsx)$/,
        use: "babel-loader",
        exclude: /node_modules/,
      },
      {
        test: /\.css$/,
        use: ["style-loader", "css-loader"],
      },
      {
        // TypeScript/TSX handled by ts-loader (JS/JSX by babel above).
        test: /\.ts(x)?$/,
        loader: "ts-loader",
        exclude: /node_modules/,
      },
      {
        test: /\.(png|svg|jpg|gif|jpe?g)$/,
        type: "asset/resource",
        exclude: /node_modules/,
      },
    ],
  },
  devtool:
    process.env.NODE_ENV === "development" ? "inline-source-map" : "source-map",
  devServer: {
    // Forward all non-asset requests (GraphQL etc.) to the express server.
    proxy: {
      "*": "http://localhost:3000",
    },
    historyApiFallback: true, // SPA routing support in dev
    hot: true,
  },
  plugins: [
    new HtmlWebpackPlugin({
      template: path.join(__dirname, "public/index.html"),
    }),
    // Writes a static bundle-size report without opening a browser.
    new BundleAnalyzerPlugin({
      analyzerMode: "static",
      openAnalyzer: false,
    }),
  ],
  resolve: {
    extensions: [".tsx", ".ts", ".js", ".jsx"],
  },
};

module.exports = config;
68 |
--------------------------------------------------------------------------------