├── .gitignore ├── Dockerfile ├── License.md ├── README.md ├── client └── src │ ├── App.tsx │ ├── containers │ ├── Authentication │ │ └── Login.tsx │ ├── FailureReports │ │ ├── ErrorTable.tsx │ │ ├── FailureReportScreen.tsx │ │ └── TableFilter.tsx │ ├── HomeScreen.tsx │ ├── HomeScreen │ │ ├── Sidepanel │ │ │ ├── Connect.tsx │ │ │ ├── Groups.tsx │ │ │ ├── NavBar.tsx │ │ │ ├── NotifItems.tsx │ │ │ ├── Oauth.tsx │ │ │ └── User.tsx │ │ ├── TopicsDisplay.tsx │ │ └── TopicsDisplay │ │ │ └── TopicRow.tsx │ ├── Metrics │ │ ├── BarChart.tsx │ │ ├── MetricsScreen.tsx │ │ ├── PieChart.tsx │ │ └── requestParameters.tsx │ └── PartitionScreen │ │ ├── MTPaginationOptions.tsx │ │ ├── MessageTable.tsx │ │ └── PartitionScreen.tsx │ ├── helperFunctions │ ├── populateChart.ts │ └── populateData.ts │ ├── index.html │ ├── index.tsx │ ├── state │ ├── actions │ │ ├── actions.ts │ │ ├── oauthActions.ts │ │ └── userActions.ts │ ├── constants │ │ ├── constants.ts │ │ ├── oauthConstants.ts │ │ └── userConstants.ts │ ├── reducers │ │ ├── index.ts │ │ ├── kafkaDataReducer.ts │ │ ├── metricsReducer.ts │ │ ├── oauthReducer.ts │ │ └── userReducer.ts │ └── store.ts │ └── styles.css ├── dist ├── bundle.js ├── bundle.js.LICENSE.txt └── index.html ├── docker-compose.yml ├── documents ├── KafkaFix (png).png └── KafkaFixIcon.icns ├── license.txt ├── main.js ├── package.json ├── prom-jmx-agent-config.yml ├── prometheus.yml ├── server ├── auth │ ├── auth.controller.ts │ └── auth.routes.ts ├── common │ ├── handleAsync.ts │ ├── index.ts │ ├── logCreator.ts │ ├── mockData.ts │ └── route.config.ts ├── index.ts ├── jmx │ ├── MBeans.ts │ ├── host.metrics.controller.ts │ ├── jmx.routes.ts │ ├── jvm.metrics.controller.ts │ ├── kafka.metrics.controller.ts │ └── zookeeper.metrics.controller.ts ├── kafka │ ├── group │ │ ├── group.controller.ts │ │ └── group.routes.ts │ ├── kafka │ │ ├── consumer.controller.ts │ │ ├── kafka.controller.ts │ │ ├── kafka.routes.ts │ │ └── producer.controller.ts │ └── topic │ │ ├── topic.controller.ts │ │ └── topic.routes.ts ├── log │ ├── log.controller.ts │ └── log.routes.ts ├── oauth │ └── oauth.routes.ts └── users.json ├── tsconfig.json └── webpack.config.js /.gitignore: -------------------------------------------------------------------------------- 1 | **/node_modules 2 | /full-stack 3 | /.env 4 | /zk-kafka 5 | /tsc 6 | error.log 7 | firebase.ts 8 | package-lock.json 9 | /dist -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # FROM wurstmeister/kafka 2 | FROM confluentinc/cp-kafka:5.2.1 3 | # FROM confluentinc/cp-kafka 4 | 5 | ADD prom-jmx-agent-config.yml /usr/app/prom-jmx-agent-config.yml 6 | ADD https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.16.1/jmx_prometheus_javaagent-0.16.1.jar /usr/app/jmx_prometheus_javaagent.jar -------------------------------------------------------------------------------- /License.md: -------------------------------------------------------------------------------- 1 | # CC0 1.0 Universal 2 | 3 | ## Statement of Purpose 4 | 5 | The laws of most jurisdictions throughout the world automatically confer exclusive Copyright and Related Rights (defined below) upon the creator and subsequent owner(s) (each and all, an "owner") of an original work of authorship and/or a database (each, a "Work"). 
6 | 7 | Certain owners wish to permanently relinquish those rights to a Work for the purpose of contributing to a commons of creative, cultural and scientific works ("Commons") that the public can reliably and without fear of later claims of infringement build upon, modify, incorporate in other works, reuse and redistribute as freely as possible in any form whatsoever and for any purposes, including without limitation commercial purposes. These owners may contribute to the Commons to promote the ideal of a free culture and the further production of creative, cultural and scientific works, or to gain reputation or greater distribution for their Work in part through the use and efforts of others. 8 | 9 | For these and/or other purposes and motivations, and without any expectation of additional consideration or compensation, the person associating CC0 with a Work (the "Affirmer"), to the extent that he or she is an owner of Copyright and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and publicly distribute the Work under its terms, with knowledge of his or her Copyright and Related Rights in the Work and the meaning and intended legal effect of CC0 on those rights. 10 | 11 | 1. Copyright and Related Rights. 12 | 13 | --- 14 | 15 | A Work made available under CC0 may be protected by copyright and related or neighboring rights ("Copyright and Related Rights"). Copyright and Related Rights include, but are not limited to, the following: 16 | 17 | i. the right to reproduce, adapt, distribute, perform, display, communicate, and translate a Work; 18 | ii. moral rights retained by the original author(s) and/or performer(s); 19 | iii. publicity and privacy rights pertaining to a person's image or likeness depicted in a Work; 20 | iv. rights protecting against unfair competition in regards to a Work, subject to the limitations in paragraph 4(a), below; 21 | v. rights protecting the extraction, dissemination, use and reuse of data in a Work; 22 | vi. database rights (such as those arising under Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, and under any national implementation thereof, including any amended or successor version of such directive); and 23 | vii. other similar, equivalent or corresponding rights throughout the world based on applicable law or treaty, and any national implementations thereof. 24 | 25 | 2. Waiver. 26 | 27 | --- 28 | 29 | To the greatest extent permitted by, but not in contravention of, applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and unconditionally waives, abandons, and surrenders all of Affirmer's Copyright and Related Rights and associated claims and causes of action, whether now known or unknown (including existing as well as future claims and causes of action), in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "Waiver"). 
Affirmer makes the Waiver for the benefit of each member of the public at large and to the detriment of Affirmer's heirs and successors, fully intending that such Waiver shall not be subject to revocation, rescission, cancellation, termination, or any other legal or equitable action to disrupt the quiet enjoyment of the Work by the public as contemplated by Affirmer's express Statement of Purpose. 30 | 31 | 3. Public License Fallback. 32 | 33 | --- 34 | 35 | Should any part of the Waiver for any reason be judged legally invalid or ineffective under applicable law, then the Waiver shall be preserved to the maximum extent permitted taking into account Affirmer's express Statement of Purpose. In addition, to the extent the Waiver is so judged Affirmer hereby grants to each affected person a royalty-free, non transferable, non sublicensable, non exclusive, irrevocable and unconditional license to exercise Affirmer's Copyright and Related Rights in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "License"). The License shall be deemed effective as of the date CC0 was applied by Affirmer to the Work. Should any part of the License for any reason be judged legally invalid or ineffective under applicable law, such partial invalidity or ineffectiveness shall not invalidate the remainder of the License, and in such case Affirmer hereby affirms that he or she will not (i) exercise any of his or her remaining Copyright and Related Rights in the Work or (ii) assert any associated claims and causes of action with respect to the Work, in either case contrary to Affirmer's express Statement of Purpose. 36 | 37 | 4. Limitations and Disclaimers. 38 | 39 | --- 40 | 41 | a. No trademark or patent rights held by Affirmer are waived, abandoned, surrendered, licensed or otherwise affected by this document. 42 | b. Affirmer offers the Work as-is and makes no representations or warranties of any kind concerning the Work, express, implied, statutory or otherwise, including without limitation warranties of title, merchantability, fitness for a particular purpose, non infringement, or the absence of latent or other defects, accuracy, or the present or absence of errors, whether or not discoverable, all to the greatest extent permissible under applicable law. 43 | c. Affirmer disclaims responsibility for clearing rights of other persons that may apply to the Work or any use thereof, including without limitation any person's Copyright and Related Rights in the Work. Further, Affirmer disclaims responsibility for obtaining any necessary consents, permissions or other rights required for any use of the Work. 44 | d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work. 
45 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![logo]() 2 | 3 | ### [Homepage](https://www.kafkafix.com) 4 | 5 | ### [Repository](https://github.com/oslabs-beta/kafkafix) 6 | 7 | ## Table of Contents 8 | 9 | - [Motivation](#-motivation) 10 | - [Features](#-features) 11 | - [Install](#-install) 12 | - [How to Use](#-how-to-use) 13 | - [Contribute](#-contribute) 14 | - [Contributors](#-contributors) 15 | 16 | ## Motivation 17 | 18 | KafkaFix is designed to be a simple and lightweight local Kafka development tool with a graphical UI for monitoring JMX attributes through Prometheus. 19 | 20 | KafkaFix aims to remove all configuration from the user's end to simplify the process and offer a better user experience. Users simply fire up the application with their docker-compose file, and everything is good to go. 21 | 22 | ## Features 23 | 24 | Users can fully manage their local Kafka instance and perform common operations such as creating topics, starting consumers and producers, visualizing streams of data, and managing errors. 25 | 26 | ## Install 27 | 28 | Visit https://www.kafkafix.com, then download and install the application. 29 | 30 | ## How to Use 31 | 32 | ### Starting Containers and Kafka 33 | 34 | > 1. Start all containers by selecting your docker-compose.yml file. 35 | > 2. Provide a local port number and click Connect. 36 | 37 | ### Stopping Containers 38 | 39 | > 1. Click the Disconnect button and all containers will be stopped. 40 | 41 | ### Metrics 42 | 43 | > 1. To visualize metrics, click the side panel and then click Metrics. 44 | > 2. Here you will have the option to see all available JMX metrics. 45 | > 3. Click on any of the metrics to see a visual representation of the data for that metric (see the query example at the end of this section). 46 | 47 | ### Failure Reports 48 | 49 | > 1. All failures incurred by Kafka will be saved locally. 50 | > 2. Click on the side panel for the full history of error logs, or click the top-right notification panel for the most recent error logs. 51 | 52 | ### Visualize Streams of Data 53 | 54 | > 1. To visualize all streams of data consumed by the consumer, go to the right panel and click on the Visualize Streams button. 55 | > 2. All data will be updated in real time. 56 | 57 | ### Creating a Topic 58 | 59 | > 1. To create a topic, click on the Create Topic button. 60 | > 2. Pass in the topic name and the number of partitions you would like the topic to have. 61 | 62 | ### Starting a Producer 63 | 64 | > 1. To start a producer, click on the Start Producer button. 65 | > 2. Provide the name of the topic you would like the producer to produce to. 66 | > 3. KafkaFix will start producing to that topic. 67 | 68 | ### Starting a Consumer 69 | 70 | > 1. To start a consumer, click on the Start Consumer button. 71 | > 2. Provide a topic name and a group id. 72 | > 3. Once it's created, the consumer will start consuming messages for the specified topic. 73 | 74 | ### Deleting a Topic 75 | 76 | > 1. Simply click on the Delete button next to the topic name. 77 | 78 | ### Creating a Partition 79 | 80 | > 1. Click on the Create a Partition button. 81 | > 2. Then pass in the number of partitions to be added to the topic. 82 | 
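### Example: Querying a Metric Directly

The Metrics screen works by sending the selected metric name to the Prometheus HTTP API at `http://localhost:9090/api/v1/query` and charting the returned result set. If you want to inspect a metric outside the UI, you can issue the same query yourself. The snippet below is only an illustrative sketch (the `queryMetric` helper is not part of KafkaFix); it assumes Prometheus is reachable on port 9090, the address the Metrics screen itself queries, and uses a metric name from the dropdown list, e.g. `jvm_memory_bytes_used`.

```ts
// Illustrative sketch: not part of the KafkaFix codebase.
// Queries the Prometheus instance that scrapes the JMX exporter attached to Kafka.
const queryMetric = async (metric: string) => {
  const url = `http://localhost:9090/api/v1/query?query=${encodeURIComponent(metric)}`;
  const response = await fetch(url);
  const { data } = await response.json();

  // For an instant query, each entry in data.result carries the metric's labels
  // and its latest [timestamp, value] sample.
  return data.result.map((r: any) => ({ labels: r.metric, value: r.value }));
};

// Example: JVM heap usage exported by the JMX agent.
queryMetric('jvm_memory_bytes_used').then(console.log);
```

Inside the app, the same `data.result` array is passed through `populateChart` and rendered with Chart.js.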
83 | ## Contribute 84 | 85 | ### New contributions to the library are welcome, but we ask that you please follow these guidelines. 86 | 87 | > 1. Before opening a PR for major additions or changes, please test in your local development environment. This way, it will save overall time spent and allow for faster implementation by maintainers. 88 | > 2. Consider whether your changes are useful for all users. 89 | > 3. Avoid breaking changes unless there is an upcoming major release, as major releases are infrequent. We encourage people to care a lot about backwards compatibility. 90 | 91 | ## Contributors 92 | 93 | ### Andy Wang [github](https://github.com/andywang0121) [linkedIn](https://www.linkedin.com/in/andywang0121/) 94 | 95 | ### Kyu Sung Park [github](https://github.com/qkrrbtjd90) [linkedIn](https://www.linkedin.com/in/kyusungpark/) 96 | 97 | ### Ranisha Rafeeque [github](https://github.com/ranisharafeeque) [linkedIn](https://www.linkedin.com/in/ranisha-rafeeque-s-k/) 98 | 99 | ### Yom Woldemichael [github](https://github.com/yomwold) [linkedIn](https://www.linkedin.com/in/yomfwoldemichael/) 100 | -------------------------------------------------------------------------------- /client/src/App.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC } from 'react'; 2 | import { 3 | HashRouter as Router, 4 | Switch, 5 | Route, 6 | Redirect, 7 | } from 'react-router-dom'; 8 | 9 | import HomeScreen from './containers/HomeScreen'; 10 | import { FailureReportScreen } from './containers/FailureReports/FailureReportScreen'; 11 | import { MetricsScreen } from './containers/Metrics/MetricsScreen'; 12 | import { useSelector } from 'react-redux'; 13 | import { overallState } from './state/reducers/index'; 14 | import { PartitionScreen } from './containers/PartitionScreen/PartitionScreen'; 15 | import { Groups } from './containers/HomeScreen/Sidepanel/Groups'; 16 | import { Login } from './containers/Authentication/Login'; 17 | import { UserState } from './state/reducers/userReducer'; 18 | 19 | const wss = new WebSocket('ws://localhost:3000'); 20 | wss.onopen = () => console.log('connected to websocket'); 21 | 22 | const App: FC = () => { 23 | const email = useSelector( 24 | state => state.user.email 25 | ); 26 | 27 | return ( 28 | <> 29 | 30 | 31 | 32 | 33 | (email.length ?
: )} 37 | /> 38 | 39 | 40 | 41 | 42 | ( 45 | 50 | )} 51 | /> 52 | 53 | 54 | 55 | ); 56 | }; 57 | 58 | export default App; 59 | -------------------------------------------------------------------------------- /client/src/containers/Authentication/Login.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { useState, FC } from "react"; 3 | import Button from "@material-ui/core/Button"; 4 | import TextField from "@material-ui/core/TextField"; 5 | import Box from "@material-ui/core/Box"; 6 | import { useHistory } from "react-router-dom"; 7 | import { makeStyles } from "@material-ui/core"; 8 | import { useDispatch, useSelector } from "react-redux"; 9 | import { 10 | setUserActionCreator, 11 | setErrorActionCreator, 12 | loginRequestActionCreator, 13 | loginSuccessActionCreator, 14 | loginFailActionCreator, 15 | signUpRequestActionCreator, 16 | signUpSuccessActionCreator, 17 | signUpFailActionCreator, 18 | } from "../../state/actions/userActions"; 19 | // import { 20 | // OauthLoginRequestActionCreator, 21 | // OauthLoginSuccessActionCreator, 22 | // OauthLoginFailActionCreator, 23 | // OauthSetErrorActionCreator 24 | // } from "../../state/actions/oauthActions"; 25 | import { overallState } from "../../state/reducers/index"; 26 | import { UserState } from "../../state/reducers/userReducer"; 27 | 28 | 29 | 30 | 31 | const useStyles = makeStyles({ 32 | btn: { 33 | fontSize: 20, 34 | color: "white", 35 | backgroundColor: "black", 36 | marginLeft: 25, 37 | }, 38 | loginPage: { 39 | textAlign: 'center', 40 | color: "black", 41 | } 42 | }); 43 | 44 | export const Login: FC = () => { 45 | const classes = useStyles(); 46 | const [isLogin, setIsLogin] = useState(true); 47 | const [email, setEmail] = useState(''); 48 | const [password, setPassword] = useState(''); 49 | const [redirect, setRedirect] = useState(null); 50 | const dispatch = useDispatch(); 51 | const errorMessage = useSelector( 52 | (state) => state.user.error 53 | ); 54 | 55 | function validateForm() { 56 | return email.length > 0 && password.length > 0; 57 | } 58 | 59 | function handleSubmit(event: { preventDefault: () => void }) { 60 | event.preventDefault(); 61 | } 62 | 63 | const signUp = () => { 64 | dispatch(signUpRequestActionCreator()); 65 | fetch('/api/signup', { 66 | method: 'POST', 67 | headers: { 68 | 'Content-Type': 'application/json', 69 | }, 70 | body: JSON.stringify({ email, password }), 71 | credentials: 'include', 72 | }) 73 | .then((response) => response.json()) 74 | .then((data) => { 75 | dispatch(setUserActionCreator(email)); 76 | dispatch(signUpSuccessActionCreator()); 77 | console.log("new user signed up: ", data); 78 | 79 | }) 80 | .catch((error) => { 81 | dispatch(signUpFailActionCreator(error)); 82 | }); 83 | }; 84 | 85 | const login = () => { 86 | dispatch(loginRequestActionCreator()); 87 | fetch('/api/login', { 88 | method: 'POST', 89 | headers: { 90 | 'Content-Type': 'application/json', 91 | }, 92 | body: JSON.stringify({ email, password }), 93 | credentials: 'include', 94 | }) 95 | .then((response) => { 96 | if (response.status != 200) { 97 | throw Error(); 98 | } 99 | return response.json(); 100 | }) 101 | .then((data) => { 102 | dispatch(setUserActionCreator(email)); 103 | dispatch(loginSuccessActionCreator()); 104 | }) 105 | .catch((error) => { 106 | dispatch(loginFailActionCreator(error)); 107 | console.error('Error:', error); 108 | }); 109 | }; 110 | // const oauthLogin = () => { 111 | // dispatch(OauthLoginRequestActionCreator()); 
112 | // fetch('/oauth-callback', { 113 | // method: "POST", 114 | // headers: { 115 | // "Content-Type": "application/json", 116 | // }, 117 | // body: JSON.stringify({ email, password }), 118 | // credentials: "include", 119 | // }) 120 | // .then((response) => { 121 | // if (response.status != 200) { 122 | // throw Error(); 123 | // } 124 | // return response.json(); 125 | // }) 126 | // .then((data) => { 127 | // console.log("email is ", email); 128 | // dispatch(OauthLoginSuccessActionCreator()); 129 | // console.log("login", data); 130 | // }) 131 | // .catch((error) => { 132 | // dispatch(OauthLoginFailActionCreator(error)); 133 | // console.error("Error:", error); 134 | // }); 135 | // }; 136 | 137 | const handleSubmitButton = () => { 138 | if (validateForm()) { 139 | if (isLogin) login(); 140 | else signUp(); 141 | } else { 142 | dispatch( 143 | setErrorActionCreator('Cannot leave email/password fields empty') 144 | ); 145 | } 146 | }; 147 | 148 | const handleChangePage = () => { 149 | const emailField: HTMLInputElement | null = 150 | document.querySelector('#emailField'); 151 | const passwordField: HTMLInputElement | null = 152 | document.querySelector('#passwordField'); 153 | if (emailField) { 154 | console.log(emailField); 155 | console.log(emailField.value); 156 | emailField.value = ''; 157 | } 158 | if (passwordField) { 159 | passwordField.value = ''; 160 | } 161 | dispatch(setErrorActionCreator('')); 162 | setIsLogin(!isLogin); 163 | }; 164 | 165 | const handleGithubLogin = () => {}; 166 | 167 | return ( 168 |
169 | 170 |
171 |

{isLogin ? "Log in" : "Sign up"}

172 | 173 |
174 | setEmail(e.target.value)} 180 | label='email' 181 | variant='outlined' 182 | id='emailField' 183 | /> 184 |
185 |
186 | 187 |
188 | setPassword(e.target.value)} 192 | label='password' 193 | variant='outlined' 194 | id='passwordField' 195 | /> 196 |
197 |
198 |
199 |

200 | 203 | {/* */} 206 | 207 |

208 | {isLogin && } 209 |
210 |

211 | {/* {" "} */} 212 | {errorMessage} 213 |

214 | setIsLogin(!isLogin)} 217 | > 218 | {isLogin ? 'Create an account' : 'Already have an account?'} 219 | 220 |
221 |
222 |
223 | 224 |
225 | ); 226 | }; 227 | -------------------------------------------------------------------------------- /client/src/containers/FailureReports/ErrorTable.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC, useState } from 'react'; 2 | import { useSelector } from 'react-redux'; 3 | import { KafkaState } from '../../state/reducers/kafkaDataReducer'; 4 | import { overallState } from '../../state/reducers/index'; 5 | 6 | // importing components 7 | import { 8 | Button, 9 | Table, 10 | TableBody, 11 | TableCell, 12 | TableContainer, 13 | TableHead, 14 | TableRow, 15 | TableFooter, 16 | TablePagination, 17 | Paper, 18 | Typography, 19 | Input, 20 | makeStyles, 21 | Modal, 22 | } from '@material-ui/core'; 23 | 24 | import { MTPaginationOptions } from '../PartitionScreen/MTPaginationOptions'; 25 | 26 | // importing styles 27 | const useRowStyles = makeStyles({ 28 | bigwrapper: { 29 | display: 'flex', 30 | justifyContent: 'center', 31 | alignItems: 'center', 32 | marginTop: 20, 33 | }, 34 | root: { 35 | '& > *': { 36 | borderBottom: 'unset', 37 | }, 38 | }, 39 | tableWrapper: { 40 | marginTop: 30, 41 | }, 42 | tableHeaderRow: { 43 | backgroundColor: 'black', 44 | }, 45 | tableHeaderText: { 46 | color: 'white', 47 | fontWeight: 'bold', 48 | }, 49 | }); 50 | 51 | export const ErrorTable: FC = () => { 52 | const classes = useRowStyles(); 53 | 54 | const errors = useSelector( 55 | (state) => state.kafka.notif 56 | ); 57 | 58 | const [pageSize, setPageSize] = useState(10); 59 | const [pageIndex, setPageIndex] = useState( 60 | Math.floor(errors.length / pageSize) 61 | ); 62 | const [togglePause, setTogglePause] = useState(false); 63 | const start = pageIndex * pageSize; 64 | const end = Math.min(start + pageSize, errors.length); 65 | const showErrors = errors.slice(start, end); 66 | 67 | const numEmptyRows = pageSize - (end - start); 68 | 69 | const emptyRows = []; 70 | for (let i = 0; i < numEmptyRows; i++) { 71 | emptyRows.push( 72 | 73 | 74 | 75 | ); 76 | } 77 | 78 | const handleChangePage = ( 79 | e: React.MouseEvent | null, 80 | pageIndex: number 81 | ) => { 82 | setPageIndex(pageIndex); 83 | }; 84 | 85 | const handleChangePageSize = ( 86 | e: React.ChangeEvent 87 | ) => { 88 | const newPageIndex = Math.floor(start / parseInt(e.target.value)); 89 | if (newPageIndex === Math.floor(errors.length / pageSize)) 90 | setTogglePause(false); 91 | else setTogglePause(true); 92 | setPageSize(parseInt(e.target.value)); 93 | setPageIndex(newPageIndex); 94 | }; 95 | 96 | return ( 97 |
98 | 99 | 100 | 101 | 102 | {errors[0] && 103 | Object.keys(errors[0]).map((key) => ( 104 | {key} 105 | ))} 106 | 107 | 108 | 109 | 110 | {showErrors.map((el, index) => ( 111 | 112 | {Object.values(el).map((value: any) => ( 113 | {value} 114 | ))} 115 | 116 | ))} 117 | {emptyRows} 118 | 119 | 120 | 121 | 122 | ( 134 | 142 | )} 143 | /> 144 | 145 | 146 |
147 |
148 |
149 | ); 150 | }; 151 | -------------------------------------------------------------------------------- /client/src/containers/FailureReports/FailureReportScreen.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC } from 'react'; 2 | import { ErrorTable } from './ErrorTable'; 3 | import NavBar from '../HomeScreen/Sidepanel/NavBar'; 4 | 5 | export const FailureReportScreen: FC = () => { 6 | return ( 7 | <> 8 | 9 | 10 | 11 | ); 12 | }; 13 | -------------------------------------------------------------------------------- /client/src/containers/FailureReports/TableFilter.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC, useState } from 'react'; 2 | 3 | // setting - onlclick - shows up 4 | // filters errors table 5 | import { 6 | Button, 7 | Paper, 8 | Typography, 9 | Input, 10 | makeStyles, 11 | Modal, 12 | } from '@material-ui/core'; 13 | import { Interface } from 'readline'; 14 | // maybe for metrics/ graphics for failure report 15 | // timestamp ( within a range) >=, <=, a < timestamp, b 16 | 17 | // Interface 18 | // importing styles 19 | const colorState: any = { 20 | buttonSelected: 'blue', 21 | buttonNotSelected: 'white', 22 | }; 23 | 24 | const useModalStyles = makeStyles(colorState); 25 | 26 | interface TableFilterProps { 27 | errorMessage: {}[]; 28 | } 29 | 30 | export const TableFilter: FC = ({ errorMessage }) => { 31 | const classes = useModalStyles(); 32 | // clientId = topic1; string 33 | // broker = something; string 34 | // selecting which columns appear in the table 35 | // include = {} 36 | // {namespace, brokerid, clientid, ... } 37 | let defaultObj: any = {}; 38 | if (errorMessage[0]) { 39 | Object.keys(errorMessage[0]).forEach((key) => (defaultObj[key] = false)); 40 | } 41 | const [buttonState, setButtonState] = useState(defaultObj); 42 | 43 | const handleClickButton = (e: any) => { 44 | console.log(e.target); 45 | console.log(e.target.id); 46 | console.log(e.target.classList); // [] 47 | const key = e.target.id; 48 | setButtonState({ ...buttonState, [key]: !buttonState[key] }); 49 | }; 50 | return ( 51 | <> 52 | {/*
{buttonState}
*/} 53 | {/* to show selected filters */} 54 | 55 | {errorMessage[0] && 56 | Object.keys(errorMessage[0]).map((key) => ( 57 | 65 | ))} 66 | 67 | 68 | ); 69 | }; 70 | -------------------------------------------------------------------------------- /client/src/containers/HomeScreen.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | /* ----------------- Update ----------------- 4 | I removed the side panel component and moved Navbar and connect to be part of Homescreen. 5 | 6 | I removed the partiton component from homescreen. Clicking on topic dropsdown to show more info about topic(including partitions) -- clicking on a partition opens a new window (which shows the live data stream) 7 | ---------------------------------------------*/ 8 | 9 | // importing TopicsDisplay 10 | import TopicsDisplay from "./HomeScreen/TopicsDisplay"; 11 | 12 | // importing Navbar 13 | import NavBar from "./HomeScreen/Sidepanel/NavBar"; 14 | 15 | // importing connect 16 | import Connect from "./HomeScreen/Sidepanel/Connect"; 17 | 18 | import { useDispatch, useSelector } from "react-redux"; 19 | import { UserState } from "../state/reducers/userReducer"; 20 | import { overallState } from "../state/reducers/index"; 21 | import { 22 | Redirect 23 | } from "react-router-dom"; 24 | // import {logoutActionCreator} from '../state/actions/userActions'; 25 | 26 | // importing components from Material UI 27 | import { 28 | Button, 29 | Card, 30 | Divider, 31 | Typography, 32 | makeStyles, 33 | } from "@material-ui/core"; 34 | 35 | // styles for homescreen component 36 | const useStyles = makeStyles({ 37 | homeWrapper: { 38 | display: "flex", 39 | height: "100vh", 40 | alignItems: "start", 41 | }, 42 | rightSideWrapper: { 43 | height: "100%", 44 | }, 45 | }); 46 | 47 | const HomeScreen = () => { 48 | // creating a classes variable to customize styles 49 | const classes = useStyles(); 50 | const dispatch = useDispatch(); 51 | const email = useSelector( 52 | (state) => state.user.email 53 | ); 54 | 55 | return ( 56 | 57 | 58 |
59 | 60 | 61 |
62 | {/* */} 63 | {!email && } 64 |
65 | ); 66 | }; 67 | 68 | export default HomeScreen; 69 | -------------------------------------------------------------------------------- /client/src/containers/HomeScreen/Sidepanel/Connect.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC } from 'react'; 2 | import { useSelector, useDispatch } from 'react-redux'; 3 | import { KafkaState } from '../../../state/reducers/kafkaDataReducer'; 4 | import { overallState } from '../../../state/reducers'; 5 | import { 6 | connectedActionCreator, 7 | populateNotifActionCreator, 8 | } from '../../../state/actions/actions'; 9 | import { populateData } from '../../../helperFunctions/populateData'; 10 | 11 | // importing IPC renderer form Electron 12 | const { ipcRenderer } = window.require('electron'); 13 | 14 | // importing componenets from Material UI 15 | import { 16 | Button, 17 | Card, 18 | List, 19 | Divider, 20 | Typography, 21 | Input, 22 | makeStyles, 23 | } from '@material-ui/core'; 24 | 25 | interface Options { 26 | headers: {}; 27 | body: string; 28 | method: string; 29 | } 30 | 31 | // styles for connect Component 32 | const useStyles = makeStyles({ 33 | form: { 34 | alignSelf: 'start', 35 | margin: 30, 36 | }, 37 | card: { 38 | display: 'flex', 39 | flexDirection: 'column', 40 | height: 'auto', 41 | width: 'auto', 42 | 43 | alignItems: 'center', 44 | padding: 10, 45 | }, 46 | title: { 47 | fontWeight: 'bold', 48 | textAlign: 'center', 49 | }, 50 | overline: { 51 | fontWeight: 'lighter', 52 | textAlign: 'center', 53 | }, 54 | button: { 55 | marginTop: 10, 56 | backgroundColor: 'red', 57 | }, 58 | }); 59 | 60 | const Connect: FC = props => { 61 | // display form function -> onSubmit -> send fetch request to backend with Broker URI 62 | const isConnected = useSelector( 63 | state => state.kafka.isConnected 64 | ); 65 | 66 | const dispatch = useDispatch(); 67 | 68 | // creating a classes variable to customize styles 69 | const classes = useStyles(); 70 | 71 | const handleSubmit = (e: any) => { 72 | e.preventDefault(); 73 | let method; 74 | let inputField: HTMLInputElement | null = 75 | document.querySelector('#brokerID'); 76 | let body; 77 | if (inputField) { 78 | body = JSON.stringify({ PORT: inputField.value }); 79 | } else { 80 | alert('Cannot connect because Broker ID field is empty'); 81 | return; 82 | } 83 | 84 | if (!isConnected) { 85 | method = 'POST'; 86 | console.log('106 =>', inputField); 87 | } else { 88 | method = 'PUT'; 89 | } 90 | 91 | const options: Options = { 92 | method, 93 | headers: { 'content-type': 'application/json' }, 94 | body, 95 | }; 96 | 97 | fetch('/api/connect', options) 98 | .then(data => data.json()) 99 | .then(data => { 100 | fetch('/api/notification', { 101 | method: 'GET', 102 | headers: { 'content-type': 'application/json' }, 103 | }) 104 | .then((data: any) => data.json()) 105 | .then((data: Error[]) => { 106 | console.log(data); 107 | dispatch(populateNotifActionCreator(data)); 108 | 109 | // open a websocket connection 110 | // const ws = new WebSocket('ws://localhost:3000/errors'); 111 | // ws.onopen = () => { 112 | // console.log('connected to websocket for error'); 113 | // ws.send('Errors'); 114 | // }; 115 | // ws.onmessage = (data) => { 116 | // console.log('error message received:', data); 117 | // // dispatch(appendNotifActionCreator(data)); 118 | // }; 119 | }) 120 | .catch((e: any) => 121 | console.log('error in fetching data from notifs', e) 122 | ); 123 | dispatch(connectedActionCreator()); 124 | populateData(data, dispatch); 125 | 
}) 126 | .catch(e => { 127 | console.log(e); 128 | }); 129 | }; 130 | 131 | const handleUpload = (e: any) => { 132 | ipcRenderer.send('upload-file'); 133 | }; 134 | 135 | return ( 136 |
137 | 138 | 139 | Enter Your Broker Port Number 140 | 141 | 142 | 150 | 158 |
159 | 160 | Upload Your Docker-compose File 161 | 162 | 171 |
172 |
173 | ); 174 | }; 175 | 176 | export default Connect; 177 | -------------------------------------------------------------------------------- /client/src/containers/HomeScreen/Sidepanel/Groups.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC, useState } from 'react'; 2 | import NavBar from './NavBar'; 3 | 4 | export const Groups = () => { 5 | return ( 6 | <> 7 | 8 |

This is the groups page

9 | 10 | ); 11 | }; 12 | -------------------------------------------------------------------------------- /client/src/containers/HomeScreen/Sidepanel/NavBar.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC, useState } from 'react'; 2 | import { useDispatch } from 'react-redux'; 3 | 4 | // importing items for menu from Material-UI 5 | import { 6 | AppBar, 7 | Toolbar, 8 | Drawer, 9 | Button, 10 | List, 11 | Divider, 12 | ListItem, 13 | ListItemText, 14 | Typography, 15 | ButtonGroup, 16 | Paper, 17 | } from '@material-ui/core'; 18 | 19 | // importing icons 20 | import { 21 | Notifications, 22 | AccountCircle, 23 | Menu, 24 | Assessment, 25 | BugReport, 26 | Brightness4, 27 | Description, 28 | } from '@material-ui/icons'; 29 | 30 | import GroupIcon from '@material-ui/icons/Group'; 31 | import TableChartIcon from '@material-ui/icons/TableChart'; 32 | // importing Link from react router dom 33 | import { Link } from 'react-router-dom'; 34 | 35 | // imports for customizing styles 36 | import { makeStyles } from '@material-ui/core/styles'; 37 | 38 | import { NotifItems } from './NotifItems'; 39 | import {logoutActionCreator} from '../../../state/actions/userActions'; 40 | 41 | // styles for Navbar component - using makeStyles hook - invoked within function 42 | const useStyles = makeStyles({ 43 | navbar: { 44 | marginBottom: 70, 45 | }, 46 | appbar: { 47 | display: 'flex', 48 | flexDirection: 'row', 49 | justifyContent: 'space-between', 50 | backgroundColor: 'black', 51 | }, 52 | list: { 53 | display: 'flex', 54 | flexDirection: 'column', 55 | width: 200, 56 | height: '100%', 57 | justifyContent: 'space-between', 58 | alignItems: 'center', 59 | }, 60 | topHalfList: { 61 | marginTop: 25, 62 | }, 63 | bottomHalfList: { 64 | justifySelf: 'end', 65 | }, 66 | listItem: { 67 | display: 'flex', 68 | flexDirection: 'row', 69 | justifyContent: 'space-around', 70 | }, 71 | listItemText: { 72 | marginLeft: 10, 73 | }, 74 | logo: { 75 | color: 'white', 76 | textDecoration: 'none', 77 | }, 78 | logoOnMenu: { 79 | color: 'black', 80 | textDecoration: 'none', 81 | }, 82 | buttonGroup: { 83 | color: 'black', 84 | }, 85 | imgIcon: { 86 | height: '100%', 87 | width: '100%', 88 | }, 89 | }); 90 | 91 | interface Error { 92 | level: string; 93 | namespace: string; 94 | message: string; 95 | error: string; 96 | clientId: string; 97 | broker: string; 98 | timestamp: string; 99 | } 100 | 101 | const NavBar: FC = () => { 102 | // state to determine if menu is open or close 103 | const [state, setState] = useState({ open: false }); 104 | 105 | // state to determine if notifications is open or closed 106 | const [notifState, setNotif] = useState({ open: false }); 107 | 108 | // creating a classes variable to customize styles 109 | const classes = useStyles(); 110 | 111 | const dispatch = useDispatch(); 112 | 113 | // function that returns menu items 114 | const menuItems = () => { 115 | return ( 116 |
setState({ open: false })} 119 | className={classes.list} 120 | > 121 | 122 | {/* KafkaFix Logo */} 123 | 124 | 133 | 134 | {/* list item 1 */} 135 | 136 | 137 | 138 | 139 | 143 | 144 | 145 | 146 | {/* list item 2 */} 147 | 151 | 152 | 153 | 157 | 158 | 159 | 160 | 164 | 169 | 170 | 174 | Message Streams 175 | 176 | 177 | 178 | 179 | {/* New Item - Groups */} 180 | {/* 181 | 182 | 183 | 187 | 188 | 189 | 190 | */} 191 | 192 | 193 | 194 | {/* list item 3 */} 195 | 196 | 197 | 198 | 202 | 203 | 204 | {/* list item 4 -- need to add a link to our documentation */} 205 | 206 | 207 | 208 | 212 | 213 | 214 | {/* List item 5 -- need to add a link to our privacy policies and need to add a link to our Terms and conditions */} 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 |
224 | ); 225 | }; 226 | 227 | return ( 228 |
229 | 230 | 231 | {/* menu button on nav bar */} 232 | 233 | 236 | 237 | 238 | {/* Kafka fix logo on Nav bar - takes you back home */} 239 | 240 | 241 | 250 | 251 | 252 | 253 | {/* Notifications and login on Nav bar - open drawers on click*/} 254 | 255 | 258 | 263 | 264 | 265 | {/* Drawer for menu click */} 266 | setState({ open: false })} 270 | > 271 | {state.open ? menuItems() : null} 272 | 273 | 274 | {/* Drawer for notifications */} 275 | setNotif({ open: false })} 279 | > 280 | {notifState.open ? : null} 281 | 282 | 283 | 284 |
285 | ); 286 | }; 287 | 288 | export default NavBar; 289 | -------------------------------------------------------------------------------- /client/src/containers/HomeScreen/Sidepanel/NotifItems.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC } from 'react'; 2 | import { useSelector } from 'react-redux'; 3 | import { populateNotifActionCreator } from '../../../state/actions/actions'; 4 | import { KafkaState } from '../../../state/reducers/kafkaDataReducer'; 5 | import { overallState } from '../../../state/reducers'; 6 | 7 | // importing component 8 | import { Typography, Card, makeStyles } from '@material-ui/core'; 9 | 10 | // fucntion that returns the object to be saved in state 11 | interface Error { 12 | level: string; 13 | namespace: string; 14 | message: string; 15 | error: string; 16 | clientId: string; 17 | broker: string; 18 | timestamp: string; 19 | } 20 | // fucntion that calls the action creator on the return value of 21 | // const populateNotif = (data: any, dispatch: any) => { 22 | // dispatch(dispatch(populateNotifActionCreator(data))); 23 | // }; 24 | 25 | interface notifItemsProps { 26 | setNotif: any; 27 | } 28 | 29 | const useStyles = makeStyles({ 30 | list: { 31 | display: 'flex', 32 | flexDirection: 'column', 33 | width: 420, 34 | margin: 20, 35 | // marginBottom: 100, 36 | justifyContent: 'center', 37 | }, 38 | divForNotifs: { 39 | display: 'flex', 40 | flexDirection: 'column', 41 | // justifyContent: 'center', 42 | alignItems: 'center', 43 | width: 400, 44 | marginBottom: 20, 45 | padding: 10, 46 | backgroundColor: 'white', 47 | }, 48 | span: { 49 | display: 'flex', 50 | justifyContent: 'space-between', 51 | }, 52 | errorMessage: { 53 | fontWeight: 'bold', 54 | textAlign: 'center', 55 | margin: 10, 56 | }, 57 | }); 58 | 59 | export const NotifItems: FC = ({ setNotif }) => { 60 | const classes = useStyles(); 61 | 62 | // fetch request for new notifs 63 | 64 | // fetch('/api/notification') 65 | // .then((data: any) => data.json()) 66 | // .then((data: Error[]) => { 67 | // populateNotif(data, dispatch); 68 | // }) 69 | // .catch((e: any) => console.log('error in fetching data from notifs', e)); 70 | 71 | // init a websocket connection 72 | // can edit the slices to depend on a state/allow user to config 73 | const notifs = useSelector((state) => 74 | state.kafka.notif.slice(-10) 75 | ); 76 | 77 | return ( 78 | 79 |
setNotif({ open: false })} 83 | > 84 | {notifs.map((el) => ( 85 | 86 |
87 | {el.namespace} 88 | {el.broker} 89 |
90 | 91 | 92 | {el.error} 93 | 94 | 95 |
96 | {el.clientID} 97 | {el.message} 98 |
99 |
100 | ))} 101 |
102 |
103 | ); 104 | }; 105 | -------------------------------------------------------------------------------- /client/src/containers/HomeScreen/Sidepanel/Oauth.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Link } from 'react-router-dom'; 3 | 4 | export const Oauth = () => { 5 | return ( 6 |
7 | 8 | 11 | 12 |
13 | ) 14 | }; -------------------------------------------------------------------------------- /client/src/containers/HomeScreen/Sidepanel/User.tsx: -------------------------------------------------------------------------------- 1 | // import { Provider } from 'react-redux'; 2 | 3 | import React from 'react'; 4 | import { Link } from 'react-router-dom'; 5 | 6 | export const User = () => { 7 | return ( 8 |
9 | 10 | 13 | 14 | 15 | 18 | 19 | 20 | 23 | 24 |
25 | ) 26 | }; -------------------------------------------------------------------------------- /client/src/containers/HomeScreen/TopicsDisplay.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC, useState } from 'react'; 2 | import { useSelector, useDispatch } from 'react-redux'; 3 | import { overallState } from '../../state/reducers'; 4 | import { KafkaState } from '../../state/reducers/kafkaDataReducer'; 5 | import { TopicRow } from './TopicsDisplay/TopicRow'; 6 | import { populateData } from '../../helperFunctions/populateData'; 7 | import { 8 | Button, 9 | Table, 10 | TableBody, 11 | TableCell, 12 | TableContainer, 13 | TableHead, 14 | TableRow, 15 | Paper, 16 | Typography, 17 | Input, 18 | makeStyles, 19 | Modal, 20 | } from '@material-ui/core'; 21 | import { ErrorRounded } from '@material-ui/icons'; 22 | 23 | import { KeyboardArrowDown, KeyboardArrowUp } from '@material-ui/icons'; 24 | import { Link } from 'react-router-dom'; 25 | 26 | const useRowStyles = makeStyles({ 27 | root: { 28 | '& > *': { 29 | borderBottom: 'unset', 30 | }, 31 | }, 32 | tableWrapper: { 33 | margin: 30, 34 | }, 35 | buttonsWrapper: { 36 | display: 'flex', 37 | justifyContent: 'space-around', 38 | }, 39 | tableHeaderRow: { 40 | backgroundColor: 'black', 41 | }, 42 | tableHeaderText: { 43 | color: 'white', 44 | fontWeight: 'bold', 45 | }, 46 | buttonNotSelected: { 47 | backgroundColor: 'white', 48 | }, 49 | buttonSelected: { 50 | backgroundColor: 'blue', 51 | }, 52 | partitionButtons: { 53 | backgroundColor: 'white', 54 | }, 55 | primaryButtons: { 56 | backgroundColor: 'white', 57 | justifySelf: 'center', 58 | color: 'black', 59 | }, 60 | modal: { 61 | display: 'flex', 62 | alignItems: 'center', 63 | justifyContent: 'center', 64 | }, 65 | insideModalDiv: { 66 | display: 'flex', 67 | width: 300, 68 | height: 300, 69 | flexDirection: 'column', 70 | justifyContent: 'center', 71 | alignItems: 'center', 72 | backgroundColor: 'white', 73 | borderRadius: '5%', 74 | }, 75 | button: { 76 | marginTop: 10, 77 | backgroundColor: 'red', 78 | }, 79 | }); 80 | 81 | interface Options { 82 | method: string; 83 | body: string; 84 | headers: any; 85 | } 86 | 87 | const TopicsDisplay: FC = () => { 88 | const classes = useRowStyles(); 89 | const isConnected = useSelector( 90 | (state) => state.kafka.isConnected 91 | ); 92 | 93 | const rows = useSelector( 94 | (state) => state.kafka.data 95 | ); 96 | 97 | const dispatch = useDispatch(); 98 | 99 | // local state to create a topic 100 | const [modalForCreateTopic, setModalForCreateTopic] = useState(false); 101 | 102 | const toggleCreateTopicModal = () => { 103 | setModalForCreateTopic(!modalForCreateTopic); 104 | }; 105 | 106 | // state for modal 107 | const [modalForConsumer, setModalForConsumer] = useState(false); 108 | 109 | // state for button 110 | const [isConsumerStarted, setIsConsumerStarted] = useState(false); 111 | 112 | const toggleConsumerModal = () => { 113 | setModalForConsumer(!modalForConsumer); 114 | }; 115 | 116 | // state for modal 117 | const [modalForProducer, setModalForProducer] = useState(false); 118 | 119 | // state for button 120 | const [isProducerStarted, setIsProducerStarted] = useState(false); 121 | 122 | const toggleProducerModal = () => { 123 | setModalForProducer(!modalForProducer); 124 | }; 125 | 126 | const handleCreateTopic = () => { 127 | const topicName: HTMLInputElement | null = 128 | document.querySelector('#inputTopic'); 129 | 130 | const numberOfPartitions: HTMLInputElement | null = 
document.querySelector( 131 | '#inputNumberOfPartitions' 132 | ); 133 | 134 | // sending topic name and number of partitions when creating partitions 135 | if ( 136 | topicName && 137 | topicName.value && 138 | numberOfPartitions && 139 | numberOfPartitions.value 140 | ) { 141 | const options: Options = { 142 | method: 'POST', 143 | body: JSON.stringify({ 144 | topic: topicName.value, 145 | numPartitions: numberOfPartitions.value, 146 | }), 147 | headers: { 'Content-Type': 'application/json' }, 148 | }; 149 | 150 | fetch('/api/topic', options) 151 | .then((data) => data.json()) 152 | .then((data) => { 153 | populateData(data, dispatch); 154 | toggleCreateTopicModal(); 155 | alert('Created a new topic'); 156 | }) 157 | .catch((e) => console.log(e)); 158 | } 159 | }; 160 | 161 | // onclick handler for deleting a topic 162 | const deleteTopicHandler = (topicName: String) => { 163 | const options: Options = { 164 | method: 'DELETE', 165 | body: JSON.stringify({ topic: topicName }), 166 | headers: { 'Content-Type': 'application/json' }, 167 | }; 168 | 169 | fetch('/api/topic', options) 170 | .then((data) => data.json()) 171 | .then((data) => { 172 | populateData(data, dispatch); 173 | }) 174 | .catch((e) => console.log('error in deleting topic, ', e)); 175 | }; 176 | 177 | const handleToggleProducer = () => { 178 | // include inputted data from modal 179 | 180 | const topic = ( 181 | document.getElementById('selectProducer') as HTMLInputElement 182 | ).value; 183 | 184 | const options = { 185 | method: 'POST', 186 | headers: { 'Content-Type': 'application/json' }, 187 | body: JSON.stringify({ topic: topic }), 188 | }; 189 | 190 | if (isProducerStarted) { 191 | options.method = 'PUT'; 192 | } 193 | 194 | fetch('/api/producer', options) 195 | .then((data) => data.json()) 196 | .then((data) => { 197 | console.log(data); 198 | }) 199 | .catch((e) => console.log(e)); 200 | 201 | setIsProducerStarted(!isProducerStarted); 202 | toggleProducerModal(); 203 | }; 204 | 205 | const handleToggleConsumer = () => { 206 | const topic = (document.getElementById('selectTopic') as HTMLInputElement) 207 | .value; 208 | const groupId = ( 209 | document.getElementById('createGroupID') as HTMLInputElement 210 | ).value; 211 | 212 | console.log('topic: ', topic, 'groupId ', groupId); 213 | 214 | const option = { 215 | method: 'POST', 216 | body: JSON.stringify({ topic, groupId }), 217 | headers: { 'content-type': 'application/json' }, 218 | }; 219 | 220 | if (isConsumerStarted) { 221 | option.method = 'PUT'; 222 | } 223 | 224 | fetch('/api/consumer', option) 225 | .then((data) => data.json()) 226 | .then((data) => toggleConsumerModal()) 227 | .catch((e) => console.log(e.target)); 228 | 229 | setIsConsumerStarted(!isConsumerStarted); 230 | toggleConsumerModal(); 231 | }; 232 | 233 | return ( 234 | 235 | 236 | 237 | 245 | 253 | 261 | 262 | 263 | {/* Table Head */} 264 | 265 | 266 | 267 | 268 | Topic Name 269 | 270 | 271 | Partitions 272 | 273 | 274 | 275 | 276 | {/* Table Body*/} 277 | {isConnected && ( 278 | 279 | {rows.map((row) => ( 280 | 281 | 282 | {/* // delete a topic */} 283 | 290 | 291 | ))} 292 | 293 | // create a topic 294 | )} 295 |
296 | 297 | {/* Modal for Creating a new topic */} 298 | 305 |
306 | Enter Topic Name 307 | 308 | 309 | 316 |
317 |
318 | 319 | {/* Modal for Producer */} 320 | 327 |
328 | Producer to start 329 | 330 | 331 | 332 | 340 |
341 |
342 | 343 | {/* Modal for Consumer */} 344 | 351 |
352 | Select Topics To Read 353 | 354 | 355 | 356 | Create A Group ID 357 | 358 | 363 | 364 | 372 |
373 |
374 |
375 |
376 | ); 377 | }; 378 | 379 | export default TopicsDisplay; 380 | -------------------------------------------------------------------------------- /client/src/containers/HomeScreen/TopicsDisplay/TopicRow.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC, MouseEvent, useState } from 'react'; 2 | import { useSelector, useDispatch } from 'react-redux'; 3 | import { overallState } from '../../../state/reducers'; 4 | // // importing IPCReder 5 | const { ipcRenderer } = window.require('electron'); 6 | import { populateData } from '../../../helperFunctions/populateData'; 7 | 8 | import { KeyboardArrowDown, KeyboardArrowUp } from '@material-ui/icons'; 9 | 10 | import { Link } from 'react-router-dom'; 11 | 12 | import { 13 | Button, 14 | Box, 15 | Collapse, 16 | IconButton, 17 | Table, 18 | TableBody, 19 | TableCell, 20 | TableHead, 21 | TableRow, 22 | Typography, 23 | Input, 24 | makeStyles, 25 | Modal, 26 | } from '@material-ui/core'; 27 | 28 | import { ErrorRounded } from '@material-ui/icons'; 29 | 30 | const useRowStyles = makeStyles({ 31 | root: { 32 | '& > *': { 33 | borderBottom: 'unset', 34 | }, 35 | }, 36 | tableWrapper: { 37 | margin: 30, 38 | boxShadow: '10px 5px 5px lightgrey;', 39 | }, 40 | tableHeaderRow: { 41 | backgroundColor: 'black', 42 | }, 43 | tableHeaderText: { 44 | color: 'white', 45 | fontWeight: 'bold', 46 | }, 47 | partitionButtons: { 48 | backgroundColor: 'white', 49 | }, 50 | primaryButtons: { 51 | backgroundColor: 'white', 52 | justifySelf: 'center', 53 | color: 'black', 54 | fontWeight: 'bold', 55 | }, 56 | modal: { 57 | display: 'flex', 58 | alignItems: 'center', 59 | justifyContent: 'center', 60 | }, 61 | insideModalDiv: { 62 | display: 'flex', 63 | width: 300, 64 | height: 300, 65 | flexDirection: 'column', 66 | justifyContent: 'center', 67 | alignItems: 'center', 68 | backgroundColor: 'white', 69 | borderRadius: '5%', 70 | }, 71 | button: { 72 | marginTop: 10, 73 | backgroundColor: 'red', 74 | }, 75 | }); 76 | 77 | interface Options { 78 | // headers: {}; 79 | body: string; 80 | method: string; 81 | } 82 | 83 | export const TopicRow = (props: { row: any }) => { 84 | const { row } = props; 85 | const [open, setOpen] = React.useState(false); 86 | const classes = useRowStyles(); 87 | 88 | const [isOpenModal, setOpenModal] = useState(false); 89 | 90 | // function to handle partition click -- opens a new window -- we need to know which partiton to show live data for 91 | const handleClickPartition = () => { 92 | ipcRenderer.send('open-partition'); 93 | }; 94 | 95 | const dispatch = useDispatch(); 96 | const handleCreatePartition = () => { 97 | // grabbing inputs 98 | const input: HTMLInputElement | null = 99 | document.querySelector('#inputPartition'); 100 | 101 | const topic: HTMLInputElement | null = document.querySelector( 102 | '#inputTopicForPartition' 103 | ); 104 | 105 | // input validation 106 | if (input && input.value === '') { 107 | alert('cannot leave the name field empty for the partition'); 108 | return; 109 | } 110 | // fetch request 111 | const options: RequestInit | Options = { 112 | method: 'POST', 113 | headers: { 'Content-Type': 'application/json' }, 114 | body: JSON.stringify({ 115 | topic: topic?.value, 116 | numPartitions: Number(input?.value), 117 | }), 118 | }; 119 | 120 | //finish the then after getting reposne 121 | fetch('/api/partition', options) 122 | .then((data: any) => data.json()) 123 | .then((data) => { 124 | console.log('data from backend after sending to add partition ', 
data); 125 | // reset everything in redux 126 | populateData(data, dispatch); 127 | }) 128 | .catch((e) => console.log(e)); 129 | }; 130 | 131 | const openModal = () => { 132 | setOpenModal(true); 133 | }; 134 | 135 | const closeModal = () => { 136 | setOpenModal(false); 137 | }; 138 | 139 | return ( 140 | 141 | 142 | 143 | {/* onclick - arrow changes */} 144 | setOpen(!open)} 148 | > 149 | {open ? : } 150 | 151 | 152 | 153 | 154 | {row.topicName} 155 | 156 | {row.partitions} 157 | 158 | 159 | {/* Create another TableRow for the partitions*/} 160 | 161 | 162 | 163 | 164 | 171 | Partitions 172 | 173 | 174 | {/* Table headers for Partitions */} 175 | 176 | 177 | 178 | 179 | Id 180 | 181 | 182 | Leader 183 | 184 | 185 | Partition-errode 186 | 187 | 188 | ISR 189 | 190 | 191 | Replicas 192 | 193 | 194 | 195 | 196 | {/* Table Body */} 197 | {/* Mapping through array of partitions -- row needs to be state */} 198 | 199 | {row.partitionData.map((data: any) => ( 200 | <> 201 | handleClickPartition()} 205 | > 206 | 207 | {data.id} 208 | 209 | {data.leader} 210 | {data.partitionErrorCode} 211 | {data.isr} 212 | {data.replicas} 213 | 214 | {/*
*/} 215 | 216 | ))} 217 | 224 | 225 | 232 |
233 | Number of Partitions 234 | 239 | 244 | 252 |
253 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 | ); 262 | }; 263 | -------------------------------------------------------------------------------- /client/src/containers/Metrics/BarChart.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useRef, useState, FC } from 'react'; 2 | import { Chart, registerables } from 'chart.js'; 3 | import { useSelector } from 'react-redux'; 4 | import { overallState } from '../../state/reducers/index'; 5 | import { MetricsState } from '../../state/reducers/metricsReducer'; 6 | 7 | import { 8 | Typography, 9 | makeStyles, 10 | Card, 11 | } from '@material-ui/core'; 12 | 13 | const useStyles = makeStyles(() => ({ 14 | emptyWrapper: { 15 | display: 'flex', 16 | flexDirection: 'column', 17 | }, 18 | })); 19 | 20 | // chart.js 3 is ESM tree shakeable and requires to register all components that you are going to use. Thus, you have to register the linear scale manually 21 | Chart.register(...registerables); 22 | 23 | export const BarChart: FC = () => { 24 | // state object to plot the chart on 25 | const chartContainer: any = useRef(null); 26 | const classes = useStyles(); 27 | // state of barchart 28 | const [chartInstance, setChartInstance] = useState(null); 29 | 30 | const chartData = useSelector( 31 | (state) => state.metrics.chartData 32 | ); 33 | console.log('chartData state', chartData); 34 | 35 | const chartConfig: any = { 36 | type: 'bar', 37 | data: chartData, 38 | options: { 39 | scales: { 40 | yAxes: [ 41 | { 42 | ticks: { 43 | beginAtZero: true, 44 | }, 45 | }, 46 | ], 47 | }, 48 | }, 49 | }; 50 | 51 | useEffect(() => { 52 | if (chartInstance) chartInstance.clear(); 53 | if (chartContainer && chartContainer.current) { 54 | const newChartInstance: any = new Chart( 55 | chartContainer.current, 56 | chartConfig 57 | ); 58 | setChartInstance(newChartInstance); 59 | } 60 | }, [chartContainer, chartData]); 61 | 62 | const handleEmptyReturnValue = (): any => { 63 | return ( 64 | 65 | 66 | The selected metric is currently unavailable 67 | 68 | 69 | Data is generated as you run your kafka cluster. Select another metric 70 | 71 | 72 | ); 73 | }; 74 | 75 | return ( 76 | <> 77 | {Object.values(chartData).length ? 
( 78 | 79 | ) : ( 80 | handleEmptyReturnValue() 81 | )} 82 | 83 | ); 84 | }; -------------------------------------------------------------------------------- /client/src/containers/Metrics/MetricsScreen.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState, FC } from 'react'; 2 | import { requestParameters } from './requestParameters'; 3 | import { BarChart } from './BarChart'; 4 | import NavBar from '../HomeScreen/Sidepanel/NavBar'; 5 | import { PieChart } from './PieChart'; 6 | import { useDispatch, useSelector } from 'react-redux'; 7 | import { populateChart } from '../../helperFunctions/populateChart'; 8 | 9 | import { 10 | InputLabel, 11 | Button, 12 | Select, 13 | FormControl, 14 | MenuItem, 15 | Paper, 16 | Typography, 17 | Input, 18 | makeStyles, 19 | Card, 20 | } from '@material-ui/core'; 21 | 22 | // styles 23 | const useStyles = makeStyles(() => ({ 24 | metricsWrapper: { 25 | display: 'flex', 26 | flexDirection: 'column', 27 | }, 28 | barChart: { 29 | alignItems: 'center', 30 | }, 31 | pieChart: { 32 | width: 600, 33 | alignSelf: 'center', 34 | marginTop: 50, 35 | marginBottom: 40, 36 | }, 37 | button: { 38 | display: 'block', 39 | marginBottom: 20, 40 | }, 41 | formControl: { 42 | margin: 20, 43 | minWidth: 120, 44 | }, 45 | })); 46 | 47 | export const MetricsScreen: FC = () => { 48 | const classes = useStyles(); 49 | 50 | // local state for opening the selction for metrics 51 | const [isSelectOpen, setIsSelectOpen] = useState(false); 52 | 53 | // local state for saving selected value 54 | const [selectedMetric, setSelectedMetric] = useState(''); 55 | 56 | const dispatch = useDispatch(); 57 | 58 | const toggleSelect = () => { 59 | setIsSelectOpen(!isSelectOpen); 60 | }; 61 | 62 | const handleSelectedMetric = (e: any) => { 63 | setSelectedMetric(e.target.value); 64 | 65 | // we nead a helper fuction to create labels and values before storing in state 66 | 67 | // make fetch request and save data to redux - data to be used in reusable charts 68 | 69 | let url = 'http://localhost:9090/api/v1/query?query='; 70 | 71 | fetch((url += e.target.value)) 72 | .then((data) => data.json()) 73 | .then((data) => { 74 | const { 75 | data: { result }, 76 | } = data; 77 | populateChart(result, dispatch); 78 | }); 79 | }; 80 | 81 | return ( 82 | 83 | 84 | 85 | {/* Form to select metric you want to display */} 86 | 87 | Select a metric from the dropdown 88 | 105 | 106 | 107 | {/* Import bar Chart */} 108 | 109 | 110 | 111 | 112 | {/* Import pie Chart */} 113 |
114 | 115 |
116 |
117 |
118 | ); 119 | }; 120 | -------------------------------------------------------------------------------- /client/src/containers/Metrics/PieChart.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useRef, useState } from 'react'; 2 | import { Chart, registerables } from 'chart.js'; 3 | import { useSelector } from 'react-redux'; 4 | import { overallState } from '../../state/reducers/index'; 5 | import { MetricsState } from '../../state/reducers/metricsReducer'; 6 | 7 | // chart.js 3 is ESM tree shakeable and requires to register all components that you are going to use. Thus, you have to register the linear scale manually 8 | Chart.register(...registerables); 9 | 10 | export const PieChart = () => { 11 | // state object to plot the chart on 12 | const chartContainer: any = useRef(null); 13 | 14 | // state of barchart 15 | const [chartInstance, setChartInstance] = useState(null); 16 | 17 | const chartData = useSelector( 18 | (state) => state.metrics.chartData 19 | ); 20 | 21 | const config: any = { 22 | type: 'polarArea', 23 | data: {}, 24 | }; 25 | 26 | useEffect(() => { 27 | if (chartInstance) chartInstance.clear(); 28 | config.data = chartData; 29 | if (chartContainer && chartContainer.current) { 30 | const newChartInstance: any = new Chart(chartContainer.current, config); 31 | setChartInstance(newChartInstance); 32 | } 33 | }, [chartContainer, chartData]); 34 | 35 | return ( 36 | <> 37 | {Object.values(chartData).length ? ( 38 | 39 | ) : null} 40 | 41 | ); 42 | }; -------------------------------------------------------------------------------- /client/src/containers/Metrics/requestParameters.tsx: -------------------------------------------------------------------------------- 1 | export const requestParameters = (): string[] => { 2 | return [ 3 | 'jmx_scrape_duration_seconds', 4 | 5 | 'jmx_scrape_error', 6 | 'jvm_gc_collection_seconds_count', 7 | 8 | 'jvm_gc_collection_seconds_sum', 9 | 10 | 'jvm_memory_bytes_committed', 11 | 12 | 'jvm_memory_bytes_max', 13 | 14 | 'jvm_memory_bytes_used', 15 | 16 | 'jvm_memory_pool_bytes_committed', 17 | 18 | 'jvm_memory_pool_bytes_max', 19 | 20 | 'jvm_memory_pool_bytes_used', 21 | 22 | 'kafka_cluster_partition_atminisr', 23 | 24 | 'kafka_cluster_partition_insyncreplicascount', 25 | 26 | 'kafka_cluster_partition_laststableoffsetlag', 27 | 28 | 'kafka_cluster_partition_replicascount', 29 | 30 | 'kafka_cluster_partition_underminisr', 31 | 32 | 'kafka_cluster_partition_underreplicated', 33 | 34 | 'kafka_controller_controllerchannelmanager_queuesize', 35 | 36 | 'kafka_controller_controllerchannelmanager_requestrateandqueuetimems', 37 | 38 | 'kafka_controller_controllerchannelmanager_requestrateandqueuetimems_count', 39 | 40 | 'kafka_controller_controllerchannelmanager_totalqueuesize', 41 | 42 | 'kafka_controller_controllereventmanager_eventqueuesize', 43 | 'kafka_controller_controllereventmanager_eventqueuetimems', 44 | 45 | 'kafka_controller_controllereventmanager_eventqueuetimems_count', 46 | 47 | 'kafka_controller_controllerstats_autoleaderbalancerateandtimems', 48 | 49 | 'kafka_controller_controllerstats_autoleaderbalancerateandtimems_count', 50 | 51 | 'kafka_controller_controllerstats_controlledshutdownrateandtimems', 52 | 53 | 'kafka_controller_controllerstats_controlledshutdownrateandtimems_count', 54 | 55 | 'kafka_controller_controllerstats_controllerchangerateandtimems', 56 | 57 | 'kafka_controller_controllerstats_controllerchangerateandtimems_count', 58 | 59 | 
'kafka_controller_controllerstats_controllershutdownrateandtimems', 60 | 61 | 'kafka_controller_controllerstats_controllershutdownrateandtimems_count', 62 | 63 | 'kafka_controller_controllerstats_isrchangerateandtimems', 64 | 65 | 'kafka_controller_controllerstats_isrchangerateandtimems_count', 66 | 67 | 'kafka_controller_controllerstats_leaderandisrresponsereceivedrateandtimems', 68 | 69 | 'kafka_controller_controllerstats_leaderandisrresponsereceivedrateandtimems_count', 70 | 71 | 'kafka_controller_controllerstats_leaderelectionrateandtimems', 72 | 73 | 'kafka_controller_controllerstats_leaderelectionrateandtimems_count', 74 | 75 | 'kafka_controller_controllerstats_listpartitionreassignmentrateandtimems', 76 | 77 | 'kafka_controller_controllerstats_listpartitionreassignmentrateandtimems_count', 78 | 79 | 'kafka_controller_controllerstats_logdirchangerateandtimems', 80 | 81 | 'kafka_controller_controllerstats_logdirchangerateandtimems_count', 82 | 83 | 'kafka_controller_controllerstats_manualleaderbalancerateandtimems', 84 | 85 | 'kafka_controller_controllerstats_manualleaderbalancerateandtimems_count', 86 | 87 | 'kafka_controller_controllerstats_partitionreassignmentrateandtimems', 88 | 89 | 'kafka_controller_controllerstats_partitionreassignmentrateandtimems_count', 90 | 91 | 'kafka_controller_controllerstats_topicchangerateandtimems', 92 | 93 | 'kafka_controller_controllerstats_topicchangerateandtimems_count', 94 | 95 | 'kafka_controller_controllerstats_topicdeletionrateandtimems', 96 | 97 | 'kafka_controller_controllerstats_topicdeletionrateandtimems_count', 98 | 99 | 'kafka_controller_controllerstats_topicuncleanleaderelectionenablerateandtimems', 100 | 101 | 'kafka_controller_controllerstats_topicuncleanleaderelectionenablerateandtimems_count', 102 | 103 | 'kafka_controller_controllerstats_uncleanleaderelectionenablerateandtimems', 104 | 105 | 'kafka_controller_controllerstats_uncleanleaderelectionenablerateandtimems_count', 106 | 107 | 'kafka_controller_controllerstats_uncleanleaderelections_total', 108 | 109 | 'kafka_controller_controllerstats_updatefeaturesrateandtimems', 110 | 111 | 'kafka_controller_controllerstats_updatefeaturesrateandtimems_count', 112 | 113 | 'kafka_controller_kafkacontroller_activecontrollercount', 114 | 115 | 'kafka_controller_kafkacontroller_controllerstate', 116 | 117 | 'kafka_controller_kafkacontroller_globalpartitioncount', 118 | 119 | 'kafka_controller_kafkacontroller_globaltopiccount', 120 | 121 | 'kafka_controller_kafkacontroller_offlinepartitionscount', 122 | 123 | 'kafka_controller_kafkacontroller_preferredreplicaimbalancecount', 124 | 125 | 'kafka_controller_kafkacontroller_replicasineligibletodeletecount', 126 | 127 | 'kafka_controller_kafkacontroller_replicastodeletecount', 128 | 129 | 'kafka_controller_kafkacontroller_topicsineligibletodeletecount', 130 | 131 | 'kafka_controller_kafkacontroller_topicstodeletecount', 132 | 133 | 'kafka_coordinator_group_groupmetadatamanager_numgroups', 134 | 135 | 'kafka_coordinator_group_groupmetadatamanager_numgroupscompletingrebalance', 136 | 137 | 'kafka_coordinator_group_groupmetadatamanager_numgroupsdead', 138 | 139 | 'kafka_coordinator_group_groupmetadatamanager_numgroupsempty', 140 | 141 | 'kafka_coordinator_group_groupmetadatamanager_numgroupspreparingrebalance', 142 | 143 | 'kafka_coordinator_group_groupmetadatamanager_numgroupsstable', 144 | 145 | 'kafka_coordinator_group_groupmetadatamanager_numoffsets', 146 | 147 | 
'kafka_coordinator_transaction_transactionmarkerchannelmanager_logappendretryqueuesize', 148 | 149 | 'kafka_coordinator_transaction_transactionmarkerchannelmanager_unknowndestinationqueuesize', 150 | 151 | 'kafka_log_log_logendoffset', 152 | 153 | 'kafka_log_log_logstartoffset', 154 | 155 | 'kafka_log_log_numlogsegments', 156 | 157 | 'kafka_log_log_size', 158 | 159 | 'kafka_log_logcleaner_cleaner_recopy_percent', 160 | 161 | 'kafka_log_logcleaner_deadthreadcount', 162 | 163 | 'kafka_log_logcleaner_max_buffer_utilization_percent', 164 | 165 | 'kafka_log_logcleaner_max_clean_time_secs', 166 | 167 | 'kafka_log_logcleaner_max_compaction_delay_secs', 168 | 169 | 'kafka_log_logcleanermanager_max_dirty_percent', 170 | 171 | 'kafka_log_logcleanermanager_time_since_last_run_ms', 172 | 173 | 'kafka_log_logcleanermanager_uncleanable_bytes', 174 | 175 | 'kafka_log_logcleanermanager_uncleanable_partitions_count', 176 | 177 | 'kafka_log_logmanager_logdirectoryoffline', 178 | 179 | 'kafka_log_logmanager_offlinelogdirectorycount', 180 | 181 | 'kafka_network_acceptor_acceptorblockedpercent_count', 182 | 183 | 'kafka_network_processor_idlepercent', 184 | 185 | 'kafka_network_requestchannel_requestqueuesize', 186 | 187 | 'kafka_network_requestchannel_responsequeuesize', 188 | 189 | 'kafka_network_requestmetrics_errors_total', 190 | 191 | 'kafka_network_requestmetrics_localtimems', 192 | 193 | 'kafka_network_requestmetrics_localtimems_count', 194 | 195 | 'kafka_network_requestmetrics_messageconversionstimems', 196 | 197 | 'kafka_network_requestmetrics_messageconversionstimems_count', 198 | 199 | 'kafka_network_requestmetrics_remotetimems', 200 | 201 | 'kafka_network_requestmetrics_remotetimems_count', 202 | 203 | 'kafka_network_requestmetrics_requestbytes', 204 | 205 | 'kafka_network_requestmetrics_requestbytes_count', 206 | 207 | 'kafka_network_requestmetrics_requestqueuetimems', 208 | 209 | 'kafka_network_requestmetrics_requestqueuetimems_count', 210 | 211 | 'kafka_network_requestmetrics_requests_total', 212 | 213 | 'kafka_network_requestmetrics_responsequeuetimems', 214 | 215 | 'kafka_network_requestmetrics_responsequeuetimems_count', 216 | 217 | 'kafka_network_requestmetrics_responsesendtimems', 218 | 219 | 'kafka_network_requestmetrics_responsesendtimems_count', 220 | 221 | 'kafka_network_requestmetrics_temporarymemorybytes', 222 | 223 | 'kafka_network_requestmetrics_temporarymemorybytes_count', 224 | 225 | 'kafka_network_requestmetrics_throttletimems', 226 | 227 | 'kafka_network_requestmetrics_throttletimems_count', 228 | 'kafka_network_requestmetrics_totaltimems', 229 | 230 | 'kafka_network_requestmetrics_totaltimems_count', 231 | 232 | 'kafka_network_socketserver_controlplaneexpiredconnectionskilledcount', 233 | 234 | 'kafka_network_socketserver_controlplanenetworkprocessoravgidlepercent', 235 | 236 | 'kafka_network_socketserver_expiredconnectionskilledcount', 237 | 238 | 'kafka_network_socketserver_memorypoolavailable', 239 | 240 | 'kafka_network_socketserver_memorypoolused', 241 | 242 | 'kafka_network_socketserver_networkprocessoravgidlepercent', 243 | 244 | 'kafka_server_brokertopicmetrics_bytesin_total', 245 | 246 | 'kafka_server_brokertopicmetrics_bytesout_total', 247 | 248 | 'kafka_server_brokertopicmetrics_bytesrejected_total', 249 | 250 | 'kafka_server_brokertopicmetrics_failedfetchrequests_total', 251 | 252 | 'kafka_server_brokertopicmetrics_failedproducerequests_total', 253 | 254 | 'kafka_server_brokertopicmetrics_fetchmessageconversions_total', 255 | 256 | 
'kafka_server_brokertopicmetrics_invalidmagicnumberrecords_total', 257 | 258 | 'kafka_server_brokertopicmetrics_invalidmessagecrcrecords_total', 259 | 260 | 'kafka_server_brokertopicmetrics_invalidoffsetorsequencerecords_total', 261 | 262 | 'kafka_server_brokertopicmetrics_messagesin_total', 263 | 264 | 'kafka_server_brokertopicmetrics_nokeycompactedtopicrecords_total', 265 | 266 | 'kafka_server_brokertopicmetrics_producemessageconversions_total', 267 | 268 | 'kafka_server_brokertopicmetrics_reassignmentbytesin_total', 269 | 270 | 'kafka_server_brokertopicmetrics_reassignmentbytesout_total', 271 | 'kafka_server_brokertopicmetrics_replicationbytesin_total', 272 | 273 | 'kafka_server_brokertopicmetrics_replicationbytesout_total', 274 | 275 | 'kafka_server_brokertopicmetrics_totalfetchrequests_total', 276 | 277 | 'kafka_server_brokertopicmetrics_totalproducerequests_total', 278 | 279 | 'kafka_server_delayedfetchmetrics_expires_total', 280 | 281 | 'kafka_server_delayedoperationpurgatory_numdelayedoperations', 282 | 283 | 'kafka_server_delayedoperationpurgatory_purgatorysize', 284 | 285 | 'kafka_server_fetchsessioncache_incrementalfetchsessionevictions_total', 286 | 287 | 'kafka_server_fetchsessioncache_numincrementalfetchpartitionscached', 288 | 289 | 'kafka_server_fetchsessioncache_numincrementalfetchsessions', 290 | 291 | 'kafka_server_kafkarequesthandlerpool_requesthandleravgidlepercent_count', 292 | 293 | 'kafka_server_kafkaserver_brokerstate', 294 | 295 | 'kafka_server_kafkaserver_linux_disk_read_bytes', 296 | 297 | 'kafka_server_kafkaserver_linux_disk_write_bytes', 298 | 299 | 'kafka_server_kafkaserver_yammer_metrics_count', 300 | 301 | 'kafka_server_replicaalterlogdirsmanager_deadthreadcount', 302 | 303 | 'kafka_server_replicaalterlogdirsmanager_failedpartitionscount', 304 | 305 | 'kafka_server_replicaalterlogdirsmanager_maxlag', 306 | 307 | 'kafka_server_replicaalterlogdirsmanager_minfetchrate', 308 | 309 | 'kafka_server_replicafetchermanager_deadthreadcount', 310 | 311 | 'kafka_server_replicafetchermanager_failedpartitionscount', 312 | 313 | 'kafka_server_replicafetchermanager_maxlag', 314 | 315 | 'kafka_server_replicafetchermanager_minfetchrate', 316 | 317 | 'kafka_server_replicamanager_atminisrpartitioncount', 318 | 319 | 'kafka_server_replicamanager_failedisrupdates_total', 320 | 321 | 'kafka_server_replicamanager_isrexpands_total', 322 | 323 | 'kafka_server_replicamanager_isrshrinks_total', 324 | 325 | 'kafka_server_replicamanager_leadercount', 326 | 327 | 'kafka_server_replicamanager_offlinereplicacount', 328 | 329 | 'kafka_server_replicamanager_partitioncount', 330 | 331 | 'kafka_server_replicamanager_reassigningpartitions', 332 | 333 | 'kafka_server_replicamanager_underminisrpartitioncount', 334 | 335 | 'kafka_server_replicamanager_underreplicatedpartitions', 336 | 'kafka_server_sessionexpirelistener_zookeeperauthfailures_total', 337 | 338 | 'kafka_server_sessionexpirelistener_zookeeperdisconnects_total', 339 | 340 | 'kafka_server_sessionexpirelistener_zookeeperexpires_total', 341 | 342 | 'kafka_server_sessionexpirelistener_zookeeperreadonlyconnects_total', 343 | 344 | 'kafka_server_sessionexpirelistener_zookeepersaslauthentications_total', 345 | 346 | 'kafka_server_sessionexpirelistener_zookeepersyncconnects_total', 347 | 348 | 'kafka_server_zookeeperclientmetrics_zookeeperrequestlatencyms', 349 | 350 | 'kafka_server_zookeeperclientmetrics_zookeeperrequestlatencyms_count', 351 | 352 | 'kafka_utils_throttler_cleaner_io_count', 353 | 354 | 'process_cpu_seconds_total', 355 | 356 
| 'process_max_fds', 357 | 358 | 'process_open_fds', 359 | 360 | 'process_resident_memory_bytes', 361 | 362 | 'process_start_time_seconds', 363 | 364 | 'process_virtual_memory_bytes', 365 | 366 | 'scrape_duration_seconds', 367 | 368 | 'scrape_samples_post_metric_relabeling', 369 | 370 | 'scrape_samples_scraped', 371 | 'scrape_series_added', 372 | ]; 373 | }; 374 | -------------------------------------------------------------------------------- /client/src/containers/PartitionScreen/MTPaginationOptions.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC } from "react"; 2 | import { 3 | makeStyles, 4 | useTheme, 5 | Theme, 6 | createStyles, 7 | } from "@material-ui/core/styles"; 8 | 9 | import IconButton from "@material-ui/core/IconButton"; 10 | import FirstPageIcon from "@material-ui/icons/FirstPage"; 11 | import KeyboardArrowLeft from "@material-ui/icons/KeyboardArrowLeft"; 12 | import KeyboardArrowRight from "@material-ui/icons/KeyboardArrowRight"; 13 | import LastPageIcon from "@material-ui/icons/LastPage"; 14 | import PauseIcon from "@material-ui/icons/Pause"; 15 | import PlayArrowIcon from "@material-ui/icons/PlayArrow"; 16 | 17 | const useStyles1 = makeStyles((theme: Theme) => 18 | createStyles({ 19 | root: { 20 | flexShrink: 0, 21 | marginLeft: theme.spacing(2.5), 22 | }, 23 | }) 24 | ); 25 | 26 | interface TablePaginationActionsProp { 27 | count: number; 28 | page: number; 29 | rowsPerPage: number; 30 | onPageChange: ( 31 | event: React.MouseEvent, 32 | newPage: number 33 | ) => void; 34 | togglePause: boolean; 35 | setTogglePause: React.Dispatch>; 36 | pageIndex: number; 37 | pageSize: number; 38 | totalMessages: number; 39 | } 40 | 41 | export const MTPaginationOptions: FC = (props) => { 42 | const classes = useStyles1(); 43 | const theme = useTheme(); 44 | 45 | const { 46 | count, 47 | page, 48 | rowsPerPage, 49 | onPageChange, 50 | togglePause, 51 | setTogglePause, 52 | pageIndex, 53 | pageSize, 54 | totalMessages, 55 | } = props; 56 | 57 | const handleFirstPageButtonClick = ( 58 | e: React.MouseEvent 59 | ) => { 60 | setTogglePause(true); 61 | onPageChange(e, 0); 62 | }; 63 | 64 | const handleBackButtonClick = (e: React.MouseEvent) => { 65 | setTogglePause(true); 66 | onPageChange(e, pageIndex - 1); 67 | }; 68 | 69 | const handleNextButtonClick = (e: React.MouseEvent) => { 70 | if (pageIndex + 1 === Math.floor(totalMessages / pageSize)) 71 | setTogglePause(false); 72 | else setTogglePause(true); 73 | onPageChange(e, pageIndex + 1); 74 | }; 75 | 76 | const handleLastPageButtonClick = ( 77 | e: React.MouseEvent 78 | ) => { 79 | setTogglePause(false); 80 | onPageChange(e, Math.floor(totalMessages / pageSize)); 81 | }; 82 | 83 | const handlePauseButtonClick = (e: React.MouseEvent) => { 84 | setTogglePause(!togglePause); 85 | }; 86 | 87 | return ( 88 |
89 | 94 | {theme.direction === "rtl" ? : } 95 | 96 | 101 | {theme.direction === "rtl" ? ( 102 | 103 | ) : ( 104 | 105 | )} 106 | 107 | 108 | {togglePause ? : } 109 | 110 | = Math.ceil(count / rowsPerPage) - 1} 113 | aria-label="next page" 114 | > 115 | {theme.direction === "rtl" ? ( 116 | 117 | ) : ( 118 | 119 | )} 120 | 121 | = Math.ceil(count / rowsPerPage) - 1} 124 | aria-label="last page" 125 | > 126 | {theme.direction === "rtl" ? : } 127 | 128 |
129 | ); 130 | }; 131 | -------------------------------------------------------------------------------- /client/src/containers/PartitionScreen/MessageTable.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC, useState } from 'react'; 2 | import { useDispatch } from 'react-redux'; 3 | import { MTPaginationOptions } from './MTPaginationOptions'; 4 | import { appendMessageActionCreator } from '../../state/actions/actions'; 5 | 6 | import { 7 | Table, 8 | TableBody, 9 | TableCell, 10 | TableContainer, 11 | TableHead, 12 | TableRow, 13 | TableFooter, 14 | TablePagination, 15 | Paper, 16 | makeStyles, 17 | } from '@material-ui/core'; 18 | 19 | const useRowStyles = makeStyles({ 20 | root: { 21 | '& > *': { 22 | borderBottom: 'unset', 23 | }, 24 | }, 25 | tableWrapper: { 26 | margin: 30, 27 | }, 28 | tableHeaderRow: { 29 | backgroundColor: 'black', 30 | }, 31 | tableHeaderText: { 32 | color: 'white', 33 | fontWeight: 'bold', 34 | }, 35 | }); 36 | 37 | interface MessageTableProps { 38 | messages: {}[]; 39 | ws: WebSocket; 40 | setMessages: React.Dispatch>; 41 | } 42 | 43 | export const MessageTable: FC = ({ 44 | messages, 45 | ws, 46 | setMessages, 47 | }) => { 48 | const classes = useRowStyles(); 49 | 50 | const flattenObj = (obj: any) => { 51 | const flatObj: any = {}; 52 | Object.keys(obj).forEach((key) => { 53 | if (typeof obj[key] === 'object') 54 | Object.assign(flatObj, flattenObj(obj[key])); 55 | else flatObj[key] = obj[key]; 56 | }); 57 | return flatObj; 58 | }; 59 | 60 | messages = messages.map((el) => flattenObj(el)); 61 | 62 | const [pageSize, setPageSize] = useState(25); 63 | const [pageIndex, setPageIndex] = useState( 64 | Math.floor(messages.length / pageSize) 65 | ); 66 | const [togglePause, setTogglePause] = useState(false); 67 | const start = pageIndex * pageSize; 68 | const end = Math.min(start + pageSize, messages.length); 69 | const showMessages = messages.slice(start, end); 70 | const numEmptyRows = pageSize - (end - start); 71 | const emptyRows = []; 72 | for (let i = 0; i < numEmptyRows; i++) { 73 | emptyRows.push( 74 | 75 | 78 | 79 | ); 80 | } 81 | 82 | const dispatch = useDispatch(); 83 | 84 | ws.onmessage = (event: any) => { 85 | const array = event.data.split('message: '); 86 | const data = JSON.parse(array[1]); 87 | dispatch(appendMessageActionCreator(data)); 88 | if (!togglePause) setPageIndex(Math.floor(messages.length / pageSize)); 89 | }; 90 | 91 | const handleChangePage = ( 92 | e: React.MouseEvent | null, 93 | pageIndex: number 94 | ) => { 95 | setPageIndex(pageIndex); 96 | }; 97 | 98 | const handleChangePageSize = ( 99 | e: React.ChangeEvent 100 | ) => { 101 | const newPageIndex = Math.floor(start / parseInt(e.target.value)); 102 | if (newPageIndex === Math.floor(messages.length / pageSize)) 103 | setTogglePause(false); 104 | else setTogglePause(true); 105 | setPageSize(parseInt(e.target.value)); 106 | setPageIndex(newPageIndex); 107 | }; 108 | 109 | return ( 110 |
111 | 112 | 113 | 114 | 115 | {messages[0] && 116 | Object.keys(messages[0]).map((key) => ( 117 | {key} 118 | ))} 119 | 120 | 121 | 122 | 123 | {showMessages.map((el, index) => ( 124 | 125 | {Object.values(el).map((value: any) => ( 126 | {value} 127 | ))} 128 | 129 | ))} 130 | {emptyRows} 131 | 132 | 133 | 134 | 135 | ( 147 | 155 | )} 156 | /> 157 | 158 | 159 |
160 |
161 |
162 | ); 163 | }; 164 | -------------------------------------------------------------------------------- /client/src/containers/PartitionScreen/PartitionScreen.tsx: -------------------------------------------------------------------------------- 1 | import React, { FC } from 'react'; 2 | import { useSelector, useDispatch } from 'react-redux'; 3 | import { KafkaState } from '../../state/reducers/kafkaDataReducer'; 4 | import { overallState } from '../../state/reducers/index'; 5 | import { MessageTable } from './MessageTable'; 6 | import NavBar from '../HomeScreen/Sidepanel/NavBar'; 7 | 8 | interface PartitionScreenProps { 9 | topic: string; 10 | partitionID: string; 11 | ws: WebSocket; 12 | } 13 | 14 | export const PartitionScreen: FC = ({ 15 | topic, 16 | partitionID, 17 | ws, 18 | }) => { 19 | const dispatch = useDispatch(); 20 | const messages = useSelector( 21 | state => state.kafka.messages 22 | ); 23 | 24 | return ( 25 | <> 26 | 27 | 28 | 29 | ); 30 | }; 31 | -------------------------------------------------------------------------------- /client/src/helperFunctions/populateChart.ts: -------------------------------------------------------------------------------- 1 | import { populateChartActionCreator } from '../state/actions/actions'; 2 | 3 | const createChartData = (inputData: any) => { 4 | const { 5 | metric: { __name__: label }, 6 | } = inputData[0]; 7 | const labels: string[] = []; 8 | const data: number[] = []; 9 | const backgroundColor: string[] = backgroundColorCreator(inputData.length); 10 | const borderWidth = 4; 11 | 12 | inputData.forEach((el: any) => { 13 | let label = ''; 14 | Object.keys(el.metric).forEach((key, i, arr) => { 15 | if (key !== '__name__') { 16 | label += key + ': ' + el.metric[key]; 17 | if (i !== arr.length - 1) { 18 | label += '\n'; 19 | } 20 | } 21 | }); 22 | labels.push(label); 23 | let addedData = false; 24 | for (let i = 0; i < el.value.length; i++) { 25 | if (typeof el.value[i] === 'number') { 26 | data.push(el.value[i]); 27 | addedData = true; 28 | break; 29 | } 30 | } 31 | if (!addedData) data.push(0); 32 | }); 33 | 34 | const dataSetObj = { label, data, backgroundColor, borderWidth }; 35 | const formattedData = { labels, datasets: [dataSetObj] }; 36 | return formattedData; 37 | }; 38 | 39 | // fire Action Creator 40 | export const populateChart = (data: any, dispatch: any) => { 41 | if (data.length === 0) return dispatch(populateChartActionCreator({})); 42 | const formattedData = createChartData(data); 43 | dispatch(populateChartActionCreator(formattedData)); 44 | }; 45 | 46 | const backgroundColorCreator = (length: number) => { 47 | const backgroundColor = []; 48 | while (length > 0) { 49 | const val1 = getRandomArbitrary(); 50 | const val2 = getRandomArbitrary(); 51 | const val3 = getRandomArbitrary(); 52 | backgroundColor.push(`rgba(${val1}, ${val2}, ${val3}, 0.2)`); 53 | length--; 54 | } 55 | 56 | return backgroundColor; 57 | }; 58 | 59 | const getRandomArbitrary = (): number => { 60 | return Math.floor(Math.random() * (255 - 0) + 0); 61 | }; -------------------------------------------------------------------------------- /client/src/helperFunctions/populateData.ts: -------------------------------------------------------------------------------- 1 | import { 2 | populateDataActionCreator, 3 | } from '../state/actions/actions'; 4 | 5 | const createData = ( 6 | topicName: string, 7 | partitions: number, 8 | partitionData: any 9 | ) => { 10 | return { 11 | topicName: topicName, 12 | partitions: partitions, 13 | partitionData: 
partitionData.map((el: any) => ({ 14 | id: el.partitionId, 15 | partitionErrorCode: el.partitionErrorCode, 16 | leader: !!el.leader, 17 | replicas: el.replicas[0], 18 | isr: el.isr[0], 19 | })), 20 | }; 21 | }; 22 | 23 | export const populateData = (data: any, dispatch: any) => { 24 | const array = data.metadata.topics; 25 | const rows = array.map((el: any) => 26 | createData(el.name, el.partitions.length, el.partitions) 27 | ); 28 | dispatch(populateDataActionCreator(rows)); 29 | }; 30 | -------------------------------------------------------------------------------- /client/src/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 14 | 15 | KafkaFix 16 | 17 | 18 | 19 |
20 | 21 | 22 | -------------------------------------------------------------------------------- /client/src/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import App from './App'; 4 | import { Provider } from 'react-redux'; 5 | import store from './state/store'; 6 | 7 | console.log(document.getElementById('root')) 8 | 9 | ReactDOM.render( 10 | 11 | 12 | , 13 | document.getElementById('root') 14 | ); 15 | -------------------------------------------------------------------------------- /client/src/state/actions/actions.ts: -------------------------------------------------------------------------------- 1 | import { Type } from '../constants/constants'; 2 | import { KafkaState } from '../reducers/kafkaDataReducer'; 3 | 4 | export interface Action { 5 | type: number; 6 | payload?: any; 7 | } 8 | 9 | export const connectedActionCreator = (): Action => { 10 | return { 11 | type: Type.CONNECTED, 12 | }; 13 | }; 14 | 15 | export const disconnectedActionCreator = (): Action => { 16 | return { 17 | type: Type.DISCONNECTED, 18 | }; 19 | }; 20 | 21 | export const populateTopicsActionsCreator = (input: KafkaState[]): Action => { 22 | return { 23 | type: Type.POPULATE_TOPICS, 24 | payload: input, 25 | }; 26 | }; 27 | 28 | export const populateDataActionCreator = (input: any) => { 29 | return { 30 | type: Type.POPULATE_DATA, 31 | payload: input, 32 | }; 33 | }; 34 | 35 | export const appendMessageActionCreator = (input: any) => { 36 | return { 37 | type: Type.APPEND_MESSAGE, 38 | payload: input, 39 | }; 40 | }; 41 | 42 | export const populateNotifActionCreator = (input: any) => { 43 | return { 44 | type: Type.POPULATE_NOTIF, 45 | payload: input, 46 | }; 47 | }; 48 | 49 | export const appendNotifActionCreator = (input: any) => { 50 | return { 51 | type: Type.APPEND_NOTIF, 52 | payload: input, 53 | }; 54 | }; 55 | 56 | export const populateChartActionCreator = (input: any) => { 57 | return { 58 | type: Type.POPULATE_CHART, 59 | payload: input, 60 | }; 61 | }; 62 | -------------------------------------------------------------------------------- /client/src/state/actions/oauthActions.ts: -------------------------------------------------------------------------------- 1 | // import Axios from 'axios'; 2 | import { 3 | USER_LOGIN_REQUEST, USER_LOGIN_SUCCESS, 4 | USER_LOGIN_FAIL, SET_ERROR, 5 | } from '../constants/oauthConstants'; 6 | 7 | 8 | export interface Action { 9 | type: any; 10 | payload: { 11 | email: any; 12 | }; 13 | } 14 | 15 | export const OauthLoginRequestActionCreator = () => { 16 | return { 17 | type: USER_LOGIN_REQUEST, 18 | }; 19 | }; 20 | 21 | export const OauthLoginSuccessActionCreator = () => { 22 | return { 23 | type: USER_LOGIN_SUCCESS, 24 | }; 25 | }; 26 | 27 | export const OauthLoginFailActionCreator = (error: string) => { 28 | return { 29 | type: USER_LOGIN_FAIL, 30 | action: error, 31 | }; 32 | }; 33 | 34 | export const OauthSetErrorActionCreator = (error: string) => { 35 | return { 36 | type: SET_ERROR, 37 | payload: error 38 | } 39 | }; 40 | // export const signIn = async (email: any, password: any, dispatch: (arg0: { type: string; payload?: any; }) => void) => { 41 | // const copyEmail = email; 42 | // dispatch({ type: USER_LOGIN_REQUEST }); 43 | // try { 44 | // const {data} = await Axios.post('/oauth-callback', { email: copyEmail, password }); 45 | // const { email} = data.doc; 46 | // dispatch({ type: USER_LOGIN_SUCCESS, payload: {email: copyEmail} }); 47 | // } 
catch (error) { 48 | // dispatch({ type: USER_LOGIN_FAIL, payload: error.message }); 49 | // } 50 | // }; 51 | -------------------------------------------------------------------------------- /client/src/state/actions/userActions.ts: -------------------------------------------------------------------------------- 1 | <<<<<<< HEAD 2 | 3 | ======= 4 | >>>>>>> 6169dfc14570b0ad4f9d00b46f1bc69b571b4285 5 | import { 6 | USER_LOGIN_REQUEST, 7 | USER_LOGIN_SUCCESS, 8 | USER_LOGIN_FAIL, 9 | USER_SIGNUP_REQUEST, 10 | USER_SIGNUP_SUCCESS, 11 | USER_SIGNUP_FAIL, 12 | USER_LOGOUT, 13 | SET_USER, 14 | SET_ERROR, 15 | } from '../constants/userConstants'; 16 | 17 | export interface Action { 18 | type: any; 19 | payload: { 20 | email: any; 21 | fullName: any; 22 | }; 23 | } 24 | 25 | export const setUserActionCreator = (email: string) => { 26 | return { 27 | type: SET_USER, 28 | payload: email, 29 | }; 30 | }; 31 | 32 | export const loginRequestActionCreator = () => { 33 | return { 34 | type: USER_LOGIN_REQUEST, 35 | }; 36 | }; 37 | 38 | export const loginSuccessActionCreator = () => { 39 | return { 40 | type: USER_LOGIN_SUCCESS, 41 | }; 42 | }; 43 | 44 | export const loginFailActionCreator = (error: string) => { 45 | return { 46 | type: USER_LOGIN_FAIL, 47 | action: error, 48 | }; 49 | }; 50 | 51 | export const signUpRequestActionCreator = () => { 52 | return { 53 | type: USER_SIGNUP_REQUEST, 54 | }; 55 | }; 56 | 57 | export const signUpSuccessActionCreator = () => { 58 | return { 59 | type: USER_SIGNUP_SUCCESS, 60 | }; 61 | }; 62 | 63 | export const signUpFailActionCreator = (error: string) => { 64 | return { 65 | type: USER_SIGNUP_FAIL, 66 | action: error, 67 | }; 68 | }; 69 | 70 | export const logoutActionCreator = () => { 71 | return { 72 | type: USER_LOGOUT, 73 | }; 74 | }; 75 | 76 | export const setErrorActionCreator = (error: string) => { 77 | return { 78 | type: SET_ERROR, 79 | payload: error, 80 | }; 81 | }; 82 | -------------------------------------------------------------------------------- /client/src/state/constants/constants.ts: -------------------------------------------------------------------------------- 1 | export enum Type { 2 | CONNECTED, 3 | DISCONNECTED, 4 | POPULATE_TOPICS, 5 | POPULATE_DATA, 6 | APPEND_MESSAGE, 7 | POPULATE_NOTIF, 8 | APPEND_NOTIF, 9 | POPULATE_CHART, 10 | } 11 | -------------------------------------------------------------------------------- /client/src/state/constants/oauthConstants.ts: -------------------------------------------------------------------------------- 1 | export const USER_LOGIN_REQUEST = 'USER_LOGIN_REQUEST'; 2 | export const USER_LOGIN_SUCCESS = 'USER_LOGIN_SUCCESS'; 3 | export const USER_LOGIN_FAIL = 'USER_LOGIN_FAIL'; 4 | export const SET_ERROR = 'SET_ERROR'; -------------------------------------------------------------------------------- /client/src/state/constants/userConstants.ts: -------------------------------------------------------------------------------- 1 | export const USER_LOGIN_REQUEST = 'USER_LOGIN_REQUEST'; 2 | export const USER_LOGIN_SUCCESS = 'USER_LOGIN_SUCCESS'; 3 | export const USER_LOGIN_FAIL = 'USER_LOGIN_FAIL'; 4 | 5 | export const USER_UPDATE_REQUEST = 'USER_UPDATE_REQUEST'; 6 | export const USER_UPDATE_SUCCESS = 'USER_UPDATE_SUCCESS'; 7 | export const USER_UPDATE_FAIL = 'USER_UPDATE_FAIL'; 8 | 9 | export const USER_SIGNUP_REQUEST = 'USER_SIGNUP_REQUEST'; 10 | export const USER_SIGNUP_SUCCESS = 'USER_SIGNUP_SUCCESS'; 11 | export const USER_SIGNUP_FAIL = 'USER_SIGNUP_FAIL'; 12 | 13 | export const USER_LOGOUT = 
'USER_LOGOUT'; 14 | 15 | export const SET_USER = 'SET_USER'; 16 | export const SET_ERROR = 'SET_ERROR'; -------------------------------------------------------------------------------- /client/src/state/reducers/index.ts: -------------------------------------------------------------------------------- 1 | import { combineReducers } from 'redux'; 2 | import { kafkaDataReducer, KafkaState } from './kafkaDataReducer'; 3 | import { userReducer, UserState } from './userReducer'; 4 | import {metricsReducer, MetricsState} from './metricsReducer'; 5 | 6 | export interface overallState { 7 | kafka: KafkaState; 8 | user: UserState, 9 | metrics: MetricsState 10 | } 11 | 12 | const reducers = combineReducers({ 13 | kafka: kafkaDataReducer, 14 | user: userReducer, 15 | metrics: metricsReducer, 16 | }); 17 | 18 | export default reducers; 19 | -------------------------------------------------------------------------------- /client/src/state/reducers/kafkaDataReducer.ts: -------------------------------------------------------------------------------- 1 | import { Type } from '../constants/constants'; 2 | import { Action } from '../actions/actions'; 3 | 4 | export interface TopicData { 5 | name: string; 6 | partitionNum: number; 7 | consumerNum: number; 8 | producerNum: number; 9 | } 10 | 11 | export interface KafkaState { 12 | isConnected: boolean; 13 | data: any[]; 14 | messages: any[]; 15 | notif: any[]; 16 | } 17 | 18 | const initialState: KafkaState = { 19 | isConnected: false, 20 | data: [], 21 | messages: [], 22 | notif: [], 23 | }; 24 | 25 | export const kafkaDataReducer = ( 26 | state: KafkaState = initialState, 27 | action: Action 28 | ): KafkaState => { 29 | switch (action.type) { 30 | case Type.CONNECTED: 31 | return { 32 | ...state, 33 | isConnected: true, 34 | }; 35 | case Type.DISCONNECTED: 36 | return { 37 | ...state, 38 | isConnected: false, 39 | }; 40 | case Type.POPULATE_DATA: 41 | return { 42 | ...state, 43 | data: action.payload, 44 | }; 45 | case Type.APPEND_MESSAGE: 46 | return { 47 | ...state, 48 | messages: [...state.messages, action.payload], 49 | }; 50 | case Type.POPULATE_NOTIF: 51 | return { 52 | ...state, 53 | notif: action.payload, 54 | }; 55 | case Type.APPEND_NOTIF: 56 | return { 57 | ...state, 58 | notif: [...state.notif, action.payload], 59 | }; 60 | default: 61 | return state; 62 | } 63 | }; 64 | -------------------------------------------------------------------------------- /client/src/state/reducers/metricsReducer.ts: -------------------------------------------------------------------------------- 1 | import { Type } from '../constants/constants'; 2 | import { Action } from '../actions/actions'; 3 | 4 | export interface MetricsState { 5 | chartData: ChartData | Object; 6 | } 7 | 8 | interface ChartData { 9 | labels: string[], 10 | datasets: DataSet[] 11 | } 12 | 13 | interface DataSet { 14 | label: string, 15 | data: number[], 16 | backgroundColor: string [], 17 | borderWidth: number 18 | } 19 | 20 | const initialState: MetricsState = { 21 | chartData: {}, 22 | }; 23 | 24 | export const metricsReducer = (state = initialState, action: any) => { 25 | switch (action.type) { 26 | case Type.POPULATE_CHART: 27 | return { 28 | ...state, 29 | chartData: action.payload, 30 | }; 31 | default: 32 | return state; 33 | } 34 | }; 35 | -------------------------------------------------------------------------------- /client/src/state/reducers/oauthReducer.ts: -------------------------------------------------------------------------------- 1 | import * as types from 
'../constants/oauthConstants'; 2 | import { Action } from '../actions/oauthActions'; 3 | 4 | export interface OauthState { 5 | email: string; 6 | LOGINLoading: boolean; 7 | error: string; 8 | preferences: any; 9 | } 10 | 11 | const initialState: OauthState = { 12 | email: "", 13 | // password: '', 14 | LOGINLoading: false, 15 | error: "", 16 | preferences: null, 17 | }; 18 | export const userReducer = ( 19 | state: OauthState = initialState, 20 | action: Action) => { 21 | switch (action.type) { 22 | case types.SET_ERROR: 23 | return { 24 | ...state, 25 | error: action.payload 26 | } 27 | case types.USER_LOGIN_REQUEST: 28 | return { ...state, authenticating: true }; 29 | case types.USER_LOGIN_SUCCESS: 30 | return { ...state, authenticating: false, email: action.payload.email}; 31 | case types.USER_LOGIN_FAIL: 32 | return { ...state, authenticating: false, error: action.payload }; 33 | 34 | } 35 | } -------------------------------------------------------------------------------- /client/src/state/reducers/userReducer.ts: -------------------------------------------------------------------------------- 1 | import * as types from '../constants/userConstants'; 2 | import { Action } from '../actions/userActions'; 3 | 4 | export interface UserState { 5 | email: string; 6 | signupLoading: boolean; 7 | LOGINLoading: boolean; 8 | error: string; 9 | preferences: any; 10 | } 11 | 12 | const initialState: UserState = { 13 | email: '', 14 | signupLoading: false, 15 | LOGINLoading: false, 16 | error: '', 17 | preferences: null, 18 | }; 19 | 20 | // 21 | export const userReducer = ( 22 | state: UserState = initialState, 23 | action: Action 24 | ) => { 25 | switch (action.type) { 26 | case types.SET_USER: 27 | return { 28 | ...state, 29 | email: action.payload, 30 | }; 31 | case types.SET_ERROR: 32 | return { 33 | ...state, 34 | error: action.payload, 35 | }; 36 | case types.USER_LOGIN_REQUEST: 37 | return { ...state, LOGINLoading: true }; 38 | case types.USER_LOGIN_SUCCESS: 39 | return { ...state, LOGINloading: false }; 40 | case types.USER_LOGIN_FAIL: 41 | return { ...state, LOGINLoading: false, error: action.payload }; 42 | case types.USER_SIGNUP_REQUEST: 43 | return { ...state, signupLoading: true }; 44 | case types.USER_SIGNUP_SUCCESS: 45 | return { ...state, signupLoading: false }; 46 | case types.USER_SIGNUP_FAIL: 47 | return { ...state, signupLoading: false, error: action.payload }; 48 | case types.USER_LOGOUT: 49 | return { ...initialState }; 50 | default: 51 | return state; 52 | } 53 | }; 54 | -------------------------------------------------------------------------------- /client/src/state/store.ts: -------------------------------------------------------------------------------- 1 | import { createStore } from 'redux'; 2 | import reducers from './reducers'; 3 | 4 | const store = createStore(reducers); 5 | 6 | export default store; 7 | -------------------------------------------------------------------------------- /client/src/styles.css: -------------------------------------------------------------------------------- 1 | * { 2 | margin: 0; 3 | padding: 0; 4 | } 5 | 6 | .sidepanel { 7 | width: 30%; 8 | } 9 | 10 | -------------------------------------------------------------------------------- /dist/bundle.js.LICENSE.txt: -------------------------------------------------------------------------------- 1 | /* 2 | object-assign 3 | (c) Sindre Sorhus 4 | @license MIT 5 | */ 6 | 7 | /*! 
8 | * Chart.js v3.4.1 9 | * https://www.chartjs.org 10 | * (c) 2021 Chart.js Contributors 11 | * Released under the MIT License 12 | */ 13 | 14 | /** 15 | * A better abstraction over CSS. 16 | * 17 | * @copyright Oleg Isonen (Slobodskoi) / Isonen 2014-present 18 | * @website https://github.com/cssinjs/jss 19 | * @license MIT 20 | */ 21 | 22 | /** @license Material-UI v4.11.2 23 | * 24 | * This source code is licensed under the MIT license found in the 25 | * LICENSE file in the root directory of this source tree. 26 | */ 27 | 28 | /** @license Material-UI v4.12.1 29 | * 30 | * This source code is licensed under the MIT license found in the 31 | * LICENSE file in the root directory of this source tree. 32 | */ 33 | 34 | /** @license React v0.20.2 35 | * scheduler.production.min.js 36 | * 37 | * Copyright (c) Facebook, Inc. and its affiliates. 38 | * 39 | * This source code is licensed under the MIT license found in the 40 | * LICENSE file in the root directory of this source tree. 41 | */ 42 | 43 | /** @license React v16.13.1 44 | * react-is.production.min.js 45 | * 46 | * Copyright (c) Facebook, Inc. and its affiliates. 47 | * 48 | * This source code is licensed under the MIT license found in the 49 | * LICENSE file in the root directory of this source tree. 50 | */ 51 | 52 | /** @license React v17.0.2 53 | * react-dom.production.min.js 54 | * 55 | * Copyright (c) Facebook, Inc. and its affiliates. 56 | * 57 | * This source code is licensed under the MIT license found in the 58 | * LICENSE file in the root directory of this source tree. 59 | */ 60 | 61 | /** @license React v17.0.2 62 | * react.production.min.js 63 | * 64 | * Copyright (c) Facebook, Inc. and its affiliates. 65 | * 66 | * This source code is licensed under the MIT license found in the 67 | * LICENSE file in the root directory of this source tree. 68 | */ 69 | -------------------------------------------------------------------------------- /dist/index.html: -------------------------------------------------------------------------------- 1 | KafkaFix
-------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.2' 2 | services: 3 | prometheus: 4 | image: prom/prometheus 5 | ports: 6 | - '9090:9090' 7 | volumes: 8 | - ./prometheus.yml:/etc/prometheus/prometheus.yml 9 | 10 | zookeeper: 11 | image: zookeeper 12 | ports: 13 | - '2181:2181' 14 | volumes: 15 | - ./zk-kafka/zookeeper/data:/data 16 | - ./zk-kafka/zookeeper/datalog:/datalog 17 | 18 | kafka: 19 | build: . 20 | ports: 21 | - '9092:9092' 22 | - '9999:9999' 23 | environment: 24 | KAFKA_BROKER_ID: 1 25 | KAFKA_ADVERTISED_LISTENERS: INSIDE://kafka:19092,OUTSIDE://${DOCKER_HOST_IP:-127.0.0.1}:9092 26 | KAFKA_LISTENERS: INSIDE://:19092,OUTSIDE://:9092 27 | KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE 28 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT 29 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 30 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 31 | KAFKA_LOG4J_LOGGERS: kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO 32 | KAFKA_OPTS: -javaagent:/usr/app/jmx_prometheus_javaagent.jar=7071:/usr/app/prom-jmx-agent-config.yml 33 | 34 | JMX_PORT: 9999 35 | KAFKA_JMX_HOSTNAME: ${DOCKER_HOST_IP:-127.0.0.1} 36 | KAFKA_JMX_OPTS: -Djava.rmi.server.hostname=127.0.0.1 37 | -Dcom.sun.management.jmxremote=true 38 | -Dcom.sun.management.jmxremote.local.only=false 39 | -Dcom.sun.management.jmxremote.rmi.port=9999 40 | -Dcom.sun.management.jmxremote.port=9999 41 | -Dcom.sun.management.jmxremote.authenticate=false 42 | -Dcom.sun.management.jmxremote.ssl=false 43 | 44 | volumes: 45 | - ./zk-kafka/kafka/data:/var/lib/kafka/data 46 | depends_on: 47 | - zookeeper 48 | -------------------------------------------------------------------------------- /documents/KafkaFix (png).png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/kafkafix/577e5110a29cf08e0a1397a712bc9b85e52812d0/documents/KafkaFix (png).png -------------------------------------------------------------------------------- /documents/KafkaFixIcon.icns: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/kafkafix/577e5110a29cf08e0a1397a712bc9b85e52812d0/documents/KafkaFixIcon.icns -------------------------------------------------------------------------------- /license.txt: -------------------------------------------------------------------------------- 1 | hello -------------------------------------------------------------------------------- /main.js: -------------------------------------------------------------------------------- 1 | const { app, BrowserWindow, ipcMain, dialog } = require('electron'); 2 | const path = require('path'); 3 | const fetch = require('node-fetch'); 4 | 5 | app.whenReady().then(() => createWindow()); 6 | 7 | app.on('activate', () => { 8 | if (BrowserWindow.getAllWindows().length === 0) createWindow(); 9 | }); 10 | 11 | app.on('window-all-closed', () => { 12 | if (process.platform !== 'darwin') app.quit(); 13 | }); 14 | 15 | ipcMain.on('open-partition', () => createPartitionWindow()); 16 | ipcMain.on('upload-file', () => uploadFile()); 17 | 18 | const createWindow = () => { 19 | const win = new BrowserWindow({ 20 | width: 1000, 21 | height: 800, 22 | webPreferences: { 23 | nodeIntegration: true, 24 | contextIsolation: false, 25 | }, 26 | }); 27 | const filePath = 
`file://${path.join(__dirname, './dist/index.html')}`; 28 | 29 | win.loadURL(filePath); 30 | }; 31 | 32 | const createPartitionWindow = () => { 33 | const win = new BrowserWindow({ 34 | width: 500, 35 | height: 500, 36 | }); 37 | 38 | win.loadURL('http://localhost:8080/partition'); 39 | }; 40 | 41 | const uploadFile = () => { 42 | dialog 43 | .showOpenDialog({ 44 | title: 'Select docker-compose file', 45 | defaultPath: path.join(__dirname, '../assets/'), 46 | buttonLabel: 'Select', 47 | filters: [ 48 | { 49 | name: 'YML file', 50 | extensions: ['yml', 'yaml'], 51 | }, 52 | ], 53 | properties: 54 | process.platform !== 'darwin' 55 | ? ['openFile'] 56 | : ['openFile', 'openDirectory'], 57 | }) 58 | .then(file => { 59 | if (!file.canceled) { 60 | const filePath = file.filePaths[0].toString(); 61 | const options = { 62 | method: 'POST', 63 | headers: { 'Content-Type': 'application/json' }, 64 | body: JSON.stringify({ filePath }), 65 | }; 66 | 67 | fetch('http://localhost:3000/api/composeup', options).catch(e => 68 | console.log('error: docker compose up', e) 69 | ); 70 | } 71 | }); 72 | }; 73 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafkafix", 3 | "description": "Kafka Monitoring Tool", 4 | "version": "1.0.0", 5 | "author": "Andy Wang, Kyu Park, Ranisha Rafeeque, Yom Woldemichael", 6 | "main": "main.js", 7 | "build": { 8 | "appId": "com.electron.kafkafix", 9 | "productName": "KafkaFix", 10 | "directories": { 11 | "output": "build" 12 | }, 13 | "mac": { 14 | "category": "public.app-category.developer-tools", 15 | "target": "pkg", 16 | "icon": "./assets/KafkaFixIcon.icns", 17 | "type": "distribution" 18 | }, 19 | "pkg": { 20 | "allowAnywhere": true, 21 | "license": "./license.txt", 22 | "overwriteAction": "upgrade" 23 | } 24 | }, 25 | "scripts": { 26 | "start": "concurrently \"npm run electron\" \"nodemon ./server/index.ts\"", 27 | "convert": "tsc -p ./", 28 | "electron": "concurrently \" npm run dev\" \"wait-on http://localhost:8080 && electron .\"", 29 | "dev": "webpack serve", 30 | "build": "webpack --mode production", 31 | "test": "echo \"Error: no test specified\" && exit 1", 32 | "bundle": "NODE_ENV=production webpack", 33 | "package": "electron-forge package", 34 | "make": "electron-forge make", 35 | "dist": "electron-builder" 36 | }, 37 | "repository": { 38 | "type": "git", 39 | "url": "git+https://github.com/oslabs-beta/kafkafix.git" 40 | }, 41 | "keywords": [ 42 | "Electron", 43 | "quick", 44 | "start", 45 | "tutorial", 46 | "demo", 47 | "typescript" 48 | ], 49 | "license": "ISC", 50 | "bugs": { 51 | "url": "https://github.com/oslabs-beta/kafkafix/issues" 52 | }, 53 | "homepage": "https://github.com/oslabs-beta/kafkafix#readme", 54 | "dependencies": { 55 | "@material-ui/core": "^4.11.4", 56 | "@material-ui/icons": "^4.11.2", 57 | "@svgr/webpack": "^5.5.0", 58 | "axios": "^0.21.1", 59 | "bcrypt": "^5.0.1", 60 | "chart.js": "^3.4.1", 61 | "clsx": "^1.1.1", 62 | "cors": "^2.8.5", 63 | "dotenv": "^10.0.0", 64 | "electron-is-dev": "^2.0.0", 65 | "express": "^4.17.1", 66 | "firebase": "^8.7.0", 67 | "form-data": "^4.0.0", 68 | "fs": "0.0.1-security", 69 | "js-cookie": "^2.2.1", 70 | "kafkajs": "^1.15.0", 71 | "material-ui": "^0.20.2", 72 | "mongoose": "^5.13.0", 73 | "node-fetch": "^2.6.1", 74 | "path": "^0.12.7", 75 | "prop-types": "^15.7.2", 76 | "react": "^17.0.2", 77 | "react-dom": "^17.0.2", 78 | "react-redux": "^7.2.4", 79 | 
"react-router-dom": "^5.2.0", 80 | "redux": "^4.1.0", 81 | "redux-thunk": "^2.3.0", 82 | "url-loader": "^4.1.1", 83 | "wait-on": "^6.0.0", 84 | "winston": "^3.3.3", 85 | "winston-mongodb": "^5.0.7", 86 | "ws": "^7.5.1" 87 | }, 88 | "devDependencies": { 89 | "@electron-forge/cli": "^6.0.0-beta.58", 90 | "@types/cors": "^2.8.10", 91 | "@types/dotenv": "^8.2.0", 92 | "@types/express": "^4.17.12", 93 | "@types/js-cookie": "^2.2.7", 94 | "@types/material-ui": "^0.21.9", 95 | "@types/node": "^15.14.0", 96 | "@types/node-fetch": "^2.5.11", 97 | "@types/react": "^17.0.11", 98 | "@types/react-dom": "^17.0.8", 99 | "@types/react-redux": "^7.1.16", 100 | "@types/react-router-dom": "^5.1.7", 101 | "@types/redux": "^3.6.0", 102 | "@types/redux-thunk": "^2.1.0", 103 | "@types/ws": "^7.4.5", 104 | "concurrently": "^6.2.0", 105 | "css-loader": "^5.2.6", 106 | "electron": "^13.1.7", 107 | "electron-builder": "^22.11.7", 108 | "electron-is-dev": "^2.0.0", 109 | "html-webpack-plugin": "^5.3.2", 110 | "nodemon": "^1.19.4", 111 | "style-loader": "^3.0.0", 112 | "ts-loader": "^9.2.3", 113 | "ts-node": "^10.0.0", 114 | "typescript": "^4.3.4", 115 | "webpack": "^5.41.0", 116 | "webpack-cli": "^4.7.2", 117 | "webpack-dev-server": "^3.11.2" 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /prom-jmx-agent-config.yml: -------------------------------------------------------------------------------- 1 | lowercaseOutputName: true 2 | 3 | rules: 4 | # Special cases and very specific rules 5 | - pattern: kafka.server<>Value 6 | name: kafka_server_$1_$2 7 | type: GAUGE 8 | labels: 9 | clientId: '$3' 10 | topic: '$4' 11 | partition: '$5' 12 | - pattern: kafka.server<>Value 13 | name: kafka_server_$1_$2 14 | type: GAUGE 15 | labels: 16 | clientId: '$3' 17 | broker: '$4:$5' 18 | - pattern: kafka.coordinator.(\w+)<>Value 19 | name: kafka_coordinator_$1_$2_$3 20 | type: GAUGE 21 | 22 | # Generic per-second counters with 0-2 key/value pairs 23 | - pattern: kafka.(\w+)<>Count 24 | name: kafka_$1_$2_$3_total 25 | type: COUNTER 26 | labels: 27 | '$4': '$5' 28 | '$6': '$7' 29 | - pattern: kafka.(\w+)<>Count 30 | name: kafka_$1_$2_$3_total 31 | type: COUNTER 32 | labels: 33 | '$4': '$5' 34 | - pattern: kafka.(\w+)<>Count 35 | name: kafka_$1_$2_$3_total 36 | type: COUNTER 37 | 38 | - pattern: kafka.server<>([a-z-]+) 39 | name: kafka_server_quota_$3 40 | type: GAUGE 41 | labels: 42 | resource: '$1' 43 | clientId: '$2' 44 | 45 | - pattern: kafka.server<>([a-z-]+) 46 | name: kafka_server_quota_$4 47 | type: GAUGE 48 | labels: 49 | resource: '$1' 50 | user: '$2' 51 | clientId: '$3' 52 | 53 | # Generic gauges with 0-2 key/value pairs 54 | - pattern: kafka.(\w+)<>Value 55 | name: kafka_$1_$2_$3 56 | type: GAUGE 57 | labels: 58 | '$4': '$5' 59 | '$6': '$7' 60 | - pattern: kafka.(\w+)<>Value 61 | name: kafka_$1_$2_$3 62 | type: GAUGE 63 | labels: 64 | '$4': '$5' 65 | - pattern: kafka.(\w+)<>Value 66 | name: kafka_$1_$2_$3 67 | type: GAUGE 68 | 69 | # Emulate Prometheus 'Summary' metrics for the exported 'Histogram's. 70 | # 71 | # Note that these are missing the '_sum' metric! 
72 | - pattern: kafka.(\w+)<>Count 73 | name: kafka_$1_$2_$3_count 74 | type: COUNTER 75 | labels: 76 | '$4': '$5' 77 | '$6': '$7' 78 | - pattern: kafka.(\w+)<>(\d+)thPercentile 79 | name: kafka_$1_$2_$3 80 | type: GAUGE 81 | labels: 82 | '$4': '$5' 83 | '$6': '$7' 84 | quantile: '0.$8' 85 | - pattern: kafka.(\w+)<>Count 86 | name: kafka_$1_$2_$3_count 87 | type: COUNTER 88 | labels: 89 | '$4': '$5' 90 | - pattern: kafka.(\w+)<>(\d+)thPercentile 91 | name: kafka_$1_$2_$3 92 | type: GAUGE 93 | labels: 94 | '$4': '$5' 95 | quantile: '0.$6' 96 | - pattern: kafka.(\w+)<>Count 97 | name: kafka_$1_$2_$3_count 98 | type: COUNTER 99 | - pattern: kafka.(\w+)<>(\d+)thPercentile 100 | name: kafka_$1_$2_$3 101 | type: GAUGE 102 | labels: 103 | quantile: '0.$4' 104 | 105 | - pattern: '"kafka.consumer"<>(Count)' 106 | name: kafka_consumer_$1_$3_$4 107 | labels: 108 | thread: $2 109 | - pattern: '"kafka.consumer"<>(Count|Value)' 110 | name: kafka_consumer_$1_$5_$6 111 | labels: 112 | hostport: $3:$4 113 | thread: $2 114 | - name: kafka_consumer_$1_$7_$8 115 | pattern: '"kafka.consumer"<>(Count|Value)' 116 | labels: 117 | hostport: $3:$4 118 | partition: $6 119 | thread: $2 120 | topic: $5 121 | - pattern: '"kafka.server"<>Count' 122 | name: kafka_server_$1_$5_total 123 | labels: 124 | hostport: $3:$4 125 | thread: $2 126 | - pattern: '"kafka.server"<>(Count|Value)' 127 | name: kafka_server_$1_$7_$8 128 | labels: 129 | hostport: $3:$4 130 | partition: $6 131 | thread: $2 132 | topic: $5 133 | - name: kafka_cluster_$1_$4 134 | pattern: '"kafka.cluster"<>Value' 135 | labels: 136 | partition: $3 137 | topic: $2 138 | - pattern: '"kafka.log"<>Value' 139 | name: kafka_log_$3 140 | labels: 141 | partition: $2 142 | topic: $1 143 | - pattern: '"kafka.server"<>Count' 144 | name: kafka_server_$1_$3_total 145 | labels: 146 | topic: $2 147 | type: COUNTER 148 | - pattern: '"kafka.server"<>(Count|Value)' 149 | name: kafka_server_$1_$3_$4 150 | labels: 151 | topic: $2 152 | - pattern: '"kafka.network"<>Value' 153 | name: kafka_network_$1_$3 154 | labels: 155 | processor: $2 156 | - pattern: '"kafka.(\w+)"<>Count' 157 | name: kafka_$1_$2_$3_total 158 | type: COUNTER 159 | - pattern: '"kafka.(\w+)"<>(Count|Value)' 160 | name: kafka_$1_$2_$3_$4 161 | 162 | #kafka.connect:type=app-info,client-id="{clientid}" 163 | #kafka.consumer:type=app-info,client-id="{clientid}" 164 | #kafka.producer:type=app-info,client-id="{clientid}" 165 | - pattern: 'kafka.(.+)<>start-time-ms' 166 | name: kafka_$1_start_time_seconds 167 | labels: 168 | clientId: '$2' 169 | help: 'Kafka $1 JMX metric start time seconds' 170 | type: GAUGE 171 | valueFactor: 0.001 172 | - pattern: 'kafka.(.+)<>(commit-id|version): (.+)' 173 | name: kafka_$1_$3_info 174 | value: 1 175 | labels: 176 | clientId: '$2' 177 | $3: '$4' 178 | help: 'Kafka $1 JMX metric info version and commit-id' 179 | type: GAUGE 180 | 181 | #kafka.producer:type=producer-topic-metrics,client-id="{clientid}",topic="{topic}"", partition="{partition}" 182 | #kafka.consumer:type=consumer-fetch-manager-metrics,client-id="{clientid}",topic="{topic}"", partition="{partition}" 183 | - pattern: kafka.(.+)<>(.+-total|compression-rate|.+-avg|.+-replica|.+-lag|.+-lead) 184 | name: kafka_$2_$6 185 | labels: 186 | clientId: '$3' 187 | topic: '$4' 188 | partition: '$5' 189 | help: 'Kafka $1 JMX metric type $2' 190 | type: GAUGE 191 | 192 | #kafka.producer:type=producer-topic-metrics,client-id="{clientid}",topic="{topic}" 193 | 
#kafka.consumer:type=consumer-fetch-manager-metrics,client-id="{clientid}",topic="{topic}"", partition="{partition}" 194 | - pattern: kafka.(.+)<>(.+-total|compression-rate|.+-avg) 195 | name: kafka_$2_$5 196 | labels: 197 | clientId: '$3' 198 | topic: '$4' 199 | help: 'Kafka $1 JMX metric type $2' 200 | type: GAUGE 201 | 202 | # #kafka.connect:type=connect-node-metrics,client-id="{clientid}",node-id="{nodeid}" 203 | # #kafka.consumer:type=consumer-node-metrics,client-id=consumer-1,node-id="{nodeid}" 204 | # - pattern: kafka.(.+)<>(.+-total|.+-avg) 205 | # name: kafka_$2_$5 206 | # labels: 207 | # clientId: '$3' 208 | # nodeId: '$4' 209 | # help: 'Kafka $1 JMX metric type $2' 210 | # type: UNTYPED 211 | 212 | #kafka.connect:type=kafka-metrics-count,client-id="{clientid}" 213 | #kafka.consumer:type=consumer-fetch-manager-metrics,client-id="{clientid}" 214 | #kafka.consumer:type=consumer-coordinator-metrics,client-id="{clientid}" 215 | #kafka.consumer:type=consumer-metrics,client-id="{clientid}" 216 | - pattern: kafka.(.+)<>(.+-total|.+-avg|.+-bytes|.+-count|.+-ratio|.+-age|.+-flight|.+-threads|.+-connectors|.+-tasks|.+-ago) 217 | name: kafka_$2_$4 218 | labels: 219 | clientId: '$3' 220 | help: 'Kafka $1 JMX metric type $2' 221 | type: GAUGE 222 | 223 | #kafka.connect:type=connector-task-metrics,connector="{connector}",task="{task}<> status" 224 | - pattern: 'kafka.connect<>status: ([a-z-]+)' 225 | name: kafka_connect_connector_status 226 | value: 1 227 | labels: 228 | connector: '$1' 229 | task: '$2' 230 | status: '$3' 231 | help: 'Kafka Connect JMX Connector status' 232 | type: GAUGE 233 | 234 | #kafka.connect:type=task-error-metrics,connector="{connector}",task="{task}" 235 | #kafka.connect:type=source-task-metrics,connector="{connector}",task="{task}" 236 | #kafka.connect:type=sink-task-metrics,connector="{connector}",task="{task}" 237 | #kafka.connect:type=connector-task-metrics,connector="{connector}",task="{task}" 238 | - pattern: kafka.connect<>(.+-total|.+-count|.+-ms|.+-ratio|.+-avg|.+-failures|.+-requests|.+-timestamp|.+-logged|.+-errors|.+-retries|.+-skipped) 239 | name: kafka_connect_$1_$4 240 | labels: 241 | connector: '$2' 242 | task: '$3' 243 | help: 'Kafka Connect JMX metric type $1' 244 | type: GAUGE 245 | 246 | #kafka.connect:type=connector-metrics,connector="{connector}" 247 | #kafka.connect:type=connect-worker-metrics,connector="{connector}" 248 | - pattern: kafka.connect<>([a-z-]+) 249 | name: kafka_connect_worker_$2 250 | labels: 251 | connector: '$1' 252 | help: 'Kafka Connect JMX metric $1' 253 | type: GAUGE 254 | 255 | #kafka.connect:type=connect-worker-metrics 256 | - pattern: kafka.connect<>([a-z-]+) 257 | name: kafka_connect_worker_$1 258 | help: 'Kafka Connect JMX metric worker' 259 | type: GAUGE 260 | 261 | #kafka.connect:type=connect-worker-rebalance-metrics 262 | - pattern: kafka.connect<>([a-z-]+) 263 | name: kafka_connect_worker_rebalance_$1 264 | help: 'Kafka Connect JMX metric rebalance information' 265 | type: GAUGE 266 | - pattern: kafka.cluster<>Value 267 | name: kafka_cluster_$1_$2 268 | labels: 269 | topic: '$3' 270 | partition: '$4' 271 | - pattern: kafka.log<>Value 272 | name: kafka_log_$1 273 | labels: 274 | topic: '$2' 275 | partition: '$3' 276 | - pattern: kafka.controller<>(Count|Value) 277 | name: kafka_controller_$1_$2 278 | - pattern: kafka.network<>Value 279 | name: kafka_network_$1_$2 280 | - pattern: kafka.network<>Count 281 | name: kafka_network_$1_$2_total 282 | labels: 283 | request: '$3' 284 | - pattern: kafka.network<>Count 285 | 
name: kafka_network_$1_$2 286 | labels: 287 | request: '$3' 288 | type: COUNTER 289 | - pattern: kafka.network<>Count 290 | name: kafka_network_$1_$2 291 | labels: 292 | request: '$3' 293 | - pattern: kafka.network<>Count 294 | name: kafka_network_$1_$2 295 | - pattern: kafka.server<>Count 296 | name: kafka_server_$1_$2_total 297 | labels: 298 | topic: '$3' 299 | - pattern: kafka.server<>Count 300 | name: kafka_server_$1_$2_total 301 | type: COUNTER 302 | 303 | - pattern: kafka.server<>(Count|Value) 304 | name: kafka_server_$1_$2 305 | labels: 306 | clientId: '$3' 307 | topic: '$4' 308 | partition: '$5' 309 | - pattern: kafka.server<>(Count|Value) 310 | name: kafka_server_$1_$2 311 | labels: 312 | topic: '$3' 313 | partition: '$4' 314 | - pattern: kafka.server<>(Count|Value) 315 | name: kafka_server_$1_$2 316 | labels: 317 | topic: '$3' 318 | type: COUNTER 319 | 320 | - pattern: kafka.server<>(Count|Value) 321 | name: kafka_server_$1_$2 322 | labels: 323 | clientId: '$3' 324 | broker: '$4:$5' 325 | - pattern: kafka.server<>(Count|Value) 326 | name: kafka_server_$1_$2 327 | labels: 328 | clientId: '$3' 329 | - pattern: kafka.server<>(Count|Value) 330 | name: kafka_server_$1_$2 331 | 332 | - pattern: kafka.(\w+)<>Count 333 | name: kafka_$1_$2_$3_total 334 | - pattern: kafka.(\w+)<>Count 335 | name: kafka_$1_$2_$3_total 336 | labels: 337 | topic: '$4' 338 | type: COUNTER 339 | - pattern: kafka.(\w+)<>Count 340 | name: kafka_$1_$2_$3_total 341 | labels: 342 | topic: '$4' 343 | partition: '$5' 344 | type: COUNTER 345 | - pattern: kafka.(\w+)<>(Count|Value) 346 | name: kafka_$1_$2_$3_$4 347 | type: COUNTER 348 | - pattern: kafka.(\w+)<>(Count|Value) 349 | name: kafka_$1_$2_$3_$6 350 | labels: 351 | '$4': '$5' 352 | -------------------------------------------------------------------------------- /prometheus.yml: -------------------------------------------------------------------------------- 1 | global: 2 | evaluation_interval: 5s 3 | scrape_configs: 4 | - job_name: 'kafka' 5 | static_configs: 6 | - targets: ['kafka:7071'] 7 | # - job_name: 'zookeeper' 8 | # static_configs: 9 | # -targets: 10 | # -.....:2181 11 | -------------------------------------------------------------------------------- /server/auth/auth.controller.ts: -------------------------------------------------------------------------------- 1 | import { RequestHandler } from "express"; 2 | import { handleAsync } from "../common"; 3 | const bcrypt = require("bcrypt"); 4 | import fs from "fs"; 5 | import path from "path"; 6 | 7 | export class AuthController { 8 | static signup: RequestHandler = async (req, res, next) => { 9 | const { email, password } = req.body; 10 | let data; 11 | try { 12 | data = JSON.parse( 13 | fs.readFileSync(path.resolve(__dirname, "../users.json")).toString() 14 | ); 15 | if (data[email]) return next({ err: "email has already been used" }); 16 | } catch (e) { 17 | return next({ err: "error with searching db for email: " + e }); 18 | } 19 | const hashedPassword = await bcrypt.hash(password, 5); 20 | const cookie = await bcrypt.hash(email, 5); 21 | try { 22 | data[email] = { email, password: hashedPassword, cookie }; 23 | fs.writeFileSync( 24 | path.resolve(__dirname, "../users.json"), 25 | JSON.stringify(data) 26 | ); 27 | 28 | } catch (e) { 29 | return next({ err: "error with inserting into user collection: " + e }); 30 | } 31 | res.cookie("SSID", cookie); 32 | return next(); 33 | }; 34 | 35 | 36 | static login: RequestHandler = async (req, res, next) => { 37 | const { email, password } = req.body; 38 | try { 
39 | const data = JSON.parse( 40 | fs.readFileSync(path.resolve(__dirname, "../users.json")).toString() 41 | ); 42 | 43 | const { password: hashedPassword, cookie } = data[email]; 44 | const passwordMatch = await bcrypt.compare(password, hashedPassword); 45 | if (!passwordMatch) return next({ err: "incorrect email or password" }); 46 | 47 | res.cookie("SSID", cookie); 48 | 49 | return next(); 50 | } catch (e) { 51 | return next({ err: "error with searching for user pass in db: " + e }); 52 | } 53 | }; 54 | 55 | static logout: RequestHandler = async (req, res, next) => { 56 | try { 57 | res.clearCookie("SSID"); 58 | return next(); 59 | } catch (e) { 60 | next({ 61 | log: "ERROR from AuthController.logout", 62 | message: { err: `Did not delete cookie properly ERROR: ${e}` }, 63 | }); 64 | } 65 | }; 66 | } 67 | -------------------------------------------------------------------------------- /server/auth/auth.routes.ts: -------------------------------------------------------------------------------- 1 | import { Application, Request, Response } from "express"; 2 | import { RouteConfig } from "../common/route.config"; 3 | import { AuthController } from "./auth.controller"; 4 | 5 | export class AuthRoutes extends RouteConfig { 6 | constructor(app: Application) { 7 | super(app, "AuthRoutes"); 8 | } 9 | 10 | routes() { 11 | /** 12 | * @POST api/signup 13 | * @desc sign up user 14 | */ 15 | this.app 16 | .route("/api/signup") 17 | .post([AuthController.signup], (req: Request, res: Response) => { 18 | res.status(200).json("successful signup"); 19 | }); 20 | 21 | /** 22 | * @POST api/login 23 | * @desc login user 24 | */ 25 | this.app 26 | .route("/api/login") 27 | .post([AuthController.login], (req: Request, res: Response) => { 28 | res.status(200).json("successful login"); 29 | }); 30 | 31 | this.app.post("/api/logout", AuthController.logout, (req, res) => { 32 | res.status(200).json("success"); 33 | }); 34 | 35 | return this.app; 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /server/common/handleAsync.ts: -------------------------------------------------------------------------------- 1 | export const handleAsync = async (promise: PromiseLike<any>) => { 2 | try { 3 | const data = await promise; 4 | 5 | return [data, null]; 6 | } catch (e) { 7 | const error: any = { 8 | status: 500, 9 | message: e, 10 | }; 11 | 12 | console.error(e); 13 | return [null, error]; 14 | } 15 | }; 16 | -------------------------------------------------------------------------------- /server/common/index.ts: -------------------------------------------------------------------------------- 1 | export { logCreator } from './logCreator'; 2 | export { handleAsync } from './handleAsync'; 3 | export { mockData } from './mockData'; 4 | -------------------------------------------------------------------------------- /server/common/logCreator.ts: -------------------------------------------------------------------------------- 1 | import { format } from 'winston'; 2 | const winston = require('winston'); 3 | 4 | interface IProps { 5 | namespace: string; 6 | log: any; 7 | } 8 | 9 | const { createLogger, transports } = winston; 10 | const { combine, json, metadata, timestamp } = format; 11 | 12 | export const logCreator = () => { 13 | const logger = createLogger({ 14 | level: 'error', 15 | format: combine( 16 | timestamp({ format: 'YYYY-MM-DD hh:mm:ss' }), 17 | json(), 18 | metadata() 19 | ), 20 | transports: [ 21 | new transports.Console(), 22 | new transports.File({ filename: 'error.log' }), 23 | ], 24 | }); 25 | 26 | return ({ 27 |
namespace, 28 | log: { message, broker, clientId, error, groupId }, 29 | }: IProps) => { 30 | logger.log({ 31 | level: 'error', 32 | namespace, 33 | message, 34 | error, 35 | clientId, 36 | broker, 37 | groupId, 38 | }); 39 | }; 40 | }; 41 | -------------------------------------------------------------------------------- /server/common/route.config.ts: -------------------------------------------------------------------------------- 1 | import { Application } from 'express'; 2 | 3 | export abstract class RouteConfig { 4 | app: Application; 5 | name: string; 6 | 7 | constructor(app: Application, name: string) { 8 | this.app = app; 9 | this.name = name; 10 | this.routes(); 11 | } 12 | 13 | routeName() { 14 | return this.name; 15 | } 16 | 17 | abstract routes(): Application; 18 | } 19 | -------------------------------------------------------------------------------- /server/index.ts: -------------------------------------------------------------------------------- 1 | import express, { Request, Response, ErrorRequestHandler } from 'express'; 2 | const path = require('path'); 3 | import * as http from 'http'; 4 | import WebSocket, { Server } from 'ws'; 5 | import dotenv from 'dotenv'; 6 | import cors from 'cors'; 7 | 8 | import { RouteConfig } from './common/route.config'; 9 | import { AuthRoutes } from './auth/auth.routes'; 10 | import { OAuthRoutes } from './oauth/oauth.routes'; 11 | import { GroupRoutes } from './kafka/group/group.routes'; 12 | import { JMXRoutes } from './jmx/jmx.routes'; 13 | import { KafkaRoutes } from './kafka/kafka/kafka.routes'; 14 | import { LogRoutes } from './log/log.routes'; 15 | import { TopicRoutes } from './kafka/topic/topic.routes'; 16 | 17 | dotenv.config(); 18 | 19 | // initialize configuration 20 | const app = express(); 21 | const PORT = process.env.PORT || 3000; 22 | export const server = http.createServer(app); 23 | const wss = new Server({ server }); 24 | 25 | // start DB 26 | // new DB(); 27 | 28 | // middlewares 29 | app.use(cors()); 30 | app.use(express.json()); 31 | app.use(express.urlencoded({ extended: true })); 32 | 33 | // routes 34 | const routes: Array = []; 35 | routes.push(new AuthRoutes(app)); 36 | routes.push(new OAuthRoutes(app)); 37 | routes.push(new GroupRoutes(app)); 38 | routes.push(new JMXRoutes(app)); 39 | routes.push(new KafkaRoutes(app)); 40 | routes.push(new LogRoutes(app)); 41 | routes.push(new TopicRoutes(app)); 42 | 43 | // server index html 44 | app.get('/partition', (req, res) => { 45 | return res 46 | .status(200) 47 | .sendFile(path.resolve(__dirname, '../client/src/index.html')); 48 | }); 49 | 50 | // 404 51 | app.use('*', (req: Request, res: Response) => { 52 | return res.status(404).send('Invalid Route'); 53 | }); 54 | 55 | // global error handler 56 | app.use(((err, req, res, next) => { 57 | const defaultErr = { 58 | status: 500, 59 | message: 'Error: Middleware error at global error handler', 60 | }; 61 | const errorObj = Object.assign({}, defaultErr, err); 62 | return res.status(errorObj.status).json(errorObj.message); 63 | }) as ErrorRequestHandler); 64 | 65 | // server 66 | server.listen(PORT, () => { 67 | console.log(`Server on port ${PORT}`); 68 | app.locals.server = server; //! 
69 | 70 | routes.forEach((route: RouteConfig) => { 71 | console.log(`Route configured: ${route.routeName()}`); 72 | }); 73 | }); 74 | 75 | // websocket server 76 | wss.once('connection', (ws: WebSocket) => { 77 | app.locals.ws = ws; 78 | console.log('ws connected'); 79 | 80 | ws.on('close', () => console.log('ws disconnected')); 81 | }); 82 | -------------------------------------------------------------------------------- /server/jmx/MBeans.ts: -------------------------------------------------------------------------------- 1 | export enum MBeans { 2 | underReplicatedPartitions = 'kafka_server_replicamanager_underreplicatedpartitions', 3 | isrShrinksPerSec = 'kafka_server_replicamanager_isrshrinks_total', 4 | activeControllerCount = 'kafka_controller_kafkacontroller_activecontrollercount', 5 | offlinePartitionsCount = 'kafka_controller_kafkacontroller_offlinepartitionscount', 6 | leaderElectionRateAndTimeMs = 'kafka_controller_controllerstats_leaderelectionrateandtimems', 7 | uncleanLeaderElectionsPerSec = 'kafka_controller_controllerstats_uncleanleaderelectionenablerateandtimems', 8 | totalTimeMs = 'kafka_network_requestmetrics_totaltimems', 9 | purgatorySize = 'kafka_server_delayedoperationpurgatory_purgatorysize', 10 | bytesInTotal = 'kafka_server_brokertopicmetrics_bytesin_total', 11 | bytesOutTotal = 'kafka_server_brokertopicmetrics_bytesout_total', 12 | requestsPerSecond = 'kafka_network_requestmetrics_requests_total', 13 | messagesPerSecond = 'kafka_server_brokertopicmetrics_messagesin_total', 14 | 15 | garbageCollectionCount = 'jvm_gc_collection_seconds_count', 16 | garbageCollectionTime = 'jvm_gc_collection_seconds_sum', 17 | 18 | zkRequestLatencyMs = 'kafka_server_zookeeperclientmetrics_zookeeperrequestlatencyms', 19 | zkRequestLatencyMsCount = 'kafka_server_zookeeperclientmetrics_zookeeperrequestlatencyms_count', 20 | 21 | memoryBytesUsed = 'jvm_memory_bytes_used', 22 | cpuUsage = 'process_cpu_seconds_total', 23 | memoryCacheuser = 'jvm_memory_pool_bytes_used', 24 | } 25 | -------------------------------------------------------------------------------- /server/jmx/host.metrics.controller.ts: -------------------------------------------------------------------------------- 1 | import fetch from 'node-fetch'; 2 | import { RequestHandler } from 'express'; 3 | import { handleAsync } from '../common'; 4 | import { MBeans } from './MBeans'; 5 | 6 | const url = 'http://localhost:9090/api/v1/query?query=' 7 | 8 | export class HostMetricsControllerr { 9 | 10 | /** 11 | * @JMXAttribute Memory bytes used 12 | * @MBean jvm_memory_bytes_used 13 | * @desc Used bytes of a given JVM memory area 14 | * @type Gauge 15 | */ 16 | 17 | 18 | /** 19 | * @JMXAttribute CPU Usage 20 | * @MBean process_cpu_seconds_total 21 | * @desc total cpu usage 22 | * @type Gauge 23 | */ 24 | 25 | /** 26 | * @JMXAttribute Memory cache 27 | * @MBean jvm_memory_pool_bytes_used {Code Cache} 28 | * @desc Used bytes of a given JVM memory pool 29 | * @type Gauge 30 | */ 31 | } 32 | -------------------------------------------------------------------------------- /server/jmx/jmx.routes.ts: -------------------------------------------------------------------------------- 1 | import { Application } from 'express'; 2 | import { RouteConfig } from '../common/route.config'; 3 | 4 | export class JMXRoutes extends RouteConfig { 5 | constructor(app: Application) { 6 | super(app, 'JMXRoutes'); 7 | } 8 | 9 | routes() { 10 | return this.app; 11 | } 12 | } 13 | 
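Note that JMXRoutes above registers no endpoints, so the JVM, Kafka, and ZooKeeper metrics controllers that follow are never mounted. Below is a minimal sketch, assuming hypothetical '/api/metrics/*' paths and a hypothetical class name, of how those existing middlewares could be wired up in the same style as the other route classes; it is illustrative only and not part of the repository.

// Illustrative sketch only. Each metrics controller fetches one MBean from
// Prometheus, stores the result on res.locals under its own name, and calls next();
// the terminal handler collects those keys into the JSON response.
import { Application, Request, Response } from 'express';
import { RouteConfig } from '../common/route.config';
import { KafkaMetricsController } from './kafka.metrics.controller';
import { JVMMetricsController } from './jvm.metrics.controller';

export class JMXMetricsRoutesSketch extends RouteConfig {
  constructor(app: Application) {
    super(app, 'JMXMetricsRoutesSketch');
  }

  routes() {
    this.app.route('/api/metrics/kafka').get(
      [
        KafkaMetricsController.underReplicatedPartitions,
        KafkaMetricsController.activeControllerCount,
      ],
      (req: Request, res: Response) => {
        const { underReplicatedPartitions, activeControllerCount } = res.locals;
        return res
          .status(200)
          .json({ underReplicatedPartitions, activeControllerCount });
      }
    );

    this.app.route('/api/metrics/jvm').get(
      [JVMMetricsController.garbageCollectionCount],
      (req: Request, res: Response) => {
        return res.status(200).json(res.locals.garbageCollectionCount);
      }
    );

    return this.app;
  }
}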
-------------------------------------------------------------------------------- /server/jmx/jvm.metrics.controller.ts: -------------------------------------------------------------------------------- 1 | import fetch from 'node-fetch'; 2 | import { RequestHandler } from 'express'; 3 | 4 | import { handleAsync } from '../common'; 5 | import { MBeans } from './MBeans'; 6 | 7 | const url = process.env.PROMETHEUS; 8 | 9 | export class JVMMetricsController { 10 | /** 11 | * @JMXAttribute CollectionCount 12 | * @MBean java.lang:type=GarbageCollector,name=G1 (Young|Old) Generation 13 | * @desc The total count of young or old garbage collection processes executed by the JVM 14 | * @type Other 15 | */ 16 | static garbageCollectionCount: RequestHandler = async (req, res, next) => { 17 | const MBean = MBeans.garbageCollectionCount; 18 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 19 | const data = await response.json(); 20 | 21 | if (error) return next(error); 22 | const old: any[] = []; 23 | const young: any[] = []; 24 | 25 | data.data.result.forEach((data: any) => { 26 | data.metric.gc === 'G1 Old Generation' ? old.push(data) : null; 27 | data.metric.gc === 'G1 Young Generation' ? young.push(data) : null; 28 | }); 29 | 30 | res.locals.garbageCollectionCount = { old, young }; 31 | 32 | return next(); 33 | }; 34 | 35 | /** 36 | * @JMXAttribute CollectionTime 37 | * @MBean java.lang:type=GarbageCollector,name=G1 (Young|Old) Generation 38 | * @desc The total amount of time (in milliseconds) the JVM has spent executing young or old garbage collection processes 39 | * @type Other 40 | */ 41 | static garbageCollectionTime: RequestHandler = async (req, res, next) => { 42 | const MBean = MBeans.garbageCollectionTime; 43 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 44 | const data = await response.json(); 45 | 46 | if (error) return next(error); 47 | const old: any[] = []; 48 | const young: any[] = []; 49 | 50 | data.data.result.forEach((data: any) => { 51 | data.metric.gc === 'G1 Old Generation' ? old.push(data) : null; 52 | data.metric.gc === 'G1 Young Generation' ? 
young.push(data) : null; 53 | }); 54 | 55 | res.locals.garbageCollectionTime = { old, young }; 56 | 57 | return next(); 58 | }; 59 | } 60 | -------------------------------------------------------------------------------- /server/jmx/kafka.metrics.controller.ts: -------------------------------------------------------------------------------- 1 | import fetch from 'node-fetch'; 2 | import { RequestHandler } from 'express'; 3 | 4 | import { handleAsync } from '../common'; 5 | import { MBeans } from './MBeans'; 6 | 7 | const url = 'http://localhost:9090/api/v1/query?query='; 8 | 9 | export class KafkaMetricsController { 10 | /** 11 | * @name UnderReplicatedPartitions 12 | * @MBean name kafka.server: type = ReplicaManager, name = UnderReplicatedPartitions 13 | * @desc Number of unreplicated partitions 14 | * @metricType Gauge 15 | */ 16 | static underReplicatedPartitions: RequestHandler = async (req, res, next) => { 17 | const MBean = MBeans.underReplicatedPartitions; 18 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 19 | const data = await response.json(); // CHECK type - changed handle async 20 | 21 | if (error) return next(error); 22 | res.locals.underReplicatedPartitions = data.data.result[0].value; 23 | 24 | return next(); 25 | }; 26 | 27 | /** 28 | * @name IsrShrinksPerSec/IsrExpandsPerSec 29 | * @MBean kafka.server:type=ReplicaManager,name=IsrShrinksPerSec 30 | * @MBean kafka.server:type=ReplicaManager,name=IsrExpandsPerSec 31 | * @desc Rate at which the pool of in-sync replicas (ISRs) shrinks/expands 32 | * @metricType Counter 33 | */ 34 | // CHECK only shows total: find shrink/expand 35 | static isrShrinksPerSec: RequestHandler = async (req, res, next) => { 36 | const MBean = MBeans.isrShrinksPerSec; 37 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 38 | const data = await response.json(); 39 | 40 | if (error) return next(error); 41 | res.locals.isrShrinksPerSec = data.data.result; 42 | 43 | return next(); 44 | }; 45 | 46 | /** 47 | * @name ActiveControllerCount 48 | * @MBean kafka.controller:type=KafkaController,name=ActiveControllerCount 49 | * @desc Number of active controllers in cluster 50 | * @metricType Gauge 51 | */ 52 | static activeControllerCount: RequestHandler = async (req, res, next) => { 53 | const MBean = MBeans.activeControllerCount; 54 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 55 | const data = await response.json(); 56 | 57 | if (error) return next(error); 58 | res.locals.activeControllerCount = data.data.result; 59 | 60 | return next(); 61 | }; 62 | 63 | /** 64 | * @name OfflinePartitionsCount 65 | * @MBean kafka.controller:type=KafkaController,name=OfflinePartitionsCount 66 | * @desc Number of offline partitions 67 | * @metricType Gauge 68 | */ 69 | static offlinePartitionsCount: RequestHandler = async (req, res, next) => { 70 | const MBean = MBeans.offlinePartitionsCount; 71 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 72 | const data = await response.json(); 73 | 74 | if (error) return next(error); 75 | res.locals.offlinePartitionsCount = data.data.result; 76 | 77 | return next(); 78 | }; 79 | 80 | /** 81 | * @name LeaderElectionRateAndTimeMs 82 | * @MBean kafka.controller:type=ControllerStats,name=LeaderElectionRateAndTimeMs 83 | * @desc Leader election rate and latency 84 | * @metricType Gauge 85 | */ 86 | static leaderElectionRateAndTimeMs: RequestHandler = async ( 87 | req, 88 | res, 89 | next 90 | ) => { 91 | const MBean = 
MBeans.leaderElectionRateAndTimeMs; 92 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 93 | const data = await response.json(); 94 | 95 | if (error) return next(error); 96 | res.locals.leaderElectionRateAndTimeMs = data.data.result; 97 | 98 | return next(); 99 | }; 100 | 101 | /** 102 | * @name UncleanLeaderElectionsPerSec 103 | * @MBean kafka.controller:type=ControllerStats,name=UncleanLeaderElectionsPerSec 104 | * @desc Number of “unclean” elections per second 105 | * @metricType Gauge 106 | */ 107 | static uncleanLeaderElectionsPerSec: RequestHandler = async ( 108 | req, 109 | res, 110 | next 111 | ) => { 112 | const MBean = MBeans.uncleanLeaderElectionsPerSec; 113 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 114 | const data = await response.json(); 115 | 116 | if (error) return next(error); 117 | res.locals.uncleanLeaderElectionsPerSec = data.data.result; 118 | 119 | return next(); 120 | }; 121 | 122 | /** 123 | * @name TotalTimeMs 124 | * @MBean kafka.network:type=RequestMetrics,name=TotalTimeMs,request={Produce|FetchConsumer|FetchFollower} 125 | * @desc Total time (in ms) to serve the specified request (Produce/Fetch) 126 | * @metricType Gauge 127 | */ 128 | static totalTimeMs: RequestHandler = async (req, res, next) => { 129 | const MBean = MBeans.totalTimeMs; 130 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 131 | const data = await response.json(); 132 | 133 | if (error) return next(error); 134 | const fetchConsumer: any[] = []; 135 | const fetchFollower: any[] = []; 136 | const produce: any[] = []; 137 | 138 | data.data.result.forEach((data: any) => { 139 | data.metric.request === 'FetchConsumer' ? fetchConsumer.push(data) : null; 140 | data.metric.request === 'FetchFollower' ? fetchFollower.push(data) : null; 141 | data.metric.request === 'Produce' ? produce.push(data) : null; 142 | }); 143 | 144 | res.locals.totalTimeMs = { fetchConsumer, fetchFollower, produce }; 145 | 146 | console.log(fetchConsumer); 147 | console.log(fetchFollower); 148 | console.log(produce); 149 | 150 | return next(); 151 | }; 152 | 153 | /** 154 | * @name PurgatorySize 155 | * @MBean kafka.server:type=DelayedOperationPurgatory,name=PurgatorySize,delayedOperation={Produce|Fetch} 156 | * @desc Number of requests waiting in producer purgatory/Number of requests waiting in fetch purgatory 157 | * @metricType Gauge 158 | */ 159 | static purgatorySize: RequestHandler = async (req, res, next) => { 160 | const MBean = MBeans.purgatorySize; 161 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 162 | const data = await response.json(); 163 | 164 | if (error) return next(error); 165 | const Fetch: any[] = []; 166 | const produce: any[] = []; 167 | 168 | data.data.result.forEach((data: any) => { 169 | data.metric.delayedOperation === 'Fetch' ? Fetch.push(data) : null; 170 | data.metric.delayedOperation === 'Produce' ? 
produce.push(data) : null; 171 | }); 172 | 173 | res.locals.purgatorySize = { fetch: Fetch, produce }; 174 | 175 | return next(); 176 | }; 177 | 178 | /** 179 | * @name BytesInPerSec/BytesOutPerSec 180 | * @MBean kafka.server:type=BrokerTopicMetrics,name={BytesInPerSec|BytesOutPerSec} 181 | * @desc Aggregate incoming/outgoing byte rate 182 | * @metricType Counter 183 | */ 184 | static bytesPerSec: RequestHandler = async (req, res, next) => { 185 | const MBeanIn = MBeans.bytesInTotal; 186 | const MBeanOut = MBeans.bytesOutTotal; 187 | 188 | const [bytesIn, inError] = await handleAsync(fetch(`${url}${MBeanIn}`)); 189 | const [bytesOut, outError] = await handleAsync(fetch(`${url}${MBeanOut}`)); 190 | 191 | const bytesInTotal = await bytesIn.json(); 192 | const bytesOutTotal = await bytesOut.json(); 193 | 194 | if (inError) return next(inError); 195 | if (outError) return next(outError); 196 | 197 | res.locals.bytesPerTotal = { bytesInTotal, bytesOutTotal }; 198 | 199 | return next(); 200 | }; 201 | 202 | /** 203 | * @name RequestsPerSecond 204 | * @MBean kafka.network:type=RequestMetrics,name=RequestsPerSec,request={Produce|FetchConsumer|FetchFollower},version={0|1|2|3|…} 205 | * @desc Number of (producer|consumer|follower) requests per second 206 | * @metricType Counter 207 | */ 208 | static requestsPerSecond: RequestHandler = async (req, res, next) => { 209 | const MBean = MBeans.requestsPerSecond; 210 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 211 | const data = await response.json(); 212 | 213 | if (error) return next(error); 214 | 215 | const fetchConsumer: any[] = []; 216 | const fetchFollower: any[] = []; 217 | const produce: any[] = []; 218 | 219 | data.data.result.forEach((data: any) => { 220 | data.metric.request === 'FetchConsumer' 221 | ? fetchConsumer.push(data) 222 | : null; 223 | data.metric.request === 'FetchFollower' 224 | ? fetchFollower.push(data) 225 | : null; 226 | data.metric.request === 'Produce' ?
produce.push(data) : null; 227 | }); 228 | 229 | res.locals.requestsPerSecond = { fetchConsumer, fetchFollower, produce }; 230 | 231 | // console.log(res.locals.fetchConsumer); 232 | // console.log(res.locals.fetchFollower); 233 | // console.log(res.locals.produce); 234 | 235 | return next(); 236 | }; 237 | 238 | /** 239 | * @name MessagesPerSecond 240 | * @MBean kafka.server:type=BrokerTopicMetrics,name=MessagesInPerSec 241 | * @desc Number of message requests per second 242 | * @metricType Counter 243 | */ 244 | static messagesPerSecond: RequestHandler = async (req, res, next) => { 245 | const MBean = MBeans.messagesPerSecond; 246 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 247 | const data = await response.json(); 248 | 249 | if (error) return next(error); 250 | res.locals.messagesPerSecond = data.data.result; 251 | 252 | return next(); 253 | }; 254 | } 255 | -------------------------------------------------------------------------------- /server/jmx/zookeeper.metrics.controller.ts: -------------------------------------------------------------------------------- 1 | import fetch from 'node-fetch'; 2 | import { RequestHandler } from 'express'; 3 | import { handleAsync } from '../common'; 4 | import { MBeans } from './MBeans'; 5 | 6 | const url = 'http://localhost:9090/api/v1/query?query='; 7 | 8 | export class ZookeeperMetricsController { 9 | /** 10 | * @name zkRequestLatencyMs 11 | * @MBean name kafka.server: type = ZooKeeperClientMetrics, name = ZooKeeperRequestLatencyMs 12 | * @desc zookeeper request latency ms 13 | * @metricType Gauge 14 | */ 15 | static zkRequestLatencyMs: RequestHandler = async (req, res, next) => { 16 | const ws: WebSocket = req.app.locals.ws; 17 | const MBean = MBeans.zkRequestLatencyMs; 18 | 19 | setInterval(async () => { 20 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 21 | const data = await response.json(); 22 | 23 | if (error) return next(error); 24 | res.locals.zkRequestLatencyMs = data.data.result; 25 | ws.send(JSON.stringify(data.data.result)); 26 | }, 5000); 27 | 28 | return next(); 29 | }; 30 | 31 | /** 32 | * @name zkRequestLatencyMsCount 33 | * @MBean name kafka.server: type = ZooKeeperClientMetrics, name = ZooKeeperRequestLatencyMs 34 | * @desc zookeeper request latency ms count 35 | * @metricType Gauge 36 | */ 37 | static zkRequestLatencyMsCount: RequestHandler = async (req, res, next) => { 38 | const MBean = MBeans.zkRequestLatencyMsCount; 39 | const [response, error] = await handleAsync(fetch(`${url}${MBean}`)); 40 | const data = await response.json(); 41 | 42 | if (error) return next(error); 43 | res.locals.zkRequestLatencyMsCount = data.data.result; 44 | 45 | return next(); 46 | }; 47 | } 48 | -------------------------------------------------------------------------------- /server/kafka/group/group.controller.ts: -------------------------------------------------------------------------------- 1 | import { RequestHandler } from 'express'; 2 | import { Admin } from 'kafkajs'; 3 | 4 | import { handleAsync } from '../../common'; 5 | 6 | export class GroupController { 7 | /** 8 | * @desc return the consumer group offset for a topic 9 | * @param {string} topic 10 | * @returns {[]{}} 11 | */ 12 | static groupOffsets: RequestHandler = async (req, res, next) => { 13 | const admin: Admin = req.app.locals.admin; 14 | const { groupId, topic } = req.body; 15 | const [offsets, error] = await handleAsync( 16 | admin.fetchOffsets({ groupId, topic, resolveOffsets: true }) 17 | ); 18 | 19 | if (error) return next(error); 20 |
res.locals.offsets = offsets; 21 | 22 | return next(); 23 | }; 24 | 25 | /** 26 | * @desc resets the consumer group offsets to the earliest or latest offset 27 | * @param {string} groupId 28 | * @param {string} topic 29 | * @param {boolean} earliest 30 | */ 31 | static resetGroupOffsets: RequestHandler = async (req, res, next) => { 32 | const admin: Admin = req.app.locals.admin; 33 | const { groupId, topic, earliest } = req.body; 34 | const [, error] = await handleAsync( 35 | admin.resetOffsets({ groupId, topic, earliest }) 36 | ); 37 | 38 | if (error) return next(error); 39 | 40 | return next(); 41 | }; 42 | 43 | /** 44 | * @desc set the consumer group offset to any value 45 | * @param {string} groupId 46 | * @param {string} topic 47 | * @param {[]{}} partitions 48 | * @param {number} partition 49 | * @param {string} offset 50 | */ 51 | static setGroupOffsets: RequestHandler = async (req, res, next) => { 52 | const admin: Admin = req.app.locals.admin; 53 | const { groupId, topic, partition, offset } = req.body; 54 | const [, error] = await handleAsync( 55 | admin.setOffsets({ groupId, topic, partitions: [{ partition, offset }] }) 56 | ); 57 | 58 | if (error) return next(error); 59 | 60 | return next(); 61 | }; 62 | 63 | /** 64 | * @desc resets the consumer group offset to the earliest or lastest offset (latest by default) 65 | */ 66 | 67 | static resetGroupOffsetsByTimestamp: RequestHandler = async ( 68 | req, 69 | res, 70 | next 71 | ) => {}; 72 | 73 | /** 74 | * @desc list groups available on the broker 75 | * @returns {[]{}} 76 | */ 77 | static listGroups: RequestHandler = async (req, res, next) => { 78 | const admin: Admin = req.app.locals.admin; 79 | const [groups, error] = await handleAsync(admin.listGroups()); 80 | 81 | if (error) return next(error); 82 | res.locals.groups = groups; 83 | 84 | return next(); 85 | }; 86 | 87 | /** 88 | * @desc describe consumer groups by groupIds 89 | * @param {string} groupId 90 | * @returns {[]{}} 91 | */ 92 | static describeGroups: RequestHandler = async (req, res, next) => { 93 | const admin: Admin = req.app.locals.admin; 94 | const { groupId } = req.body; 95 | const [groups, error] = await handleAsync(admin.describeGroups([groupId])); 96 | 97 | if (error) return next(error); 98 | res.locals.groups = groups; 99 | 100 | return next(); 101 | }; 102 | 103 | /** 104 | * @desc delete groups by groupId 105 | * @param {string} groupId 106 | */ 107 | static deleteGroups: RequestHandler = async (req, res, next) => { 108 | const admin: Admin = req.app.locals.admin; 109 | const { groupId } = req.body; 110 | const [, error] = await handleAsync(admin.deleteGroups([groupId])); 111 | 112 | if (error) return next(error); 113 | 114 | return next(); 115 | }; 116 | } 117 | -------------------------------------------------------------------------------- /server/kafka/group/group.routes.ts: -------------------------------------------------------------------------------- 1 | import { Application, Request, Response } from 'express'; 2 | 3 | import { RouteConfig } from '../../common/route.config'; 4 | import { GroupController } from './group.controller'; 5 | 6 | export class GroupRoutes extends RouteConfig { 7 | constructor(app: Application) { 8 | super(app, 'GroupRoutes'); 9 | } 10 | 11 | routes() { 12 | /** 13 | * @GET api/grouplist 14 | * @desc get list of groups 15 | */ 16 | this.app 17 | .route('/api/grouplist') 18 | .get([GroupController.listGroups], (req: Request, res: Response) => { 19 | const { groups } = res.locals; 20 | return res.status(200).json({ groups 
}); 21 | }); 22 | 23 | /** 24 | * @GET api/groups 25 | * @desc get group description 26 | */ 27 | this.app 28 | .route('/api/groups') 29 | .get([GroupController.describeGroups], (req: Request, res: Response) => { 30 | const { groups } = res.locals; 31 | return res.status(200).json({ groups }); 32 | }); 33 | 34 | /** 35 | * @DELETE api/groups 36 | * @desc deletes a group 37 | */ 38 | this.app 39 | .route('/api/groups') 40 | .delete([GroupController.deleteGroups], (req: Request, res: Response) => { 41 | return res.sendStatus(200); 42 | }); 43 | 44 | /** 45 | * @GET api/groupoffsets 46 | * @desc get offsets of a group 47 | */ 48 | this.app 49 | .route('/api/groupoffsets') 50 | .get([GroupController.groupOffsets], (req: Request, res: Response) => { 51 | const { offsets } = res.locals; 52 | return res.status(200).json({ offsets }); 53 | }); 54 | 55 | /** 56 | * @PUT api/groupoffsets 57 | * @desc reset group offsets 58 | */ 59 | this.app 60 | .route('/api/groupoffsets') 61 | .put( 62 | [GroupController.resetGroupOffsets], 63 | (req: Request, res: Response) => { 64 | res.sendStatus(200); 65 | } 66 | ); 67 | 68 | return this.app; 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /server/kafka/kafka/consumer.controller.ts: -------------------------------------------------------------------------------- 1 | import { RequestHandler } from 'express'; 2 | import { Consumer } from 'kafkajs'; 3 | import * as WebSocket from 'ws'; 4 | 5 | import { handleAsync } from '../../common'; 6 | 7 | export class ConsumerController { 8 | /** 9 | * @desc starts a consumer for given topic and groupId 10 | */ 11 | static startConsumer: RequestHandler = async (req, res, next) => { 12 | const { topic, groupId } = req.body; 13 | const ws: WebSocket = req.app.locals.ws; 14 | const consumer: Consumer = req.app.locals.kafka.consumer({ groupId }); 15 | const store = req.app.locals.consumers; 16 | 17 | !store[topic] ? 
(store[topic] = { [groupId]: consumer }) : (store[topic][groupId] = consumer); 18 | 19 | const [, connectErr] = await handleAsync(consumer.connect()); 20 | const [, subscribeErr] = await handleAsync( 21 | consumer.subscribe({ topic, fromBeginning: true }) 22 | ); 23 | 24 | if (connectErr) return next(connectErr); 25 | if (subscribeErr) return next(subscribeErr); 26 | 27 | await consumer.run({ 28 | partitionsConsumedConcurrently: 1, 29 | eachMessage: async ({ 30 | topic, 31 | partition, 32 | message: { timestamp, value }, 33 | }) => { 34 | const messageFormat = `timestamp: ${timestamp} topic: ${topic} partition: ${partition} message: ${value}`; 35 | console.log('message consumed'); 36 | ws.send(messageFormat); 37 | }, 38 | }); 39 | 40 | return next(); 41 | }; 42 | 43 | /** 44 | * @desc stops consumer by given topic name and groupId 45 | */ 46 | static stopConsumer: RequestHandler = async (req, res, next) => { 47 | const { topic, groupId } = req.body; 48 | const consumer = req.app.locals.consumers[topic][groupId]; 49 | 50 | if (!consumer) return next(new Error('consumer does not exist')); 51 | 52 | const [, error] = await handleAsync(consumer.disconnect()); 53 | if (error) return next(error); 54 | 55 | delete req.app.locals.consumers[topic][groupId]; 56 | 57 | return next(); 58 | }; 59 | } 60 | -------------------------------------------------------------------------------- /server/kafka/kafka/kafka.controller.ts: -------------------------------------------------------------------------------- 1 | import { RequestHandler } from 'express'; 2 | import { Admin, Kafka, logLevel } from 'kafkajs'; 3 | import { exec } from 'child_process'; 4 | 5 | import { LogController } from '../../log/log.controller'; 6 | import { handleAsync } from '../../common'; 7 | 8 | export class KafkaController { 9 | /** 10 | * @desc starts an instance of kafka 11 | */ 12 | static kafka: RequestHandler = async (req, res, next) => { 13 | const PORT: number = req.body.PORT; 14 | const kafka = new Kafka({ 15 | clientId: 'kafkafix', 16 | brokers: [`localhost:${PORT}`], 17 | logLevel: logLevel.ERROR, 18 | logCreator: LogController.logCreator, 19 | }); 20 | 21 | req.app.locals.kafka = kafka; 22 | req.app.locals.consumers = {}; 23 | req.app.locals.producers = {}; 24 | 25 | return next(); 26 | }; 27 | 28 | /** 29 | * @desc starts an instance of admin 30 | */ 31 | static startAdmin: RequestHandler = async (req, res, next) => { 32 | const kafka: Kafka = req.app.locals.kafka; 33 | const admin = kafka.admin(); 34 | const [, error] = await handleAsync(admin.connect()); 35 | 36 | if (error) return next(error); 37 | req.app.locals.admin = admin; 38 | 39 | return next(); 40 | }; 41 | 42 | /** 43 | * @desc disconnects admin 44 | */ 45 | static disconnectAdmin: RequestHandler = async (req, res, next) => { 46 | const admin: Admin = req.app.locals.admin; 47 | const [, error] = await handleAsync(admin.disconnect()); 48 | 49 | if (error) return next(error); 50 | 51 | return next(); 52 | }; 53 | 54 | /** 55 | * Images must be on the computer 56 | * @desc starts all containers 57 | */ 58 | static composeUp: RequestHandler = async (req, res, next) => { 59 | const { filePath } = req.body; 60 | const folderPath = filePath.slice(0, filePath.lastIndexOf('\\')); 61 | const cwd = (req.app.locals.path = folderPath); 62 | exec(`docker compose up`, { cwd }); 63 | 64 | return next(); 65 | }; 66 | 67 | /** 68 | * @desc stops all containers 69 | */ 70 | static composeDown: RequestHandler = async (req, res, next) => { 71 | const cwd = req.app.locals.path; 72 | exec(`docker compose down`, { cwd
}); 73 | 74 | return next(); 75 | }; 76 | 77 | /** 78 | * @desc get information about the broker cluster 79 | * @returns {{}} 80 | */ 81 | static describeCluster: RequestHandler = async (req, res, next) => { 82 | const admin: Admin = req.app.locals.admin; 83 | const [cluster, error] = await handleAsync(admin.describeCluster()); 84 | 85 | if (error) return next(error); 86 | res.locals.cluster = cluster; 87 | 88 | return next(); 89 | }; 90 | } 91 | -------------------------------------------------------------------------------- /server/kafka/kafka/kafka.routes.ts: -------------------------------------------------------------------------------- 1 | import { Application, Request, Response } from 'express'; 2 | 3 | import { RouteConfig } from '../../common/route.config'; 4 | import { KafkaController } from './kafka.controller'; 5 | import { TopicController } from '../topic/topic.controller'; 6 | import ProducerController from './producer.controller'; 7 | import { ConsumerController } from './consumer.controller'; 8 | import { GroupController } from '../group/group.controller'; 9 | 10 | export class KafkaRoutes extends RouteConfig { 11 | constructor(app: Application) { 12 | super(app, 'KafkaRoutes'); 13 | } 14 | 15 | routes() { 16 | /** 17 | * @POST Initialize containers 18 | * @desc Initialize Docker containers from docker-compose file 19 | */ 20 | this.app 21 | .route('/api/composeup') 22 | .post([KafkaController.composeUp], (req: Request, res: Response) => { 23 | return res.status(200); 24 | }); 25 | 26 | /** 27 | * @POST Stop containers 28 | * @desc Stop Docker containers from docker-compose file 29 | */ 30 | this.app 31 | .route('/api/connect') 32 | .put([KafkaController.composeDown], (req: Request, res: Response) => { 33 | return res.status(200); 34 | }); 35 | 36 | /** 37 | * @POST Initialize Kafka 38 | * @desc sends cluster info and all metadata 39 | */ 40 | this.app.route('/api/connect').post([ 41 | KafkaController.kafka, 42 | KafkaController.startAdmin, 43 | KafkaController.describeCluster, 44 | TopicController.getAllTopicMetadata, 45 | GroupController.listGroups, 46 | (req: Request, res: Response) => { 47 | const { cluster, metadata, groups } = res.locals; 48 | return res.status(200).json({ cluster, metadata, groups }); 49 | }, 50 | ]); 51 | 52 | /** 53 | * @PUT Disconnects Kafka 54 | * @desc disconnects admin kafka instance 55 | */ 56 | this.app.route('/api/connect').put([ 57 | KafkaController.disconnectAdmin, 58 | (req: Request, res: Response) => { 59 | console.log('disconnect admin'); 60 | return res.status(200); 61 | }, 62 | ]); 63 | 64 | /** 65 | * @POST Initialize producer 66 | * @desc Initialize an instance of producer 67 | */ 68 | 69 | this.app 70 | .route('/api/producer') 71 | .post( 72 | [ProducerController.startProducer], 73 | (req: Request, res: Response) => { 74 | return res.status(200); 75 | } 76 | ); 77 | 78 | /** 79 | * @PUT Stops producer 80 | * @desc Stops given producer by topic 81 | */ 82 | this.app 83 | .route('/api/producer') 84 | .put([ProducerController.stopProducer], (req: Request, res: Response) => { 85 | console.log('disconnect producer'); 86 | return res.status(200); 87 | }); 88 | 89 | /** 90 | * @POST Initialize consumer 91 | * @desc Initialize an instance of consumer 92 | */ 93 | this.app 94 | .route('/api/consumer') 95 | .post( 96 | [ConsumerController.startConsumer], 97 | (req: Request, res: Response) => { 98 | return res.status(200); 99 | } 100 | ); 101 | 102 | /** 103 | * @PUT Stops consumer 104 | * @desc Stops given consumer by topic and groupId 105 | 
*/ 106 | this.app 107 | .route('/api/consumer') 108 | .put([ConsumerController.stopConsumer], (req: Request, res: Response) => { 109 | console.log('disconnect consumer'); 110 | return res.status(200); 111 | }); 112 | 113 | return this.app; 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /server/kafka/kafka/producer.controller.ts: -------------------------------------------------------------------------------- 1 | import { RequestHandler } from 'express'; 2 | import { Producer } from 'kafkajs'; 3 | import process from 'process'; 4 | 5 | import { handleAsync, mockData } from '../../common'; 6 | 7 | class ProducerController { 8 | /** 9 | * @desc starts an instance of producer 10 | */ 11 | static startProducer: RequestHandler = async (req, res, next) => { 12 | const { topic } = req.body; 13 | const producer: Producer = req.app.locals.kafka.producer(); 14 | 15 | req.app.locals.producers[topic] = producer; 16 | 17 | const [, error] = await handleAsync(producer.connect()); 18 | if (error) return error; 19 | 20 | let i = 0; 21 | setInterval(async () => { 22 | await producer.send({ 23 | topic, 24 | messages: [ 25 | { 26 | key: i.toString(), 27 | value: JSON.stringify(mockData[i++]), 28 | }, 29 | ], 30 | }); 31 | console.log(`message produced to topic: ${topic}`); 32 | }, 1000); 33 | 34 | const errorTypes = ['unhandledRejection', 'uncaughtException']; 35 | const signalTraps: NodeJS.Signals[] = ['SIGTERM', 'SIGINT', 'SIGUSR2']; 36 | 37 | errorTypes.map(type => { 38 | process.on(type, async () => { 39 | try { 40 | console.log(`process.on ${type}`); 41 | await producer.disconnect(); 42 | process.exit(0); 43 | } catch (_) { 44 | process.exit(1); 45 | } 46 | }); 47 | }); 48 | 49 | signalTraps.map(type => { 50 | process.once(type, async () => { 51 | try { 52 | await producer.disconnect(); 53 | } finally { 54 | process.kill(process.pid, type); 55 | } 56 | }); 57 | }); 58 | 59 | return next(); 60 | }; 61 | 62 | /** 63 | * @desc stops producer by given topic name 64 | */ 65 | static stopProducer: RequestHandler = async (req, res, next) => { 66 | const { topic } = req.body; 67 | const producer = req.app.locals.producers[topic]; 68 | 69 | if (!producer) return next(new Error('producer does not exist')); 70 | 71 | const [, error] = await handleAsync(producer.disconnect()); 72 | if (error) return next(error); 73 | 74 | delete req.app.locals.producer[topic]; 75 | 76 | return next(); 77 | }; 78 | } 79 | 80 | export default ProducerController; 81 | -------------------------------------------------------------------------------- /server/kafka/topic/topic.controller.ts: -------------------------------------------------------------------------------- 1 | import { RequestHandler } from 'express'; 2 | import { Admin } from 'kafkajs'; 3 | 4 | import { handleAsync } from '../../common'; 5 | 6 | export class TopicController { 7 | /** 8 | * @desc list the names of all existing topics 9 | * @returns {string[]} 10 | */ 11 | static listTopics: RequestHandler = async (req, res, next) => { 12 | const admin: Admin = req.app.locals.admin; 13 | const [topics, error] = await handleAsync(admin.listTopics()); 14 | 15 | if (error) return next(error); 16 | res.locals.topics = topics; 17 | 18 | return next(); 19 | }; 20 | 21 | /** 22 | * ADD handle multiple topics 23 | * @desc create topics 24 | * @param {string[]{}} topics 25 | * @param {string} topic 26 | * @param {number} partitions 27 | * @returns {boolean} 28 | */ 29 | static createTopics: RequestHandler = async (req, res, next) 
=> { 30 | const admin: Admin = req.app.locals.admin; 31 | const { topic, numPartitions, replicationFactor } = req.body; 32 | 33 | const [success, error] = await handleAsync( 34 | admin.createTopics({ 35 | topics: [{ topic, numPartitions, replicationFactor }], 36 | }) 37 | ); 38 | 39 | if (error) return next(error); 40 | if (!success) return next('Topic already exists'); 41 | 42 | return next(); 43 | }; 44 | 45 | /** 46 | * @desc delete a topic 47 | * @param {string[]} topics 48 | * @param { string } topic 49 | */ 50 | static deleteTopic: RequestHandler = async (req, res, next) => { 51 | const admin: Admin = req.app.locals.admin; 52 | const { topic } = req.body; 53 | const [, error] = await handleAsync( 54 | admin.deleteTopics({ topics: [topic] }) 55 | ); 56 | 57 | if (error) return next(error); 58 | 59 | return next(); 60 | }; 61 | 62 | /** 63 | * @desc create partitions for a topic. 64 | * @secs It will resolve in case of success. In case of errors, method will throw exceptions 65 | * @param {[]{}} topicPartitions 66 | * @param {string} topic 67 | * @param {number} count 68 | */ 69 | static createPartition: RequestHandler = async (req, res, next) => { 70 | const admin: Admin = req.app.locals.admin; 71 | const { topic, count } = req.body; 72 | const [, error] = await handleAsync( 73 | admin.createPartitions({ topicPartitions: [{ topic, count }] }) 74 | ); 75 | 76 | if (error) return next(error); 77 | 78 | return next(); 79 | }; 80 | 81 | /** 82 | * @desc get metadata of a topic 83 | * @param {string[]} topics 84 | * @param {string} topic 85 | * @returns 86 | */ 87 | static topicMetadata: RequestHandler = async (req, res, next) => { 88 | const admin: Admin = req.app.locals.admin; 89 | const { topic } = req.body; 90 | const [metadata, error] = await handleAsync( 91 | admin.fetchTopicMetadata({ topics: [topic] }) 92 | ); 93 | 94 | if (error) return next(error); 95 | res.locals.metadata = metadata; 96 | 97 | return next(); 98 | }; 99 | 100 | /** 101 | * @desc get metadata for all topics 102 | * @returns 103 | */ 104 | static getAllTopicMetadata: RequestHandler = async (req, res, next) => { 105 | const admin: Admin = req.app.locals.admin; 106 | const [metadata, error] = await handleAsync(admin.fetchTopicMetadata()); 107 | 108 | if (error) return next(error); 109 | res.locals.metadata = metadata; 110 | 111 | return next(); 112 | }; 113 | 114 | /** 115 | * @desc get most recent offset for a topic 116 | * @param {string} topic 117 | * @returns {[]{}} 118 | */ 119 | static topicOffsets: RequestHandler = async (req, res, next) => { 120 | const admin: Admin = req.app.locals.admin; 121 | const { topic } = req.body; 122 | const [offsets, error] = await handleAsync(admin.fetchTopicOffsets(topic)); 123 | 124 | if (error) return next(error); 125 | res.locals.offsets = offsets; 126 | 127 | return next(); 128 | }; 129 | 130 | /** 131 | * @desc get offset for a topic specified by timestamp 132 | * @param {string} topic 133 | * @param { } timestamp 134 | * @returns {[]{}} 135 | */ 136 | static TopicOffsetsByTimestamp: RequestHandler = async (req, res, next) => { 137 | const admin: Admin = req.app.locals.admin; 138 | const { timestamp, topic } = req.body; 139 | const [offsets, error] = await handleAsync( 140 | admin.fetchTopicOffsetsByTimestamp(topic, timestamp) 141 | ); 142 | 143 | if (error) return next(error); 144 | res.locals.offsets = offsets; 145 | 146 | return next(); 147 | }; 148 | 149 | /** 150 | * @desc delete records from selected topic. 
151 | * @param {string} topic 152 | * @param {[]{}} partitions 153 | * @param {number} partition 154 | * @param {string} offset 155 | */ 156 | static deleteTopicRecords: RequestHandler = async (req, res, next) => { 157 | const admin: Admin = req.app.locals.admin; 158 | const { topic, partition, offset } = req.body; 159 | const [, error] = await handleAsync( 160 | admin.deleteTopicRecords({ topic, partitions: [{ partition, offset }] }) 161 | ); 162 | 163 | if (error) return next(error); 164 | 165 | return next(); 166 | }; 167 | } 168 | -------------------------------------------------------------------------------- /server/kafka/topic/topic.routes.ts: -------------------------------------------------------------------------------- 1 | import { Application, Request, Response } from 'express'; 2 | 3 | import { RouteConfig } from '../../common/route.config'; 4 | import { TopicController } from './topic.controller'; 5 | import { KafkaController } from '../kafka/kafka.controller'; 6 | 7 | export class TopicRoutes extends RouteConfig { 8 | constructor(app: Application) { 9 | super(app, 'TopicRoutes'); 10 | } 11 | 12 | routes() { 13 | /** 14 | * @GET api/topic 15 | * @desc get list of topics 16 | */ 17 | this.app 18 | .route('/api/topic') 19 | .get([TopicController.listTopics], (req: Request, res: Response) => { 20 | const { topics } = res.locals; 21 | return res.status(200).json({ topics }); 22 | }); 23 | 24 | /** 25 | * @POST api/topic 26 | * @desc creates a topic 27 | */ 28 | this.app 29 | .route('/api/topic') 30 | .post( 31 | [ 32 | TopicController.createTopics, 33 | KafkaController.describeCluster, 34 | TopicController.getAllTopicMetadata, 35 | ], 36 | (req: Request, res: Response) => { 37 | const { cluster, metadata } = res.locals; 38 | return res.status(200).json({ cluster, metadata }); 39 | } 40 | ); 41 | 42 | /** 43 | * @PUT api/topic 44 | * @desc deletes topic records 45 | */ 46 | this.app 47 | .route('/api/topic') 48 | .put( 49 | [TopicController.deleteTopicRecords], 50 | (req: Request, res: Response) => { 51 | return res.sendStatus(200); 52 | } 53 | ); 54 | 55 | /** 56 | * @DELETE api/topic 57 | * @desc deletes a topic 58 | */ 59 | this.app 60 | .route('/api/topic') 61 | .delete( 62 | [ 63 | TopicController.deleteTopic, 64 | KafkaController.describeCluster, 65 | TopicController.getAllTopicMetadata, 66 | ], 67 | (req: Request, res: Response) => { 68 | const { cluster, metadata } = res.locals; 69 | return res.status(200).json({ cluster, metadata }); 70 | } 71 | ); 72 | 73 | /** 74 | * @POST api/partition 75 | * @desc creates a partition for a topic 76 | */ 77 | this.app 78 | .route('/api/partition') 79 | .post( 80 | [ 81 | TopicController.createPartition, 82 | KafkaController.describeCluster, 83 | TopicController.getAllTopicMetadata, 84 | ], 85 | (req: Request, res: Response) => { 86 | const { cluster, metadata } = res.locals; 87 | return res.status(200).json({ cluster, metadata }); 88 | } 89 | ); 90 | 91 | /** 92 | * @GET api/metadata 93 | * @desc get topic metadata 94 | */ 95 | this.app 96 | .route('/api/metadata') 97 | .get([TopicController.topicMetadata], (req: Request, res: Response) => { 98 | const { metadata } = res.locals; 99 | return res.status(200).json({ metadata }); 100 | }); 101 | 102 | /** 103 | * @GET api/offsets 104 | * @desc get topic offets 105 | */ 106 | this.app 107 | .route('/api/offsets') 108 | .get([TopicController.topicOffsets], (req: Request, res: Response) => { 109 | const { offsets } = res.locals; 110 | return res.status(200).json({ offsets }); 111 | }); 112 | 
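    // Illustrative sketch, not part of the original file: TopicController.TopicOffsetsByTimestamp
    // is defined in topic.controller.ts but never mounted. Following the pattern of the routes
    // above, it could be exposed like this (the '/api/offsetsbytimestamp' path is an assumption):
    //
    // this.app
    //   .route('/api/offsetsbytimestamp')
    //   .get(
    //     [TopicController.TopicOffsetsByTimestamp],
    //     (req: Request, res: Response) => {
    //       const { offsets } = res.locals;
    //       return res.status(200).json({ offsets });
    //     }
    //   );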
113 | return this.app; 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /server/log/log.controller.ts: -------------------------------------------------------------------------------- 1 | import { RequestHandler } from 'express'; 2 | import fs from 'fs'; 3 | const winston = require('winston'); 4 | 5 | interface IErrors { 6 | level: string; 7 | namespace: string; 8 | message: string; 9 | error: string; 10 | cliendId: string; 11 | broker: string; 12 | timestamp: string; 13 | } 14 | 15 | interface IProps { 16 | namespace: string; 17 | log: any; 18 | } 19 | 20 | const { createLogger, transports } = winston; 21 | const { combine, json, metadata, timestamp } = winston.format; 22 | 23 | export class LogController { 24 | static logCreator = () => { 25 | const logger = createLogger({ 26 | level: 'info', 27 | format: combine( 28 | timestamp({ format: 'YYY-MM-DD hh:mm:ss' }), 29 | json(), 30 | metadata() 31 | ), 32 | transports: [ 33 | new transports.Console(), 34 | new transports.File({ filename: 'error.log' }), 35 | ], 36 | }); 37 | 38 | return ({ namespace, log }: IProps) => { 39 | const { message, broker, clientId, error, groupId } = log; 40 | 41 | logger.log({ 42 | level: 'error', 43 | namespace, 44 | message, 45 | error, 46 | clientId, 47 | broker, 48 | groupId, 49 | }); 50 | }; 51 | }; 52 | 53 | /** 54 | * @desc get all previous errors from error.log 55 | * @returns {Array{}} 56 | */ 57 | static getErrors: RequestHandler = (req, res, next) => { 58 | const path = './error.log'; 59 | 60 | try { 61 | if (fs.existsSync(path)) { 62 | const darwin = process.platform === 'darwin'; 63 | let data: string | string[] = fs.readFileSync('./error.log').toString(); 64 | const errors: IErrors[] = []; 65 | 66 | if (darwin) data = data.split('\n'); 67 | else data = data.split('\r\n'); 68 | 69 | data.forEach(error => { 70 | if (error.length > 1) errors.push(JSON.parse(error)); 71 | }); 72 | 73 | res.locals.errors = errors; 74 | } 75 | 76 | return next(); 77 | } catch (e) { 78 | console.error(e); 79 | return next(e); 80 | } 81 | }; 82 | } 83 | -------------------------------------------------------------------------------- /server/log/log.routes.ts: -------------------------------------------------------------------------------- 1 | import { Application, Request, Response } from 'express'; 2 | import { RouteConfig } from '../common/route.config'; 3 | import { LogController } from './log.controller'; 4 | 5 | export class LogRoutes extends RouteConfig { 6 | constructor(app: Application) { 7 | super(app, 'LogRoutes'); 8 | } 9 | 10 | routes() { 11 | /** 12 | * @GET api/errors 13 | * @desc get all previous errors 14 | */ 15 | this.app 16 | .route('/api/notification') 17 | .get([LogController.getErrors], (req: Request, res: Response) => { 18 | return res.status(200).json(res.locals.errors); 19 | }); 20 | 21 | return this.app; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /server/oauth/oauth.routes.ts: -------------------------------------------------------------------------------- 1 | import { Application, Request, Response } from 'express'; 2 | import { RouteConfig } from '../common/route.config'; 3 | import axios from 'axios'; 4 | import dotenv from 'dotenv'; 5 | dotenv.config(); 6 | export class OAuthRoutes extends RouteConfig { 7 | constructor(app: Application) { 8 | super(app, 'OAuthRoutes'); 9 | } 10 | routes() { 11 | /** 12 | * @desc login user 13 | */ 14 | 15 | this.app.get('/oauth', (req, res) => { 16 | 
res.redirect( 17 | `https://github.com/login/oauth/authorize?client_id=${process.env.GITHUB_CLIENT_ID}` 18 | ); 19 | }); 20 | 21 | this.app.get('/oauth-callback', ({ query: { code } }, res) => { 22 | const body = { 23 | client_id: process.env.GITHUB_CLIENT_ID, 24 | client_secret: process.env.GITHUB_SECRET, 25 | code, 26 | }; 27 | const opts = { headers: { accept: 'application/json' } }; 28 | axios 29 | .post('https://github.com/login/oauth/access_token', body, opts) 30 | .then(_res => _res.data.access_token) 31 | .then(token => { 32 | res.redirect(`/?token=${token}`); 33 | }) 34 | .catch(err => res.status(500).json({ err: err.message })); 35 | }); 36 | return this.app; 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /server/users.json: -------------------------------------------------------------------------------- 1 | {"dd":{"email":"dd","password":"$2b$05$rf4od8zbpwoAIHH1AaJFP.VNjceP.f0cq.S/W9if88vgNjFjPXQPK","cookie":"$2b$05$lC2pN9NVC.HRYb3vLmjhg.eZ9M0qAmo/.WMmaopsWND/G88tVbLkm"}} -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig.json to read more about this file */ 4 | 5 | /* Basic Options */ 6 | // "incremental": true, /* Enable incremental compilation */ 7 | "target": "es5" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */, 8 | "module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */, 9 | // "lib": [], /* Specify library files to be included in the compilation. */ 10 | // "allowJs": true, /* Allow javascript files to be compiled. */ 11 | // "checkJs": true, /* Report errors in .js files. */ 12 | "jsx": "react" /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */, 13 | // "declaration": true, /* Generates corresponding '.d.ts' file. */ 14 | // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ 15 | // "sourceMap": true, /* Generates corresponding '.map' file. */ 16 | // "outFile": "./", /* Concatenate and emit output to single file. */ 17 | "outDir": "./dist" /* Redirect output structure to the directory. */, 18 | // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ 19 | // "composite": true, /* Enable project compilation */ 20 | // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ 21 | // "removeComments": true, /* Do not emit comments to output. */ 22 | // "noEmit": true, /* Do not emit outputs. */ 23 | // "importHelpers": true, /* Import emit helpers from 'tslib'. */ 24 | // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ 25 | // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ 26 | 27 | /* Strict Type-Checking Options */ 28 | "strict": true /* Enable all strict type-checking options. */, 29 | // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 30 | // "strictNullChecks": true, /* Enable strict null checks. 
*/ 31 | // "strictFunctionTypes": true, /* Enable strict checking of function types. */ 32 | // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ 33 | // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ 34 | // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ 35 | // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ 36 | 37 | /* Additional Checks */ 38 | // "noUnusedLocals": true, /* Report errors on unused locals. */ 39 | // "noUnusedParameters": true, /* Report errors on unused parameters. */ 40 | // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ 41 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ 42 | // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ 43 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an 'override' modifier. */ 44 | // "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */ 45 | 46 | /* Module Resolution Options */ 47 | // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ 48 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ 49 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ 50 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ 51 | // "typeRoots": [], /* List of folders to include type definitions from. */ 52 | // "types": [], /* Type declaration files to be included in compilation. */ 53 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ 54 | "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */, 55 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ 56 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 57 | 58 | /* Source Map Options */ 59 | // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ 60 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 61 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ 62 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ 63 | 64 | /* Experimental Options */ 65 | // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ 66 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ 67 | 68 | /* Advanced Options */ 69 | "skipLibCheck": true /* Skip type checking of declaration files. */, 70 | // "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. 
*/ 71 | }, 72 | "exclude": ["node_modules"] 73 | } 74 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const HtmlWebPackPlugin = require('html-webpack-plugin'); 3 | 4 | const htmlPlugin = new HtmlWebPackPlugin({ 5 | template: './client/src/index.html', 6 | }); 7 | 8 | module.exports = { 9 | mode: 'development', 10 | entry: './client/src/index.tsx', 11 | target: 'electron-renderer', 12 | 13 | module: { 14 | rules: [ 15 | { 16 | test: /\.tsx?$/, 17 | use: 'ts-loader', 18 | exclude: [/node_modules/], 19 | }, 20 | { 21 | test: /\.css$/i, 22 | use: ['style-loader', 'css-loader'], 23 | }, 24 | ], 25 | }, 26 | 27 | resolve: { 28 | extensions: ['.tsx', '.ts', '.js'], 29 | }, 30 | 31 | output: { 32 | filename: 'bundle.js', 33 | path: path.resolve(__dirname, 'dist'), 34 | }, 35 | plugins: [htmlPlugin], 36 | devServer: { 37 | proxy: { 38 | '/api': 'http://localhost:3000', 39 | }, 40 | contentBase: path.join(__dirname, 'dist'), 41 | compress: true, 42 | port: 8080, 43 | }, 44 | }; 45 | --------------------------------------------------------------------------------