├── frontend ├── public │ ├── favicon.ico │ ├── images │ │ └── gzh.jpg │ ├── favicon.svg │ ├── favicon.html │ └── favicon-instructions.txt ├── tsconfig.node.json ├── src │ ├── App.vue │ ├── main.ts │ ├── utils │ │ └── request.ts │ ├── router │ │ └── index.ts │ ├── layouts │ │ └── DefaultLayout.vue │ └── views │ │ ├── Home.vue │ │ └── Executions.vue ├── index.html ├── vite.config.ts ├── Dockerfile ├── nginx.conf ├── package.json └── tsconfig.json ├── backend ├── src │ └── main │ │ ├── java │ │ └── com │ │ │ └── datagenerator │ │ │ ├── factory │ │ │ └── AlertNotifyFactory.java │ │ │ ├── validator │ │ │ ├── DataValidatorFactory.java │ │ │ ├── impl │ │ │ │ ├── RangeValidator.java │ │ │ │ ├── RequiredValidator.java │ │ │ │ └── RegexValidator.java │ │ │ ├── DataValidator.java │ │ │ └── ValidationResult.java │ │ │ ├── generator │ │ │ ├── DataRule.java │ │ │ ├── rule │ │ │ │ ├── DataRule.java │ │ │ │ ├── impl │ │ │ │ │ ├── FixedRule.java │ │ │ │ │ ├── SequenceRule.java │ │ │ │ │ ├── ReferenceRule.java │ │ │ │ │ ├── RandomRule.java │ │ │ │ │ ├── EnumRule.java │ │ │ │ │ ├── DateRule.java │ │ │ │ │ └── StringRule.java │ │ │ │ └── DataRuleFactory.java │ │ │ ├── DataGenerator.java │ │ │ ├── DataGeneratorFactory.java │ │ │ └── impl │ │ │ │ └── TemplateDataGenerator.java │ │ │ ├── config │ │ │ ├── AopConfig.java │ │ │ ├── SchedulingConfig.java │ │ │ ├── MybatisPlusConfig.java │ │ │ ├── ThreadPoolConfig.java │ │ │ ├── MyMetaObjectHandler.java │ │ │ ├── CorsConfig.java │ │ │ ├── SwaggerConfig.java │ │ │ └── KafkaConfig.java │ │ │ ├── common │ │ │ ├── PageRequest.java │ │ │ └── Result.java │ │ │ ├── mapper │ │ │ ├── TaskMapper.java │ │ │ ├── DataSourceMapper.java │ │ │ ├── SystemInfoMapper.java │ │ │ └── ExecutionRecordMapper.java │ │ │ ├── model │ │ │ └── ApiResponse.java │ │ │ ├── metadata │ │ │ ├── ForeignKeyMetadata.java │ │ │ ├── ColumnMetadata.java │ │ │ └── TableMetadata.java │ │ │ ├── scheduler │ │ │ ├── TaskSchedulingService.java │ │ │ └── TaskScheduler.java │ │ │ ├── 
service │ │ │ ├── SystemInfoService.java │ │ │ ├── TaskService.java │ │ │ ├── ExecutionRecordService.java │ │ │ ├── DataSourceService.java │ │ │ ├── impl │ │ │ │ ├── ExecutionRecordServiceImpl.java │ │ │ │ ├── SystemInfoServiceImpl.java │ │ │ │ ├── TaskServiceImpl.java │ │ │ │ └── DataSourceServiceImpl.java │ │ │ └── DataGenerateService.java │ │ │ ├── DataGeneratorApplication.java │ │ │ ├── annotation │ │ │ ├── PerformanceMonitor.java │ │ │ ├── ApiOperation.java │ │ │ └── ApiOperationAspect.java │ │ │ ├── exception │ │ │ ├── BusinessException.java │ │ │ └── GlobalExceptionHandler.java │ │ │ ├── entity │ │ │ ├── ExecutionRecord.java │ │ │ ├── SystemInfo.java │ │ │ ├── DataSource.java │ │ │ └── DataTask.java │ │ │ ├── util │ │ │ └── JsonUtil.java │ │ │ ├── task │ │ │ └── SystemMonitorTask.java │ │ │ ├── controller │ │ │ ├── SystemInfoController.java │ │ │ ├── TaskController.java │ │ │ ├── ExecutionRecordController.java │ │ │ └── DataSourceController.java │ │ │ └── aspect │ │ │ └── PerformanceMonitorAspect.java │ │ └── resources │ │ ├── logback-spring.xml │ │ ├── application.yml │ │ └── db │ │ └── init.sql ├── Dockerfile └── pom.xml ├── stop.bat ├── scripts └── package │ ├── exclude.txt │ ├── package.sh │ ├── simple-package.ps1 │ └── package.cmd ├── start.bat ├── start.sh ├── deploy.sh ├── docker-compose.yml ├── LICENSE └── README.md /frontend/public/favicon.ico: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/factory/AlertNotifyFactory.java: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/validator/DataValidatorFactory.java: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/validator/impl/RangeValidator.java: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /stop.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | echo 停止数据生成器应用... 3 | 4 | REM 停止并移除容器 5 | docker-compose down 6 | 7 | echo 应用已停止 -------------------------------------------------------------------------------- /frontend/public/images/gzh.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Mrkuhuo/data-generator/HEAD/frontend/public/images/gzh.jpg -------------------------------------------------------------------------------- /scripts/package/exclude.txt: -------------------------------------------------------------------------------- 1 | node_modules\ 2 | .git\ 3 | .idea\ 4 | .vscode\ 5 | target\ 6 | *.log 7 | *.tar.gz 8 | *.zip -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/DataRule.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator; 2 | 3 | /** 4 | * 数据生成规则接口 5 | */ 6 | public interface DataRule { 7 | /** 8 | * 生成一个符合规则的值 9 | * @return 生成的值 10 | */ 11 | Object generate(); 12 | } -------------------------------------------------------------------------------- /frontend/tsconfig.node.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "composite": true, 4 | "skipLibCheck": true, 5 | "module": "ESNext", 6 | "moduleResolution": "bundler", 7 | "allowSyntheticDefaultImports": true 8 | }, 9 | "include": ["vite.config.ts"] 10 | } 
-------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/config/AopConfig.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.config; 2 | 3 | import org.springframework.context.annotation.Configuration; 4 | import org.springframework.context.annotation.EnableAspectJAutoProxy; 5 | 6 | @Configuration 7 | @EnableAspectJAutoProxy 8 | public class AopConfig { 9 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/common/PageRequest.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.common; 2 | 3 | import lombok.Data; 4 | 5 | @Data 6 | public class PageRequest { 7 | private Integer pageNum = 1; 8 | private Integer pageSize = 10; 9 | private String orderBy; 10 | private Boolean asc = true; 11 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/config/SchedulingConfig.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.config; 2 | 3 | import org.springframework.context.annotation.Configuration; 4 | import org.springframework.scheduling.annotation.EnableScheduling; 5 | 6 | @Configuration 7 | @EnableScheduling 8 | public class SchedulingConfig { 9 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/mapper/TaskMapper.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.mapper; 2 | 3 | import com.baomidou.mybatisplus.core.mapper.BaseMapper; 4 | import com.datagenerator.entity.DataTask; 5 | import org.apache.ibatis.annotations.Mapper; 6 | 7 | @Mapper 8 | public interface TaskMapper extends BaseMapper { 9 | } 
-------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/mapper/DataSourceMapper.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.mapper; 2 | 3 | import com.baomidou.mybatisplus.core.mapper.BaseMapper; 4 | import com.datagenerator.entity.DataSource; 5 | import org.apache.ibatis.annotations.Mapper; 6 | 7 | @Mapper 8 | public interface DataSourceMapper extends BaseMapper { 9 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/mapper/SystemInfoMapper.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.mapper; 2 | 3 | import com.baomidou.mybatisplus.core.mapper.BaseMapper; 4 | import com.datagenerator.entity.SystemInfo; 5 | import org.apache.ibatis.annotations.Mapper; 6 | 7 | @Mapper 8 | public interface SystemInfoMapper extends BaseMapper { 9 | } -------------------------------------------------------------------------------- /frontend/src/App.vue: -------------------------------------------------------------------------------- 1 | 6 | 7 | 10 | 11 | -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/model/ApiResponse.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @NoArgsConstructor 9 | @AllArgsConstructor 10 | public class ApiResponse { 11 | private int code; 12 | private String message; 13 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/mapper/ExecutionRecordMapper.java: -------------------------------------------------------------------------------- 1 | 
package com.datagenerator.mapper; 2 | 3 | import com.baomidou.mybatisplus.core.mapper.BaseMapper; 4 | import com.datagenerator.entity.ExecutionRecord; 5 | import org.apache.ibatis.annotations.Mapper; 6 | 7 | @Mapper 8 | public interface ExecutionRecordMapper extends BaseMapper { 9 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/metadata/ForeignKeyMetadata.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.metadata; 2 | 3 | import lombok.Data; 4 | import java.util.HashSet; 5 | import java.util.Set; 6 | 7 | @Data 8 | public class ForeignKeyMetadata { 9 | private String columnName; 10 | private String referencedTable; 11 | private String referencedColumn; 12 | private Set validValues = new HashSet<>(); 13 | } -------------------------------------------------------------------------------- /frontend/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 数据生成平台 8 | 9 | 10 |
11 | 12 | 13 | -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/validator/DataValidator.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.validator; 2 | 3 | import java.util.Map; 4 | 5 | public interface DataValidator { 6 | /** 7 | * 验证数据 8 | * @param data 待验证的数据 9 | * @return 验证结果 10 | */ 11 | ValidationResult validate(Map data); 12 | 13 | /** 14 | * 获取验证器类型 15 | * @return 验证器类型 16 | */ 17 | String getType(); 18 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/rule/DataRule.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator.rule; 2 | 3 | public interface DataRule { 4 | /** 5 | * 生成数据 6 | * @return 生成的数据 7 | */ 8 | Object generate(); 9 | 10 | /** 11 | * 获取规则类型 12 | * @return 规则类型 13 | */ 14 | String getType(); 15 | 16 | /** 17 | * 获取规则参数 18 | * @return 规则参数 19 | */ 20 | Object getParams(); 21 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/metadata/ColumnMetadata.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.metadata; 2 | 3 | import lombok.Data; 4 | 5 | @Data 6 | public class ColumnMetadata { 7 | private String name; 8 | private String dataType; 9 | private boolean nullable; 10 | private String maxLength; 11 | private String defaultValue; 12 | private String comment; 13 | private String columnKey; 14 | private String extra; 15 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/scheduler/TaskSchedulingService.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.scheduler; 2 | 3 | import 
com.datagenerator.entity.DataTask; 4 | 5 | public interface TaskSchedulingService { 6 | /** 7 | * 调度任务 8 | * @param task 要调度的任务 9 | */ 10 | void scheduleTask(DataTask task); 11 | 12 | /** 13 | * 取消任务调度 14 | * @param taskId 要取消的任务ID 15 | */ 16 | void cancelTask(Long taskId); 17 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/service/SystemInfoService.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.service; 2 | 3 | import com.baomidou.mybatisplus.extension.service.IService; 4 | import com.datagenerator.entity.SystemInfo; 5 | 6 | public interface SystemInfoService extends IService { 7 | 8 | /** 9 | * 获取系统信息 10 | */ 11 | SystemInfo getSystemInfo(); 12 | 13 | /** 14 | * 保存系统信息 15 | */ 16 | void saveSystemInfo(SystemInfo systemInfo); 17 | } -------------------------------------------------------------------------------- /backend/Dockerfile: -------------------------------------------------------------------------------- 1 | # 使用多阶段构建 2 | # 第一阶段:构建阶段 3 | FROM maven:3.8.4-openjdk-17 AS builder 4 | 5 | # 设置工作目录 6 | WORKDIR /app 7 | 8 | # 复制pom.xml 9 | COPY pom.xml . 
10 | 11 | # 复制源代码 12 | COPY src ./src 13 | 14 | # 构建应用 15 | RUN mvn clean package -DskipTests 16 | 17 | # 第二阶段:运行阶段 18 | FROM openjdk:17-slim 19 | 20 | WORKDIR /app 21 | 22 | # 从构建阶段复制jar文件 23 | COPY --from=builder /app/target/*.jar app.jar 24 | 25 | # 暴露端口 26 | EXPOSE 8080 27 | 28 | # 设置启动命令 29 | ENTRYPOINT ["java","-jar","app.jar"] -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/DataGenerator.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | public interface DataGenerator { 7 | /** 8 | * 生成数据 9 | * @param template 数据生成模板 10 | * @param count 生成数量 11 | * @return 生成的数据列表 12 | */ 13 | List> generate(String template, int count); 14 | 15 | /** 16 | * 获取字段列表 17 | * @return 字段列表 18 | */ 19 | List getFields(); 20 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/service/TaskService.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.service; 2 | 3 | import com.baomidou.mybatisplus.extension.service.IService; 4 | import com.datagenerator.entity.DataTask; 5 | 6 | public interface TaskService extends IService { 7 | /** 8 | * 启动任务 9 | */ 10 | boolean startTask(Long taskId); 11 | 12 | /** 13 | * 停止任务 14 | */ 15 | boolean stopTask(Long taskId); 16 | 17 | /** 18 | * 立即执行一次任务 19 | */ 20 | boolean executeTask(Long taskId); 21 | } -------------------------------------------------------------------------------- /frontend/vite.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vite' 2 | import vue from '@vitejs/plugin-vue' 3 | import path from 'path' 4 | 5 | export default defineConfig({ 6 | plugins: [vue()], 7 | server: { 8 | port: 3000, 9 | proxy: 
{ 10 | '/api': { 11 | target: 'http://localhost:8888', 12 | changeOrigin: true, 13 | rewrite: (path) => path.replace(/^\/api/, '') 14 | }, 15 | }, 16 | }, 17 | resolve: { 18 | alias: { 19 | '@': path.resolve(__dirname, 'src'), 20 | }, 21 | }, 22 | }) -------------------------------------------------------------------------------- /frontend/public/favicon.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | DG 10 | -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/service/ExecutionRecordService.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.service; 2 | 3 | import com.baomidou.mybatisplus.extension.service.IService; 4 | import com.datagenerator.entity.ExecutionRecord; 5 | 6 | public interface ExecutionRecordService extends IService { 7 | /** 8 | * 创建执行记录 9 | */ 10 | ExecutionRecord createRecord(Long taskId); 11 | 12 | /** 13 | * 更新执行记录状态 14 | */ 15 | boolean updateStatus(Long recordId, Integer status, String errorMessage, Long recordsCount); 16 | } -------------------------------------------------------------------------------- /frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | # 构建阶段 2 | FROM node:16-alpine as builder 3 | 4 | # 设置工作目录 5 | WORKDIR /app 6 | 7 | # 复制package.json和package-lock.json 8 | COPY package*.json ./ 9 | 10 | # 安装依赖 11 | RUN npm install 12 | 13 | # 复制源代码 14 | COPY . . 
15 | 16 | # 构建应用 17 | RUN npm run build 18 | 19 | # 运行阶段 20 | FROM nginx:alpine 21 | 22 | # 复制nginx配置 23 | COPY nginx.conf /etc/nginx/conf.d/default.conf 24 | 25 | # 从构建阶段复制构建产物 26 | COPY --from=builder /app/dist /usr/share/nginx/html 27 | 28 | # 暴露端口 29 | EXPOSE 80 30 | 31 | # 启动nginx 32 | CMD ["nginx", "-g", "daemon off;"] -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/rule/impl/FixedRule.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator.rule.impl; 2 | 3 | import com.datagenerator.generator.rule.DataRule; 4 | import lombok.Data; 5 | 6 | @Data 7 | public class FixedRule implements DataRule { 8 | private Object value; 9 | 10 | @Override 11 | public Object generate() { 12 | return value; 13 | } 14 | 15 | @Override 16 | public String getType() { 17 | return "fixed"; 18 | } 19 | 20 | @Override 21 | public Object getParams() { 22 | return this; 23 | } 24 | } -------------------------------------------------------------------------------- /frontend/src/main.ts: -------------------------------------------------------------------------------- 1 | import { createApp } from 'vue' 2 | import { createPinia } from 'pinia' 3 | import ElementPlus from 'element-plus' 4 | import 'element-plus/dist/index.css' 5 | import * as ElementPlusIconsVue from '@element-plus/icons-vue' 6 | import App from './App.vue' 7 | import router from './router' 8 | 9 | const app = createApp(App) 10 | 11 | // 注册Element Plus图标 12 | for (const [key, component] of Object.entries(ElementPlusIconsVue)) { 13 | app.component(key, component) 14 | } 15 | 16 | app.use(createPinia()) 17 | app.use(router) 18 | app.use(ElementPlus) 19 | 20 | app.mount('#app') -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/metadata/TableMetadata.java: 
-------------------------------------------------------------------------------- 1 | package com.datagenerator.metadata; 2 | 3 | import com.datagenerator.entity.DataSource; 4 | import lombok.Data; 5 | import java.util.*; 6 | 7 | @Data 8 | public class TableMetadata { 9 | private String tableName; 10 | private Map columns = new HashMap<>(); 11 | private String primaryKeyColumn; 12 | private boolean autoIncrement; 13 | private List foreignKeys = new ArrayList<>(); 14 | private Set uniqueColumns = new HashSet<>(); 15 | private DataSource dataSource; 16 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/DataGeneratorApplication.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator; 2 | 3 | import org.mybatis.spring.annotation.MapperScan; 4 | import org.springframework.boot.SpringApplication; 5 | import org.springframework.boot.autoconfigure.SpringBootApplication; 6 | import org.springframework.scheduling.annotation.EnableScheduling; 7 | 8 | @SpringBootApplication 9 | @EnableScheduling 10 | @MapperScan("com.datagenerator.mapper") 11 | public class DataGeneratorApplication { 12 | public static void main(String[] args) { 13 | SpringApplication.run(DataGeneratorApplication.class, args); 14 | } 15 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/validator/ValidationResult.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.validator; 2 | 3 | import lombok.Data; 4 | 5 | import java.util.ArrayList; 6 | import java.util.List; 7 | 8 | @Data 9 | public class ValidationResult { 10 | private boolean valid; 11 | private List errors; 12 | 13 | public ValidationResult() { 14 | this.valid = true; 15 | this.errors = new ArrayList<>(); 16 | } 17 | 18 | public void addError(String error) { 19 | this.valid = 
false; 20 | this.errors.add(error); 21 | } 22 | 23 | public void addErrors(List errors) { 24 | this.valid = false; 25 | this.errors.addAll(errors); 26 | } 27 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/rule/impl/SequenceRule.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator.rule.impl; 2 | 3 | import com.datagenerator.generator.rule.DataRule; 4 | import lombok.Data; 5 | 6 | @Data 7 | public class SequenceRule implements DataRule { 8 | private long current = 0; 9 | private long step = 1; 10 | private long start = 0; 11 | 12 | @Override 13 | public Object generate() { 14 | long value = start + current * step; 15 | current++; 16 | return value; 17 | } 18 | 19 | @Override 20 | public String getType() { 21 | return "sequence"; 22 | } 23 | 24 | @Override 25 | public Object getParams() { 26 | return this; 27 | } 28 | } -------------------------------------------------------------------------------- /frontend/nginx.conf: -------------------------------------------------------------------------------- 1 | server { 2 | listen 80; 3 | server_name localhost; 4 | 5 | # 前端静态文件目录 6 | root /usr/share/nginx/html; 7 | index index.html; 8 | 9 | # 处理前端路由 10 | location / { 11 | try_files $uri $uri/ /index.html; 12 | } 13 | 14 | # 后端API代理 15 | location /api/ { 16 | proxy_pass http://backend:8080/; 17 | proxy_set_header Host $host; 18 | proxy_set_header X-Real-IP $remote_addr; 19 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 20 | proxy_set_header X-Forwarded-Proto $scheme; 21 | } 22 | 23 | # 错误页面 24 | error_page 500 502 503 504 /50x.html; 25 | location = /50x.html { 26 | root /usr/share/nginx/html; 27 | } 28 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/annotation/PerformanceMonitor.java: 
-------------------------------------------------------------------------------- 1 | package com.datagenerator.annotation; 2 | 3 | import java.lang.annotation.*; 4 | 5 | @Target({ElementType.METHOD}) 6 | @Retention(RetentionPolicy.RUNTIME) 7 | @Documented 8 | public @interface PerformanceMonitor { 9 | /** 10 | * 接口名称 11 | */ 12 | String name() default ""; 13 | 14 | /** 15 | * 是否记录参数 16 | */ 17 | boolean logParams() default true; 18 | 19 | /** 20 | * 是否记录返回值 21 | */ 22 | boolean logResult() default true; 23 | 24 | /** 25 | * 是否记录异常 26 | */ 27 | boolean logException() default true; 28 | 29 | /** 30 | * 是否记录执行时间 31 | */ 32 | boolean logTime() default true; 33 | } -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "data-generator-frontend", 3 | "version": "1.0.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "vite", 7 | "build": "vue-tsc --noEmit && vite build", 8 | "preview": "vite preview" 9 | }, 10 | "dependencies": { 11 | "@element-plus/icons-vue": "^2.1.0", 12 | "axios": "^1.8.1", 13 | "date-fns": "^4.1.0", 14 | "echarts": "^5.4.3", 15 | "element-plus": "^2.3.9", 16 | "pinia": "^2.1.6", 17 | "vue": "^3.3.4", 18 | "vue-router": "^4.2.4" 19 | }, 20 | "devDependencies": { 21 | "@types/node": "^20.5.0", 22 | "@vitejs/plugin-vue": "^4.2.3", 23 | "sass": "^1.66.1", 24 | "typescript": "^5.1.6", 25 | "vite": "^4.4.9", 26 | "vue-tsc": "^1.8.8" 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/config/MybatisPlusConfig.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.config; 2 | 3 | import com.baomidou.mybatisplus.annotation.DbType; 4 | import com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor; 5 | import 
com.baomidou.mybatisplus.extension.plugins.inner.PaginationInnerInterceptor; 6 | import org.springframework.context.annotation.Bean; 7 | import org.springframework.context.annotation.Configuration; 8 | 9 | @Configuration 10 | public class MybatisPlusConfig { 11 | 12 | @Bean 13 | public MybatisPlusInterceptor mybatisPlusInterceptor() { 14 | MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor(); 15 | interceptor.addInnerInterceptor(new PaginationInnerInterceptor(DbType.MYSQL)); 16 | return interceptor; 17 | } 18 | } -------------------------------------------------------------------------------- /start.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | echo 开始启动数据生成器应用... 3 | 4 | REM 检查Docker是否安装 5 | where docker >nul 2>nul 6 | if %ERRORLEVEL% neq 0 ( 7 | echo 错误: Docker未安装,请先安装Docker和Docker Compose 8 | exit /b 1 9 | ) 10 | 11 | REM 检查Docker Compose是否安装 12 | where docker-compose >nul 2>nul 13 | if %ERRORLEVEL% neq 0 ( 14 | echo 错误: Docker Compose未安装,请先安装Docker Compose 15 | exit /b 1 16 | ) 17 | 18 | REM 构建并启动容器 19 | echo 构建并启动容器... 20 | docker-compose up -d --build 21 | 22 | REM 等待服务启动 23 | echo 等待服务启动... 24 | timeout /t 10 /nobreak >nul 25 | 26 | REM 检查服务是否正常运行 27 | docker-compose ps 28 | echo. 29 | echo 如果上面显示的服务状态都是"Up",则服务已成功启动! 30 | echo 前端访问地址: http://localhost 31 | echo 后端API地址: http://localhost:8888 32 | echo Kafka地址: localhost:9092 33 | echo MySQL地址: localhost:3306 34 | echo. 35 | echo 如果服务启动失败,请运行 "docker-compose logs" 查看详细日志 -------------------------------------------------------------------------------- /start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "开始启动数据生成器应用..." 4 | 5 | # 检查Docker是否安装 6 | if ! command -v docker &> /dev/null; then 7 | echo "错误: Docker未安装,请先安装Docker和Docker Compose" 8 | exit 1 9 | fi 10 | 11 | # 检查Docker Compose是否安装 12 | if ! 
command -v docker-compose &> /dev/null; then 13 | echo "错误: Docker Compose未安装,请先安装Docker Compose" 14 | exit 1 15 | fi 16 | 17 | # 构建并启动容器 18 | echo "构建并启动容器..." 19 | docker-compose up -d --build 20 | 21 | # 等待服务启动 22 | echo "等待服务启动..." 23 | sleep 10 24 | 25 | # 检查服务是否正常运行 26 | if docker-compose ps | grep -q "Up"; then 27 | echo "服务已成功启动!" 28 | echo "前端访问地址: http://localhost" 29 | echo "后端API地址: http://localhost:8888" 30 | echo "Kafka地址: localhost:9092" 31 | echo "MySQL地址: localhost:3306" 32 | else 33 | echo "服务启动失败,请检查日志:" 34 | docker-compose logs 35 | fi -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/exception/BusinessException.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.exception; 2 | 3 | import lombok.Getter; 4 | 5 | @Getter 6 | public class BusinessException extends RuntimeException { 7 | private final Integer code; 8 | 9 | public BusinessException(String message) { 10 | super(message); 11 | this.code = 500; 12 | } 13 | 14 | public BusinessException(Integer code, String message) { 15 | super(message); 16 | this.code = code; 17 | } 18 | 19 | public BusinessException(String message, Throwable cause) { 20 | super(message, cause); 21 | this.code = 500; 22 | } 23 | 24 | public BusinessException(Integer code, String message, Throwable cause) { 25 | super(message, cause); 26 | this.code = code; 27 | } 28 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/annotation/ApiOperation.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.annotation; 2 | 3 | import java.lang.annotation.*; 4 | 5 | @Target({ElementType.METHOD}) 6 | @Retention(RetentionPolicy.RUNTIME) 7 | @Documented 8 | public @interface ApiOperation { 9 | 10 | /** 11 | * 接口名称 12 | */ 13 | String name() default ""; 14 | 15 | /** 
16 | * 接口描述 17 | */ 18 | String description() default ""; 19 | 20 | /** 21 | * 是否记录参数 22 | */ 23 | boolean logParams() default true; 24 | 25 | /** 26 | * 是否记录返回值 27 | */ 28 | boolean logResult() default true; 29 | 30 | /** 31 | * 是否记录异常 32 | */ 33 | boolean logException() default true; 34 | 35 | /** 36 | * 是否记录执行时间 37 | */ 38 | boolean logTime() default true; 39 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/config/ThreadPoolConfig.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.config; 2 | 3 | import org.springframework.context.annotation.Bean; 4 | import org.springframework.context.annotation.Configuration; 5 | import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler; 6 | 7 | @Configuration 8 | public class ThreadPoolConfig { 9 | 10 | @Bean(name = "threadPoolTaskScheduler") 11 | public ThreadPoolTaskScheduler taskScheduler() { 12 | ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler(); 13 | scheduler.setPoolSize(10); 14 | scheduler.setThreadNamePrefix("task-scheduler-"); 15 | scheduler.setErrorHandler(t -> { 16 | // 处理任务执行异常 17 | System.err.println("任务执行异常: " + t.getMessage()); 18 | }); 19 | scheduler.initialize(); 20 | return scheduler; 21 | } 22 | } -------------------------------------------------------------------------------- /frontend/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "useDefineForClassFields": true, 5 | "module": "ESNext", 6 | "lib": ["ES2020", "DOM", "DOM.Iterable"], 7 | "skipLibCheck": true, 8 | 9 | /* Bundler mode */ 10 | "moduleResolution": "bundler", 11 | "allowImportingTsExtensions": true, 12 | "resolveJsonModule": true, 13 | "isolatedModules": true, 14 | "noEmit": true, 15 | "jsx": "preserve", 16 | 17 | /* Linting */ 18 | "strict": true, 19 | 
"noUnusedLocals": true, 20 | "noUnusedParameters": true, 21 | "noFallthroughCasesInSwitch": true, 22 | 23 | /* Path alias */ 24 | "baseUrl": ".", 25 | "paths": { 26 | "@/*": ["src/*"] 27 | } 28 | }, 29 | "include": ["src/**/*.ts", "src/**/*.d.ts", "src/**/*.tsx", "src/**/*.vue"], 30 | "references": [{ "path": "./tsconfig.node.json" }] 31 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/config/MyMetaObjectHandler.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.config; 2 | 3 | import com.baomidou.mybatisplus.core.handlers.MetaObjectHandler; 4 | import org.apache.ibatis.reflection.MetaObject; 5 | import org.springframework.stereotype.Component; 6 | 7 | import java.time.LocalDateTime; 8 | 9 | @Component 10 | public class MyMetaObjectHandler implements MetaObjectHandler { 11 | 12 | @Override 13 | public void insertFill(MetaObject metaObject) { 14 | this.strictInsertFill(metaObject, "createTime", LocalDateTime.class, LocalDateTime.now()); 15 | this.strictInsertFill(metaObject, "updateTime", LocalDateTime.class, LocalDateTime.now()); 16 | } 17 | 18 | @Override 19 | public void updateFill(MetaObject metaObject) { 20 | this.strictUpdateFill(metaObject, "updateTime", LocalDateTime.class, LocalDateTime.now()); 21 | } 22 | } -------------------------------------------------------------------------------- /frontend/src/utils/request.ts: -------------------------------------------------------------------------------- 1 | import axios from 'axios' 2 | import { ElMessage } from 'element-plus' 3 | 4 | // 定义API响应类型 5 | export interface ApiResponse { 6 | code: number 7 | message: string 8 | data: T 9 | } 10 | 11 | const request = axios.create({ 12 | baseURL: 'http://localhost:8888/api', 13 | timeout: 5000 14 | }) 15 | 16 | // 响应拦截器 17 | request.interceptors.response.use( 18 | response => { 19 | return response.data 20 | }, 21 | error => { 
22 | if (error.response) { 23 | switch (error.response.status) { 24 | case 404: 25 | ElMessage.error('请求的资源不存在') 26 | break 27 | case 500: 28 | ElMessage.error('服务器错误') 29 | break 30 | default: 31 | ElMessage.error(error.response.data?.message || '请求失败') 32 | } 33 | } else { 34 | ElMessage.error('网络错误,请检查网络连接') 35 | } 36 | return Promise.reject(error) 37 | } 38 | ) 39 | 40 | export default request -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/entity/ExecutionRecord.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.entity; 2 | 3 | import com.baomidou.mybatisplus.annotation.*; 4 | import lombok.Data; 5 | 6 | import java.time.LocalDateTime; 7 | 8 | @Data 9 | @TableName("task_execution") 10 | public class ExecutionRecord { 11 | @TableId(type = IdType.AUTO) 12 | private Long id; 13 | 14 | private Long taskId; 15 | 16 | private LocalDateTime startTime; 17 | 18 | private LocalDateTime endTime; 19 | 20 | private String status; 21 | 22 | private Long totalCount; 23 | 24 | private Long successCount; 25 | 26 | private Long errorCount; 27 | 28 | private String errorMessage; 29 | 30 | @TableField(fill = FieldFill.INSERT) 31 | private LocalDateTime createTime; 32 | 33 | @TableField(fill = FieldFill.INSERT_UPDATE) 34 | private LocalDateTime updateTime; 35 | 36 | @TableLogic 37 | private Integer deleted; 38 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/util/JsonUtil.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.util; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import lombok.extern.slf4j.Slf4j; 6 | 7 | @Slf4j 8 | public class JsonUtil { 9 | 10 | private static final ObjectMapper objectMapper = new 
ObjectMapper(); 11 | 12 | public static String toJson(Object obj) { 13 | try { 14 | return objectMapper.writeValueAsString(obj); 15 | } catch (JsonProcessingException e) { 16 | log.error("JSON序列化失败", e); 17 | throw new RuntimeException("JSON序列化失败", e); 18 | } 19 | } 20 | 21 | public static T fromJson(String json, Class clazz) { 22 | try { 23 | return objectMapper.readValue(json, clazz); 24 | } catch (JsonProcessingException e) { 25 | log.error("JSON反序列化失败", e); 26 | throw new RuntimeException("JSON反序列化失败", e); 27 | } 28 | } 29 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/validator/impl/RequiredValidator.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.validator.impl; 2 | 3 | import com.datagenerator.validator.DataValidator; 4 | import com.datagenerator.validator.ValidationResult; 5 | import lombok.Data; 6 | 7 | import java.util.List; 8 | import java.util.Map; 9 | 10 | @Data 11 | public class RequiredValidator implements DataValidator { 12 | private List requiredFields; 13 | 14 | @Override 15 | public ValidationResult validate(Map data) { 16 | ValidationResult result = new ValidationResult(); 17 | 18 | if (requiredFields != null) { 19 | for (String field : requiredFields) { 20 | if (!data.containsKey(field) || data.get(field) == null) { 21 | result.addError("字段 " + field + " 不能为空"); 22 | } 23 | } 24 | } 25 | 26 | return result; 27 | } 28 | 29 | @Override 30 | public String getType() { 31 | return "required"; 32 | } 33 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/entity/SystemInfo.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.entity; 2 | 3 | import com.baomidou.mybatisplus.annotation.IdType; 4 | import com.baomidou.mybatisplus.annotation.TableId; 5 | import 
com.baomidou.mybatisplus.annotation.TableName; 6 | import lombok.Data; 7 | 8 | import java.time.LocalDateTime; 9 | 10 | @Data 11 | @TableName("system_info") 12 | public class SystemInfo { 13 | 14 | @TableId(type = IdType.AUTO) 15 | private Long id; 16 | 17 | /** 18 | * CPU使用率 19 | */ 20 | private Double cpuUsage; 21 | 22 | /** 23 | * 内存使用率 24 | */ 25 | private Double memoryUsage; 26 | 27 | /** 28 | * 磁盘使用率 29 | */ 30 | private Double diskUsage; 31 | 32 | /** 33 | * JVM堆内存使用率 34 | */ 35 | private Double jvmHeapUsage; 36 | 37 | /** 38 | * JVM非堆内存使用率 39 | */ 40 | private Double jvmNonHeapUsage; 41 | 42 | /** 43 | * 系统运行时间(毫秒) 44 | */ 45 | private Long uptime; 46 | 47 | /** 48 | * 创建时间 49 | */ 50 | private LocalDateTime createTime; 51 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/DataGeneratorFactory.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator; 2 | 3 | import com.datagenerator.generator.impl.TemplateDataGenerator; 4 | import com.datagenerator.generator.rule.DataRuleFactory; 5 | import org.springframework.stereotype.Component; 6 | 7 | @Component 8 | public class DataGeneratorFactory { 9 | 10 | private final DataRuleFactory ruleFactory; 11 | 12 | public DataGeneratorFactory(DataRuleFactory ruleFactory) { 13 | this.ruleFactory = ruleFactory; 14 | } 15 | 16 | public DataGenerator createGenerator(String dataFormat) { 17 | if (dataFormat == null || dataFormat.trim().isEmpty()) { 18 | throw new IllegalArgumentException("数据格式不能为空"); 19 | } 20 | 21 | switch (dataFormat.toUpperCase()) { 22 | case "JSON": 23 | case "AVRO": 24 | case "PROTOBUF": 25 | return new TemplateDataGenerator(ruleFactory); 26 | default: 27 | throw new IllegalArgumentException("不支持的数据格式: " + dataFormat); 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- 
/backend/src/main/java/com/datagenerator/config/CorsConfig.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.config; 2 | 3 | import org.springframework.context.annotation.Bean; 4 | import org.springframework.context.annotation.Configuration; 5 | import org.springframework.web.cors.CorsConfiguration; 6 | import org.springframework.web.cors.UrlBasedCorsConfigurationSource; 7 | import org.springframework.web.filter.CorsFilter; 8 | 9 | @Configuration 10 | public class CorsConfig { 11 | 12 | @Bean 13 | public CorsFilter corsFilter() { 14 | // 创建CORS配置 15 | CorsConfiguration config = new CorsConfiguration(); 16 | // 允许所有域名 17 | config.addAllowedOriginPattern("*"); 18 | // 允许所有请求头 19 | config.addAllowedHeader("*"); 20 | // 允许所有方法 21 | config.addAllowedMethod("*"); 22 | // 允许携带认证信息 23 | config.setAllowCredentials(true); 24 | 25 | // 创建CORS配置源 26 | UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource(); 27 | source.registerCorsConfiguration("/**", config); 28 | 29 | // 创建CORS过滤器 30 | return new CorsFilter(source); 31 | } 32 | } -------------------------------------------------------------------------------- /frontend/src/router/index.ts: -------------------------------------------------------------------------------- 1 | import { createRouter, createWebHistory } from 'vue-router' 2 | import DefaultLayout from '@/layouts/DefaultLayout.vue' 3 | 4 | const router = createRouter({ 5 | history: createWebHistory(), 6 | routes: [ 7 | { 8 | path: '/', 9 | component: DefaultLayout, 10 | children: [ 11 | { 12 | path: '', 13 | name: 'Home', 14 | component: () => import('@/views/Home.vue'), 15 | meta: { title: '首页' } 16 | }, 17 | { 18 | path: 'data-sources', 19 | name: 'DataSources', 20 | component: () => import('@/views/DataSources.vue'), 21 | meta: { title: '数据源管理' } 22 | }, 23 | { 24 | path: 'tasks', 25 | name: 'Tasks', 26 | component: () => import('@/views/Tasks.vue'), 27 | meta: { title: 
package com.datagenerator.generator.rule.impl;

import com.datagenerator.generator.rule.DataRule;
import lombok.Data;

import java.util.List;
import java.util.Map;

/**
 * Rule that picks a field value from rows previously loaded from another
 * table (foreign-key style generation): either uniformly at random or
 * round-robin over the loaded rows.
 */
@Data
public class ReferenceRule implements DataRule {
    private String field;          // column to read from the referenced rows
    private String table;          // referenced table name
    private String condition;      // optional filter applied when rows are loaded
    private boolean random = true; // true: random pick; false: round-robin
    private int current = 0;       // round-robin cursor
    // Fix: restored the generic types lost in this copy — each entry is one
    // row of the referenced table keyed by column name; the raw `List>` did
    // not compile.
    private List<Map<String, Object>> values;

    /**
     * @return a value of {@code field} from one of the loaded rows, or null
     *         when no rows have been loaded yet.
     */
    @Override
    public Object generate() {
        if (values == null || values.isEmpty()) {
            return null;
        }
        if (random) {
            return values.get((int) (Math.random() * values.size())).get(field);
        }
        Object value = values.get(current).get(field);
        current = (current + 1) % values.size();
        return value;
    }

    @Override
    public String getType() {
        return "reference";
    }

    @Override
    public Object getParams() {
        return this;
    }
}
12 | */ 13 | void testConnection(DataSource dataSource) throws SQLException; 14 | 15 | /** 16 | * 获取数据源下的所有表 17 | */ 18 | List getTables(Long dataSourceId); 19 | 20 | /** 21 | * 获取数据源下的所有Kafka主题 22 | */ 23 | List getTopics(Long dataSourceId); 24 | 25 | /** 26 | * 获取表结构 27 | * @param dataSourceId 数据源ID 28 | * @param tableName 表名 29 | * @return 表结构信息列表,每个元素包含字段名、类型和注释 30 | */ 31 | List> getTableColumns(Long dataSourceId, String tableName); 32 | 33 | /** 34 | * 获取表之间的依赖关系 35 | * @param dataSourceId 数据源ID 36 | * @param tables 表名数组 37 | * @return 表依赖关系,key为表名,value为该表依赖的表列表 38 | */ 39 | Map> getTableDependencies(Long dataSourceId, String[] tables); 40 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/task/SystemMonitorTask.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.task; 2 | 3 | import com.datagenerator.entity.SystemInfo; 4 | import com.datagenerator.service.SystemInfoService; 5 | import lombok.extern.slf4j.Slf4j; 6 | import org.springframework.scheduling.annotation.Scheduled; 7 | import org.springframework.stereotype.Component; 8 | 9 | import javax.annotation.Resource; 10 | 11 | @Slf4j 12 | @Component 13 | public class SystemMonitorTask { 14 | 15 | @Resource 16 | private SystemInfoService systemInfoService; 17 | 18 | /** 19 | * 每分钟执行一次系统信息收集 20 | */ 21 | @Scheduled(cron = "0 * * * * ?") 22 | public void collectSystemInfo() { 23 | try { 24 | log.info("开始收集系统信息..."); 25 | SystemInfo systemInfo = systemInfoService.getSystemInfo(); 26 | systemInfoService.saveSystemInfo(systemInfo); 27 | log.info("系统信息收集完成: CPU使用率={}%, 内存使用率={}%, JVM堆内存使用率={}%", 28 | systemInfo.getCpuUsage(), 29 | systemInfo.getMemoryUsage(), 30 | systemInfo.getJvmHeapUsage()); 31 | } catch (Exception e) { 32 | log.error("系统信息收集失败", e); 33 | } 34 | } 35 | } -------------------------------------------------------------------------------- 
/backend/src/main/java/com/datagenerator/common/Result.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.common; 2 | 3 | import lombok.Data; 4 | 5 | @Data 6 | public class Result { 7 | private Integer code; 8 | private String message; 9 | private T data; 10 | 11 | private Result(Integer code, String message, T data) { 12 | this.code = code; 13 | this.message = message; 14 | this.data = data; 15 | } 16 | 17 | public static Result success() { 18 | return new Result<>(200, "操作成功", null); 19 | } 20 | 21 | public static Result success(T data) { 22 | return new Result<>(200, "操作成功", data); 23 | } 24 | 25 | public static Result success(String message, T data) { 26 | return new Result<>(200, message, data); 27 | } 28 | 29 | public static Result error(String message) { 30 | return new Result<>(500, message, null); 31 | } 32 | 33 | public static Result error(Integer code, String message) { 34 | return new Result<>(code, message, null); 35 | } 36 | 37 | public static Result error(Integer code, String message, T data) { 38 | return new Result<>(code, message, data); 39 | } 40 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/rule/impl/RandomRule.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator.rule.impl; 2 | 3 | import com.datagenerator.generator.rule.DataRule; 4 | import com.fasterxml.jackson.annotation.JsonAlias; 5 | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; 6 | import lombok.Data; 7 | 8 | @Data 9 | @JsonIgnoreProperties(ignoreUnknown = true) 10 | public class RandomRule implements DataRule { 11 | private double min = 0; 12 | private double max = 100; 13 | @JsonAlias("isInteger") 14 | private boolean integer = false; 15 | private boolean nullable = true; 16 | private Number defaultValue; 17 | 18 | @Override 19 | public Object 
package com.datagenerator.entity;

import com.baomidou.mybatisplus.annotation.*;
import lombok.Data;
import java.time.LocalDateTime;

/**
 * Connection metadata for an external data source, mapped to the
 * `data_source` table via MyBatis-Plus.
 */
@Data
@TableName("data_source")
public class DataSource {
    @TableId(type = IdType.AUTO)
    private Long id;

    private String name;            // display name
    private String type;            // e.g. MYSQL, ORACLE, KAFKA
    private String url;             // connection URL
    private String username;
    private String password;
    private String driverClassName; // JDBC driver class
    private String description;

    @TableField(fill = FieldFill.INSERT)
    private LocalDateTime createTime;

    @TableField(fill = FieldFill.INSERT_UPDATE)
    private LocalDateTime updateTime;

    @TableLogic
    private Integer deleted; // logical-delete flag

    // Explicit getters kept as a safety net in case Lombok annotation
    // processing is not active in some build.
    public String getName() {
        return name;
    }

    public String getUrl() {
        return url;
    }

    public String getUsername() {
        return username;
    }

    public String getPassword() {
        return password;
    }
}
package com.datagenerator.validator.impl;

import com.datagenerator.validator.DataValidator;
import com.datagenerator.validator.ValidationResult;
import lombok.Data;

import java.util.Map;

/**
 * Validates that a string field of a generated record fully matches a
 * configured regular expression.
 */
@Data
public class RegexValidator implements DataValidator {
    private String field;   // field to validate
    private String pattern; // Java regex the value must fully match
    private String message; // optional custom error message

    /**
     * Fixes: restored the {@code Map<String, Object>} generics lost in this
     * copy, and guarded against a null {@code pattern} — previously
     * {@code String.matches(null)} threw a NullPointerException; a missing
     * pattern is now treated as "nothing to validate".
     *
     * @param data record under validation, keyed by field name
     * @return validation result; empty when the field/pattern is absent
     */
    @Override
    public ValidationResult validate(Map<String, Object> data) {
        ValidationResult result = new ValidationResult();

        if (field == null || pattern == null || !data.containsKey(field)) {
            return result;
        }

        Object value = data.get(field);
        if (!(value instanceof String)) {
            result.addError("字段 " + field + " 必须是字符串类型");
            return result;
        }

        if (!((String) value).matches(pattern)) {
            result.addError(message != null ? message : "字段 " + field + " 格式不正确");
        }

        return result;
    }

    @Override
    public String getType() {
        return "regex";
    }
}
| 20 | @Resource 21 | private SystemInfoService systemInfoService; 22 | 23 | @Operation(summary = "获取系统信息", description = "获取系统CPU、内存、磁盘等使用情况") 24 | @PerformanceMonitor(name = "获取系统信息", logParams = false, logResult = true) 25 | @GetMapping("/info") 26 | public Result getSystemInfo() { 27 | SystemInfo systemInfo = systemInfoService.getSystemInfo(); 28 | systemInfoService.saveSystemInfo(systemInfo); 29 | return Result.success(systemInfo); 30 | } 31 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/config/SwaggerConfig.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.config; 2 | 3 | import io.swagger.v3.oas.models.OpenAPI; 4 | import io.swagger.v3.oas.models.info.Info; 5 | import io.swagger.v3.oas.models.info.License; 6 | import io.swagger.v3.oas.models.Components; 7 | import io.swagger.v3.oas.models.security.SecurityScheme; 8 | import org.springframework.context.annotation.Bean; 9 | import org.springframework.context.annotation.Configuration; 10 | 11 | @Configuration 12 | public class SwaggerConfig { 13 | 14 | @Bean 15 | public OpenAPI openAPI() { 16 | return new OpenAPI() 17 | .info(new Info() 18 | .title("数据生成器API文档") 19 | .description("数据生成器后端接口文档") 20 | .version("1.0.0") 21 | .license(new License() 22 | .name("Apache 2.0") 23 | .url("http://www.apache.org/licenses/LICENSE-2.0.html"))) 24 | .components(new Components() 25 | .addSecuritySchemes("bearer-jwt", new SecurityScheme() 26 | .type(SecurityScheme.Type.HTTP) 27 | .scheme("bearer") 28 | .bearerFormat("JWT"))); 29 | } 30 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/rule/impl/EnumRule.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator.rule.impl; 2 | 3 | import com.datagenerator.generator.rule.DataRule; 4 | 
#!/bin/bash
# Rebuild and redeploy the full stack with Docker Compose.
# Fix: the script previously hard-required the standalone `docker-compose`
# binary; modern Docker installs ship the `docker compose` plugin instead.
# We now detect whichever is available and use it via $COMPOSE.

# 输出颜色设置
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# 打印带颜色的信息
print_info() {
    echo -e "${GREEN}[INFO] $1${NC}"
}

print_warning() {
    echo -e "${YELLOW}[WARNING] $1${NC}"
}

print_error() {
    echo -e "${RED}[ERROR] $1${NC}"
}

# 检查Docker是否安装
if ! command -v docker &> /dev/null; then
    print_error "Docker未安装,请先安装Docker"
    exit 1
fi

# 检查Docker Compose是否安装 (standalone binary or `docker compose` plugin)
if command -v docker-compose &> /dev/null; then
    COMPOSE="docker-compose"
elif docker compose version &> /dev/null; then
    COMPOSE="docker compose"
else
    print_error "Docker Compose未安装,请先安装Docker Compose"
    exit 1
fi

# 停止并删除现有容器
print_info "停止并删除现有容器..."
$COMPOSE down

# 删除旧的镜像 (ignore errors if they do not exist yet)
print_info "删除旧的镜像..."
docker rmi data-generator-frontend data-generator-backend 2>/dev/null || true

# 构建新的镜像
print_info "开始构建镜像..."
$COMPOSE build --no-cache

# 启动服务
print_info "启动服务..."
$COMPOSE up -d

# 检查服务状态
print_info "检查服务状态..."
sleep 10

if $COMPOSE ps | grep -q "Up"; then
    print_info "服务已成功启动!"
    echo -e "${GREEN}访问地址:${NC}"
    echo -e "  前端: ${GREEN}http://localhost${NC}"
    echo -e "  后端: ${GREEN}http://localhost:8080${NC}"
    echo -e "  数据库: ${GREEN}localhost:3306${NC}"
else
    print_error "服务启动失败,请检查日志:"
    $COMPOSE logs
fi
复制以下base64编码: 4 | iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAC4klEQVRYR8WXS2gTURSGv5lJmqTRNqGtVVFBBUVciAsRXIgbQVwIIoqCuHLlRsGFCxeuXLhQEMS9G8GFCxcuBBERRPBRqYIvtGprH2maNslkRu6dSZqZZJJMWuuBEObOvfd8/zn3P+fegVJKscbNWOP4rAOsWQSUUhhjCCHw+XwIIVZ9YFZdAiklxhhc18XzPKSUfwdAKYUQAsdxiEajNDU1MTQ0xOTkJMFgECHE6kZAKYXf7yeRSNDe3k5rayuO4zA+Ps7MzAyu6+I4DkKIlQeQUuL3+4nFYnR0dNDT00MkEsF1XTKZDJlMBs/zVh5ASolSimg0SldXF/v27aO5uRnP8/A8j3Q6TTqdLgAJIVYGQEpJIBAgHo/T3d3N/v37aWhoQGuN1ppUKsXs7CxCCKSUKwMgpcTv9xOPx+np6eHAgQM0NjYWAFKpFHNzc0gp8fl8KwOgtSYQCNDQ0MDu3bs5dOgQsVisoIFUKkU2my1oYEUApJQEg0Gampo4cOAAhw8fJhwOo7VmcXGRubk5HMcpaMDzPIQQvwcgpSQUCtHc3MyRI0c4evQooVAIrTWLi4vMz8/jOE5BA0qpFQAoTUFPTw/Hjh0jGAyitWZhYYH5+fmCiL0UrAiA4zg0NzfT29vL8ePHCQQCaK2Zn59nYWEBx3EKIi4HWBaA1hqtNZFIhL1793Ly5EmCwSBaa+bm5lhcXCxIQTnAsgCUUgQCAXbs2MGpU6cIh8MYY5idnS1kQT4FpQDLAlBK4ff72bp1K2fOnCEajWKMYWZmhsXFxbIaWBaA1hqfz8eWLVs4e/YsTU1NGGOYnp4mm82WZWBZAMYYfD4fW7Zs4dy5c8TjcYwxTE1NlRVxWQBjDFJKtm3bxvnz52loaMAYw+TkZFkNLAvAcRw2bdrEhQsXiMViGGOYmJgom4JlAWitSSQSXLx4kXg8jjGG8fHxshpYFoDWmo0bN3Lp0iUSiQTGGMbGxsqK+D9/TPJt3QGrHoFvQFNt9BIbIiIAAAAASUVORK5CYII= 5 | 6 | 2. 访问以下任一在线工具: 7 | - https://base64.guru/converter/decode/image 8 | - https://codebeautify.org/base64-to-image-converter 9 | - https://www.base64decode.org/ 10 | 11 | 3. 将base64编码粘贴到工具中,转换为图像 12 | 13 | 4. 下载生成的图像,并将其保存为favicon.ico 14 | 15 | 5. 将favicon.ico文件放在frontend/public/目录下 16 | 17 | 6. 
package com.datagenerator.config;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * Kafka producer configuration: a String-key/String-value producer wired
 * from the spring.kafka.bootstrap-servers property.
 * Fix: restored the generic type parameters lost in this copy of the file
 * (raw ProducerFactory / KafkaTemplate / Map).
 */
@Configuration
public class KafkaConfig {

    // Broker list, e.g. "host1:9092,host2:9092"
    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    /** Producer factory with String serializers for both key and value. */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(configProps);
    }

    /** Template used by services to publish String messages. */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
{ 10 | @TableId(type = IdType.AUTO) 11 | private Long id; 12 | 13 | private String name; 14 | private Long dataSourceId; 15 | private String targetType; // TABLE, TOPIC 16 | private String targetName; // 表名或主题名 17 | private String writeMode; // OVERWRITE, APPEND, UPDATE 18 | private String dataFormat; // JSON, AVRO, PROTOBUF 19 | private String template; // 数据生成模板 20 | private Integer batchSize; 21 | private Integer frequency; // 生成频率(秒) 22 | private Integer concurrentNum; 23 | private String status; // RUNNING, STOPPED, COMPLETED, FAILED 24 | private String cronExpression; // 定时任务表达式 25 | 26 | @TableField(exist = false) 27 | private Long maxId; // 用于记录当前最大ID 28 | 29 | @TableField(fill = FieldFill.INSERT) 30 | private LocalDateTime createTime; 31 | 32 | @TableField(fill = FieldFill.INSERT_UPDATE) 33 | private LocalDateTime updateTime; 34 | 35 | @TableLogic 36 | private Integer deleted; 37 | 38 | // 手动添加getter方法,以防Lombok注解未被正确处理 39 | public String getTemplate() { 40 | return template; 41 | } 42 | 43 | public String getWriteMode() { 44 | return writeMode; 45 | } 46 | 47 | public String getName() { 48 | return name; 49 | } 50 | } -------------------------------------------------------------------------------- /backend/src/main/resources/logback-spring.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | ${LOG_PATTERN} 10 | 11 | 12 | 13 | 14 | 15 | ${LOG_PATH}/data-generator.log 16 | 17 | ${LOG_PATH}/data-generator.%d{yyyy-MM-dd}.log 18 | 30 19 | 20 | 21 | ${LOG_PATTERN} 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/annotation/ApiOperationAspect.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.annotation; 2 | 3 | import 
io.swagger.v3.oas.annotations.Operation; 4 | import org.aspectj.lang.ProceedingJoinPoint; 5 | import org.aspectj.lang.annotation.Around; 6 | import org.aspectj.lang.annotation.Aspect; 7 | import org.aspectj.lang.reflect.MethodSignature; 8 | import org.springframework.stereotype.Component; 9 | 10 | import java.lang.reflect.Method; 11 | 12 | @Aspect 13 | @Component 14 | public class ApiOperationAspect { 15 | 16 | @Around("@annotation(com.datagenerator.annotation.ApiOperation)") 17 | public Object around(ProceedingJoinPoint point) throws Throwable { 18 | MethodSignature signature = (MethodSignature) point.getSignature(); 19 | Method method = signature.getMethod(); 20 | ApiOperation apiOperation = method.getAnnotation(ApiOperation.class); 21 | 22 | // 添加Swagger注解 23 | Operation operation = method.getAnnotation(Operation.class); 24 | if (operation == null) { 25 | operation = method.getDeclaringClass().getAnnotation(Operation.class); 26 | } 27 | 28 | if (operation == null) { 29 | operation = method.getDeclaringClass().getMethod(method.getName(), method.getParameterTypes()) 30 | .getAnnotation(Operation.class); 31 | } 32 | 33 | if (operation == null) { 34 | operation = method.getDeclaringClass().getMethod(method.getName(), method.getParameterTypes()) 35 | .getDeclaringClass().getAnnotation(Operation.class); 36 | } 37 | 38 | return point.proceed(); 39 | } 40 | } -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | 3 | services: 4 | # MySQL服务 5 | mysql: 6 | image: mysql:8.0 7 | container_name: data-generator-mysql 8 | environment: 9 | MYSQL_ROOT_PASSWORD: root 10 | MYSQL_DATABASE: data_generator 11 | MYSQL_USER: data_generator 12 | MYSQL_PASSWORD: data_generator 13 | volumes: 14 | - mysql_data:/var/lib/mysql 15 | ports: 16 | - "3306:3306" 17 | networks: 18 | - data-generator-network 19 | healthcheck: 20 | test: ["CMD", 
"mysqladmin", "ping", "-h", "localhost"] 21 | interval: 10s 22 | timeout: 5s 23 | retries: 5 24 | 25 | # Kafka服务 26 | zookeeper: 27 | image: wurstmeister/zookeeper 28 | container_name: data-generator-zookeeper 29 | ports: 30 | - "2181:2181" 31 | 32 | kafka: 33 | image: wurstmeister/kafka 34 | container_name: data-generator-kafka 35 | ports: 36 | - "9092:9092" 37 | environment: 38 | KAFKA_ADVERTISED_HOST_NAME: kafka 39 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 40 | KAFKA_CREATE_TOPICS: "test:1:1" 41 | volumes: 42 | - /var/run/docker.sock:/var/run/docker.sock 43 | depends_on: 44 | - zookeeper 45 | 46 | # 后端服务 47 | backend: 48 | build: 49 | context: ./backend 50 | dockerfile: Dockerfile 51 | container_name: data-generator-backend 52 | environment: 53 | SPRING_DATASOURCE_URL: jdbc:mysql://mysql:3306/data_generator?useSSL=false&allowPublicKeyRetrieval=true 54 | SPRING_DATASOURCE_USERNAME: data_generator 55 | SPRING_DATASOURCE_PASSWORD: data_generator 56 | ports: 57 | - "8080:8080" 58 | depends_on: 59 | mysql: 60 | condition: service_healthy 61 | networks: 62 | - data-generator-network 63 | 64 | # 前端服务 65 | frontend: 66 | build: 67 | context: ./frontend 68 | dockerfile: Dockerfile 69 | container_name: data-generator-frontend 70 | ports: 71 | - "80:80" 72 | depends_on: 73 | - backend 74 | networks: 75 | - data-generator-network 76 | 77 | networks: 78 | data-generator-network: 79 | driver: bridge 80 | 81 | volumes: 82 | mysql_data: -------------------------------------------------------------------------------- /scripts/package/package.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 设置版本号 4 | VERSION="1.0.0" 5 | PACKAGE_NAME="data-generator-${VERSION}-bin" 6 | 7 | echo "开始打包数据生成器应用 v${VERSION}..." 8 | 9 | # 创建临时目录 10 | mkdir -p ${PACKAGE_NAME} 11 | 12 | # 复制必要的文件 13 | echo "复制项目文件..." 
14 | cp -r backend ${PACKAGE_NAME}/ 15 | cp -r frontend ${PACKAGE_NAME}/ 16 | cp docker-compose.yml ${PACKAGE_NAME}/ 17 | cp start.sh ${PACKAGE_NAME}/ 18 | cp stop.sh ${PACKAGE_NAME}/ 19 | cp start.bat ${PACKAGE_NAME}/ 20 | cp stop.bat ${PACKAGE_NAME}/ 21 | cp README.md ${PACKAGE_NAME}/ 22 | 23 | # 创建bin目录 24 | mkdir -p ${PACKAGE_NAME}/bin 25 | 26 | # 创建启动脚本 27 | cat > ${PACKAGE_NAME}/bin/startup.sh << 'EOF' 28 | #!/bin/bash 29 | 30 | # 获取脚本所在目录的绝对路径 31 | SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd) 32 | BASE_DIR=$(dirname "$SCRIPT_DIR") 33 | 34 | # 切换到项目根目录 35 | cd "$BASE_DIR" 36 | 37 | # 执行启动脚本 38 | bash start.sh 39 | EOF 40 | 41 | # 创建停止脚本 42 | cat > ${PACKAGE_NAME}/bin/shutdown.sh << 'EOF' 43 | #!/bin/bash 44 | 45 | # 获取脚本所在目录的绝对路径 46 | SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd) 47 | BASE_DIR=$(dirname "$SCRIPT_DIR") 48 | 49 | # 切换到项目根目录 50 | cd "$BASE_DIR" 51 | 52 | # 执行停止脚本 53 | bash stop.sh 54 | EOF 55 | 56 | # 创建Windows启动脚本 57 | cat > ${PACKAGE_NAME}/bin/startup.bat << 'EOF' 58 | @echo off 59 | rem 获取脚本所在目录 60 | set "SCRIPT_DIR=%~dp0" 61 | set "BASE_DIR=%SCRIPT_DIR%.." 62 | 63 | rem 切换到项目根目录 64 | cd /d "%BASE_DIR%" 65 | 66 | rem 执行启动脚本 67 | call start.bat 68 | EOF 69 | 70 | # 创建Windows停止脚本 71 | cat > ${PACKAGE_NAME}/bin/shutdown.bat << 'EOF' 72 | @echo off 73 | rem 获取脚本所在目录 74 | set "SCRIPT_DIR=%~dp0" 75 | set "BASE_DIR=%SCRIPT_DIR%.." 76 | 77 | rem 切换到项目根目录 78 | cd /d "%BASE_DIR%" 79 | 80 | rem 执行停止脚本 81 | call stop.bat 82 | EOF 83 | 84 | # 添加执行权限 85 | chmod +x ${PACKAGE_NAME}/start.sh 86 | chmod +x ${PACKAGE_NAME}/stop.sh 87 | chmod +x ${PACKAGE_NAME}/bin/startup.sh 88 | chmod +x ${PACKAGE_NAME}/bin/shutdown.sh 89 | 90 | # 创建压缩包 91 | echo "创建压缩包..." 92 | tar -czf ${PACKAGE_NAME}.tar.gz ${PACKAGE_NAME} 93 | 94 | # 清理临时目录 95 | echo "清理临时文件..." 
96 | rm -rf ${PACKAGE_NAME} 97 | 98 | echo "打包完成: ${PACKAGE_NAME}.tar.gz" 99 | echo "解压后,可以使用以下命令启动应用:" 100 | echo "Linux/Mac: ./bin/startup.sh" 101 | echo "Windows: bin\\startup.bat" -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/aspect/PerformanceMonitorAspect.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.aspect; 2 | 3 | import com.datagenerator.annotation.PerformanceMonitor; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import lombok.extern.slf4j.Slf4j; 6 | import org.aspectj.lang.ProceedingJoinPoint; 7 | import org.aspectj.lang.annotation.Around; 8 | import org.aspectj.lang.annotation.Aspect; 9 | import org.aspectj.lang.reflect.MethodSignature; 10 | import org.springframework.stereotype.Component; 11 | 12 | import javax.annotation.Resource; 13 | import java.lang.reflect.Method; 14 | 15 | @Slf4j 16 | @Aspect 17 | @Component 18 | public class PerformanceMonitorAspect { 19 | 20 | @Resource 21 | private ObjectMapper objectMapper; 22 | 23 | @Around("@annotation(com.datagenerator.annotation.PerformanceMonitor)") 24 | public Object around(ProceedingJoinPoint point) throws Throwable { 25 | // 获取方法签名 26 | MethodSignature signature = (MethodSignature) point.getSignature(); 27 | Method method = signature.getMethod(); 28 | PerformanceMonitor monitor = method.getAnnotation(PerformanceMonitor.class); 29 | 30 | // 获取接口名称 31 | String name = monitor.name(); 32 | if (name.isEmpty()) { 33 | name = method.getDeclaringClass().getSimpleName() + "." 
+ method.getName(); 34 | } 35 | 36 | // 记录开始时间 37 | long startTime = System.currentTimeMillis(); 38 | 39 | try { 40 | // 记录请求参数 41 | if (monitor.logParams()) { 42 | log.info("接口[{}]请求参数:{}", name, objectMapper.writeValueAsString(point.getArgs())); 43 | } 44 | 45 | // 执行方法 46 | Object result = point.proceed(); 47 | 48 | // 记录执行时间 49 | if (monitor.logTime()) { 50 | long endTime = System.currentTimeMillis(); 51 | log.info("接口[{}]执行时间:{}ms", name, endTime - startTime); 52 | } 53 | 54 | // 记录返回结果 55 | if (monitor.logResult()) { 56 | log.info("接口[{}]返回结果:{}", name, objectMapper.writeValueAsString(result)); 57 | } 58 | 59 | return result; 60 | } catch (Throwable e) { 61 | // 记录异常信息 62 | if (monitor.logException()) { 63 | log.error("接口[{}]发生异常:", name, e); 64 | } 65 | throw e; 66 | } 67 | } 68 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/rule/DataRuleFactory.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator.rule; 2 | 3 | import com.datagenerator.generator.rule.impl.*; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.DeserializationFeature; 6 | import lombok.extern.slf4j.Slf4j; 7 | import org.springframework.stereotype.Component; 8 | 9 | @Slf4j 10 | @Component 11 | public class DataRuleFactory { 12 | 13 | private final ObjectMapper objectMapper; 14 | 15 | public DataRuleFactory() { 16 | this.objectMapper = new ObjectMapper() 17 | .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) 18 | .configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, true); 19 | } 20 | 21 | public DataRule createRule(String type, Object params) { 22 | try { 23 | log.info("创建规则,类型: {}, 参数: {}", type, params); 24 | 25 | DataRule rule; 26 | switch (type.toLowerCase()) { 27 | case "sequence": 28 | rule = objectMapper.convertValue(params, SequenceRule.class); 29 
| break; 30 | case "random": 31 | rule = objectMapper.convertValue(params, RandomRule.class); 32 | break; 33 | case "enum": 34 | rule = objectMapper.convertValue(params, EnumRule.class); 35 | break; 36 | case "date": 37 | rule = objectMapper.convertValue(params, DateRule.class); 38 | break; 39 | case "string": 40 | rule = objectMapper.convertValue(params, StringRule.class); 41 | break; 42 | case "reference": 43 | rule = objectMapper.convertValue(params, ReferenceRule.class); 44 | break; 45 | case "fixed": 46 | rule = objectMapper.convertValue(params, FixedRule.class); 47 | break; 48 | default: 49 | log.error("不支持的数据生成规则类型: {}", type); 50 | throw new IllegalArgumentException("不支持的数据生成规则类型: " + type); 51 | } 52 | 53 | log.info("规则创建成功: {}", rule); 54 | return rule; 55 | } catch (Exception e) { 56 | log.error("创建数据生成规则失败, 类型: {}, 参数: {}, 错误: {}", type, params, e.getMessage(), e); 57 | throw new RuntimeException("创建数据生成规则失败: " + e.getMessage(), e); 58 | } 59 | } 60 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/service/impl/ExecutionRecordServiceImpl.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.service.impl; 2 | 3 | import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; 4 | import com.datagenerator.entity.ExecutionRecord; 5 | import com.datagenerator.mapper.ExecutionRecordMapper; 6 | import com.datagenerator.service.ExecutionRecordService; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.springframework.stereotype.Service; 9 | 10 | import java.time.LocalDateTime; 11 | 12 | @Slf4j 13 | @Service 14 | public class ExecutionRecordServiceImpl extends ServiceImpl implements ExecutionRecordService { 15 | 16 | @Override 17 | public ExecutionRecord createRecord(Long taskId) { 18 | ExecutionRecord record = new ExecutionRecord(); 19 | record.setTaskId(taskId); 20 | record.setStartTime(LocalDateTime.now()); 21 | 
record.setStatus("RUNNING"); 22 | record.setTotalCount(0L); 23 | record.setSuccessCount(0L); 24 | record.setErrorCount(0L); 25 | save(record); 26 | return record; 27 | } 28 | 29 | @Override 30 | public boolean updateStatus(Long recordId, Integer status, String errorMessage, Long recordsCount) { 31 | ExecutionRecord record = getById(recordId); 32 | if (record == null) { 33 | throw new RuntimeException("执行记录不存在"); 34 | } 35 | 36 | // 根据状态码设置状态文本 37 | if (status == 1) { 38 | record.setStatus("SUCCESS"); // 成功 39 | } else if (status == 0) { 40 | record.setStatus("FAILED"); // 失败 41 | } else if (status == 2) { 42 | record.setStatus("STOPPED"); // 停止 43 | } else { 44 | record.setStatus("COMPLETED"); // 默认完成 45 | } 46 | 47 | record.setErrorMessage(errorMessage); 48 | record.setTotalCount(recordsCount); 49 | 50 | // 根据状态设置成功和失败记录数 51 | if ("SUCCESS".equals(record.getStatus()) || "COMPLETED".equals(record.getStatus())) { 52 | record.setSuccessCount(recordsCount); 53 | record.setErrorCount(0L); 54 | } else if ("FAILED".equals(record.getStatus())) { 55 | record.setSuccessCount(0L); 56 | record.setErrorCount(recordsCount); 57 | } else if ("STOPPED".equals(record.getStatus())) { 58 | // 停止状态下保持原有的成功和失败记录数 59 | } 60 | 61 | // 设置结束时间 62 | record.setEndTime(LocalDateTime.now()); 63 | 64 | log.info("更新执行记录状态: recordId={}, status={}, totalCount={}", recordId, record.getStatus(), recordsCount); 65 | return updateById(record); 66 | } 67 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/exception/GlobalExceptionHandler.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.exception; 2 | 3 | import com.datagenerator.common.Result; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.springframework.validation.BindException; 6 | import org.springframework.validation.FieldError; 7 | import org.springframework.web.bind.MethodArgumentNotValidException; 8 
| import org.springframework.web.bind.annotation.ExceptionHandler; 9 | import org.springframework.web.bind.annotation.RestControllerAdvice; 10 | 11 | import javax.validation.ConstraintViolation; 12 | import javax.validation.ConstraintViolationException; 13 | import java.util.List; 14 | import java.util.Set; 15 | import java.util.stream.Collectors; 16 | 17 | @Slf4j 18 | @RestControllerAdvice 19 | public class GlobalExceptionHandler { 20 | 21 | @ExceptionHandler(BusinessException.class) 22 | public Result handleBusinessException(BusinessException e) { 23 | log.error("业务异常:{}", e.getMessage()); 24 | return Result.error(e.getCode(), e.getMessage()); 25 | } 26 | 27 | @ExceptionHandler(MethodArgumentNotValidException.class) 28 | public Result handleMethodArgumentNotValidException(MethodArgumentNotValidException e) { 29 | List fieldErrors = e.getBindingResult().getFieldErrors(); 30 | String message = fieldErrors.stream() 31 | .map(error -> error.getField() + ":" + error.getDefaultMessage()) 32 | .collect(Collectors.joining(", ")); 33 | log.error("参数校验失败:{}", message); 34 | return Result.error(400, message); 35 | } 36 | 37 | @ExceptionHandler(BindException.class) 38 | public Result handleBindException(BindException e) { 39 | List fieldErrors = e.getBindingResult().getFieldErrors(); 40 | String message = fieldErrors.stream() 41 | .map(error -> error.getField() + ":" + error.getDefaultMessage()) 42 | .collect(Collectors.joining(", ")); 43 | log.error("参数绑定失败:{}", message); 44 | return Result.error(400, message); 45 | } 46 | 47 | @ExceptionHandler(ConstraintViolationException.class) 48 | public Result handleConstraintViolationException(ConstraintViolationException e) { 49 | Set> violations = e.getConstraintViolations(); 50 | String message = violations.stream() 51 | .map(violation -> violation.getPropertyPath() + ":" + violation.getMessage()) 52 | .collect(Collectors.joining(", ")); 53 | log.error("参数校验失败:{}", message); 54 | return Result.error(400, message); 55 | } 56 | 57 | 
@ExceptionHandler(Exception.class) 58 | public Result handleException(Exception e) { 59 | log.error("系统异常:", e); 60 | return Result.error("系统异常,请联系管理员"); 61 | } 62 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/service/impl/SystemInfoServiceImpl.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.service.impl; 2 | 3 | import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; 4 | import com.datagenerator.entity.SystemInfo; 5 | import com.datagenerator.mapper.SystemInfoMapper; 6 | import com.datagenerator.service.SystemInfoService; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.springframework.stereotype.Service; 9 | 10 | import java.lang.management.ManagementFactory; 11 | import java.lang.management.MemoryMXBean; 12 | import java.lang.management.MemoryUsage; 13 | import java.lang.management.OperatingSystemMXBean; 14 | import java.time.LocalDateTime; 15 | 16 | @Slf4j 17 | @Service 18 | public class SystemInfoServiceImpl extends ServiceImpl implements SystemInfoService { 19 | 20 | @Override 21 | public SystemInfo getSystemInfo() { 22 | SystemInfo systemInfo = new SystemInfo(); 23 | try { 24 | OperatingSystemMXBean osBean = ManagementFactory.getOperatingSystemMXBean(); 25 | MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean(); 26 | 27 | // 获取CPU使用率 28 | if (osBean instanceof com.sun.management.OperatingSystemMXBean) { 29 | com.sun.management.OperatingSystemMXBean sunOsBean = (com.sun.management.OperatingSystemMXBean) osBean; 30 | systemInfo.setCpuUsage(sunOsBean.getSystemCpuLoad() * 100); 31 | } 32 | 33 | // 获取内存使用率 34 | long totalMemory = Runtime.getRuntime().totalMemory(); 35 | long freeMemory = Runtime.getRuntime().freeMemory(); 36 | systemInfo.setMemoryUsage((double) (totalMemory - freeMemory) / totalMemory * 100); 37 | 38 | // 获取JVM堆内存使用率 39 | MemoryUsage heapUsage = 
memoryBean.getHeapMemoryUsage(); 40 | systemInfo.setJvmHeapUsage((double) heapUsage.getUsed() / heapUsage.getMax() * 100); 41 | 42 | // 获取JVM非堆内存使用率 43 | MemoryUsage nonHeapUsage = memoryBean.getNonHeapMemoryUsage(); 44 | systemInfo.setJvmNonHeapUsage((double) nonHeapUsage.getUsed() / nonHeapUsage.getMax() * 100); 45 | 46 | // 获取系统运行时间 47 | systemInfo.setUptime(ManagementFactory.getRuntimeMXBean().getUptime()); 48 | 49 | // 设置创建时间 50 | systemInfo.setCreateTime(LocalDateTime.now()); 51 | 52 | } catch (Exception e) { 53 | log.error("获取系统信息失败", e); 54 | } 55 | return systemInfo; 56 | } 57 | 58 | @Override 59 | public void saveSystemInfo(SystemInfo systemInfo) { 60 | try { 61 | save(systemInfo); 62 | } catch (Exception e) { 63 | log.error("保存系统信息失败", e); 64 | } 65 | } 66 | } -------------------------------------------------------------------------------- /backend/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 8888 3 | servlet: 4 | context-path: / 5 | 6 | spring: 7 | application: 8 | name: data-generator 9 | 10 | # 数据源配置 11 | datasource: 12 | driver-class-name: com.mysql.cj.jdbc.Driver 13 | url: jdbc:mysql://localhost:3306/data_generator?useUnicode=true&characterEncoding=utf8&serverTimezone=Asia/Shanghai&autoReconnect=true&failOverReadOnly=false&socketTimeout=30000&connectTimeout=30000 14 | username: datagenerator 15 | password: datagenerator123 16 | druid: 17 | initial-size: 5 18 | min-idle: 5 19 | max-active: 20 20 | max-wait: 60000 21 | validation-query: SELECT 1 22 | test-while-idle: true 23 | test-on-borrow: true 24 | test-on-return: false 25 | pool-prepared-statements: true 26 | max-pool-prepared-statement-per-connection-size: 20 27 | filters: stat,wall 28 | connection-properties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 29 | keep-alive: true 30 | time-between-eviction-runs-millis: 60000 31 | min-evictable-idle-time-millis: 300000 32 | 
validation-query-timeout: 1 33 | remove-abandoned: true 34 | remove-abandoned-timeout: 180 35 | log-abandoned: true 36 | 37 | # Redis配置 38 | redis: 39 | host: localhost 40 | port: 6379 41 | database: 0 42 | 43 | # Kafka配置 44 | kafka: 45 | bootstrap-servers: localhost:9092 46 | producer: 47 | key-serializer: org.apache.kafka.common.serialization.StringSerializer 48 | value-serializer: org.apache.kafka.common.serialization.StringSerializer 49 | consumer: 50 | group-id: data-generator 51 | auto-offset-reset: earliest 52 | key-deserializer: org.apache.kafka.common.serialization.StringDeserializer 53 | value-deserializer: org.apache.kafka.common.serialization.StringDeserializer 54 | 55 | # 邮件配置 56 | mail: 57 | host: smtp.qq.com 58 | port: 587 59 | username: your-email@qq.com 60 | password: your-email-password 61 | properties: 62 | mail: 63 | smtp: 64 | auth: true 65 | starttls: 66 | enable: true 67 | 68 | # MyBatis Plus配置 69 | mybatis-plus: 70 | mapper-locations: classpath*:/mapper/**/*.xml 71 | type-aliases-package: com.datagenerator.entity 72 | configuration: 73 | map-underscore-to-camel-case: true 74 | log-impl: org.apache.ibatis.logging.stdout.StdOutImpl 75 | global-config: 76 | db-config: 77 | logic-delete-field: deleted 78 | logic-delete-value: 1 79 | logic-not-delete-value: 0 80 | 81 | # 日志配置 82 | logging: 83 | level: 84 | com.datagenerator: debug 85 | org.springframework: info 86 | file: 87 | name: logs/data-generator.log 88 | 89 | # Swagger配置 90 | springdoc: 91 | swagger-ui: 92 | path: /swagger-ui.html 93 | tags-sorter: alpha 94 | operations-sorter: alpha 95 | api-docs: 96 | path: /v3/api-docs 97 | group-configs: 98 | - group: default 99 | paths-to-match: /** 100 | packages-to-scan: com.datagenerator.controller -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/controller/TaskController.java: -------------------------------------------------------------------------------- 1 | package 
com.datagenerator.controller; 2 | 3 | import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; 4 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 5 | import com.datagenerator.common.Result; 6 | import com.datagenerator.entity.DataTask; 7 | import com.datagenerator.service.TaskService; 8 | import lombok.RequiredArgsConstructor; 9 | import lombok.extern.slf4j.Slf4j; 10 | import org.springframework.web.bind.annotation.*; 11 | 12 | import java.util.List; 13 | 14 | @Slf4j 15 | @RestController 16 | @RequestMapping("/api/tasks") 17 | @RequiredArgsConstructor 18 | public class TaskController { 19 | 20 | private final TaskService taskService; 21 | 22 | @GetMapping 23 | public Result> list() { 24 | try { 25 | log.info("开始获取所有任务列表"); 26 | LambdaQueryWrapper wrapper = new LambdaQueryWrapper<>(); 27 | wrapper.eq(DataTask::getDeleted, 0) 28 | .orderByDesc(DataTask::getCreateTime); 29 | List tasks = taskService.list(wrapper); 30 | log.info("获取所有任务列表成功,size={}", tasks.size()); 31 | return Result.success(tasks); 32 | } catch (Exception e) { 33 | log.error("获取所有任务列表失败", e); 34 | return Result.error(500, "获取所有任务列表失败: " + e.getMessage(), null); 35 | } 36 | } 37 | 38 | @GetMapping("/page") 39 | public Result> page( 40 | @RequestParam(defaultValue = "1") Integer current, 41 | @RequestParam(defaultValue = "10") Integer size) { 42 | try { 43 | log.info("开始获取任务列表,current={}, size={}", current, size); 44 | Page page = new Page<>(current, size); 45 | LambdaQueryWrapper wrapper = new LambdaQueryWrapper<>(); 46 | wrapper.eq(DataTask::getDeleted, 0) 47 | .orderByDesc(DataTask::getCreateTime); 48 | log.info("执行分页查询,wrapper={}", wrapper.getSqlSegment()); 49 | Page result = taskService.page(page, wrapper); 50 | log.info("获取任务列表成功,total={}, records={}", result.getTotal(), result.getRecords().size()); 51 | return Result.success(result); 52 | } catch (Exception e) { 53 | log.error("获取任务列表失败", e); 54 | return Result.error(500, "获取任务列表失败: " + e.getMessage(), null); 
55 | } 56 | } 57 | 58 | @GetMapping("/{id}") 59 | public Result getById(@PathVariable Long id) { 60 | return Result.success(taskService.getById(id)); 61 | } 62 | 63 | @PostMapping 64 | public Result save(@RequestBody DataTask task) { 65 | return Result.success(taskService.save(task)); 66 | } 67 | 68 | @PutMapping("/{id}") 69 | public Result update(@PathVariable Long id, @RequestBody DataTask task) { 70 | task.setId(id); 71 | return Result.success(taskService.updateById(task)); 72 | } 73 | 74 | @DeleteMapping("/{id}") 75 | public Result delete(@PathVariable Long id) { 76 | return Result.success(taskService.removeById(id)); 77 | } 78 | 79 | @PostMapping("/{id}/start") 80 | public Result startTask(@PathVariable Long id) { 81 | return Result.success(taskService.startTask(id)); 82 | } 83 | 84 | @PostMapping("/{id}/stop") 85 | public Result stopTask(@PathVariable Long id) { 86 | return Result.success(taskService.stopTask(id)); 87 | } 88 | 89 | @PostMapping("/{id}/execute") 90 | public Result executeTask(@PathVariable Long id) { 91 | return Result.success(taskService.executeTask(id)); 92 | } 93 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/rule/impl/DateRule.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator.rule.impl; 2 | 3 | import com.datagenerator.generator.rule.DataRule; 4 | import lombok.Data; 5 | 6 | import java.time.LocalDate; 7 | import java.time.LocalDateTime; 8 | import java.time.LocalTime; 9 | import java.time.format.DateTimeFormatter; 10 | import java.util.Date; 11 | 12 | @Data 13 | public class DateRule implements DataRule { 14 | private String type = "datetime"; // datetime, date, time 15 | private String startDate; 16 | private String endDate; 17 | private String format; 18 | private boolean random = true; 19 | private long current = 0; 20 | private long step = 86400000; // 1天 21 | private 
String defaultValue; 22 | private boolean nullable = true; 23 | 24 | @Override 25 | public Object generate() { 26 | try { 27 | if (defaultValue != null) { 28 | return defaultValue; 29 | } 30 | 31 | if (nullable && Math.random() < 0.1) { // 10%的概率生成null 32 | return null; 33 | } 34 | 35 | if (startDate == null || endDate == null) { 36 | return new Date(); 37 | } 38 | 39 | long start = parseDate(startDate).getTime(); 40 | long end = parseDate(endDate).getTime(); 41 | long value; 42 | 43 | if (random) { 44 | value = start + (long) (Math.random() * (end - start)); 45 | } else { 46 | value = start + current * step; 47 | if (value > end) { 48 | value = start; 49 | } 50 | current++; 51 | } 52 | 53 | Date date = new Date(value); 54 | if (format != null) { 55 | return formatDate(date); 56 | } 57 | 58 | switch (type) { 59 | case "date": 60 | return new java.sql.Date(value); 61 | case "time": 62 | return new java.sql.Time(value); 63 | default: 64 | return date; 65 | } 66 | } catch (Exception e) { 67 | return new Date(); 68 | } 69 | } 70 | 71 | @Override 72 | public String getType() { 73 | return "date"; 74 | } 75 | 76 | @Override 77 | public Object getParams() { 78 | return this; 79 | } 80 | 81 | private Date parseDate(String dateStr) { 82 | if (format != null) { 83 | DateTimeFormatter formatter = DateTimeFormatter.ofPattern(format); 84 | if (type.equals("date")) { 85 | return java.sql.Date.valueOf(LocalDate.parse(dateStr, formatter)); 86 | } else if (type.equals("time")) { 87 | return java.sql.Time.valueOf(LocalTime.parse(dateStr, formatter)); 88 | } else { 89 | return java.sql.Timestamp.valueOf(LocalDateTime.parse(dateStr, formatter)); 90 | } 91 | } 92 | return new Date(dateStr); 93 | } 94 | 95 | private String formatDate(Date date) { 96 | if (format == null) { 97 | return date.toString(); 98 | } 99 | DateTimeFormatter formatter = DateTimeFormatter.ofPattern(format); 100 | if (type.equals("date")) { 101 | return 
formatter.format(date.toInstant().atZone(java.time.ZoneId.systemDefault()).toLocalDate()); 102 | } else if (type.equals("time")) { 103 | return formatter.format(date.toInstant().atZone(java.time.ZoneId.systemDefault()).toLocalTime()); 104 | } else { 105 | return formatter.format(date.toInstant().atZone(java.time.ZoneId.systemDefault()).toLocalDateTime()); 106 | } 107 | } 108 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/controller/ExecutionRecordController.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.controller; 2 | 3 | import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; 4 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 5 | import com.datagenerator.common.PageRequest; 6 | import com.datagenerator.common.Result; 7 | import com.datagenerator.entity.DataTask; 8 | import com.datagenerator.entity.ExecutionRecord; 9 | import com.datagenerator.service.ExecutionRecordService; 10 | import com.datagenerator.service.TaskService; 11 | import lombok.RequiredArgsConstructor; 12 | import org.springframework.format.annotation.DateTimeFormat; 13 | import org.springframework.util.StringUtils; 14 | import org.springframework.web.bind.annotation.*; 15 | 16 | import java.time.LocalDateTime; 17 | import java.util.List; 18 | import java.util.stream.Collectors; 19 | 20 | @RestController 21 | @RequestMapping("/api/execution-records") 22 | @RequiredArgsConstructor 23 | public class ExecutionRecordController { 24 | 25 | private final ExecutionRecordService executionRecordService; 26 | private final TaskService taskService; 27 | 28 | @GetMapping("/page") 29 | public Result> page( 30 | PageRequest pageRequest, 31 | String taskName, 32 | String status, 33 | @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss") LocalDateTime startTime, 34 | @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss") LocalDateTime 
endTime) { 35 | 36 | Page page = new Page<>(pageRequest.getPageNum(), pageRequest.getPageSize()); 37 | LambdaQueryWrapper wrapper = new LambdaQueryWrapper<>(); 38 | 39 | // 基本条件 40 | wrapper.eq(ExecutionRecord::getDeleted, 0); 41 | 42 | // 按状态筛选 43 | if (StringUtils.hasText(status)) { 44 | wrapper.eq(ExecutionRecord::getStatus, status); 45 | } 46 | 47 | // 按时间范围筛选 48 | if (startTime != null) { 49 | wrapper.ge(ExecutionRecord::getStartTime, startTime); 50 | } 51 | if (endTime != null) { 52 | wrapper.le(ExecutionRecord::getEndTime, endTime); 53 | } 54 | 55 | // 按任务名称筛选 56 | if (StringUtils.hasText(taskName)) { 57 | // 先查询符合名称的任务ID 58 | LambdaQueryWrapper taskWrapper = new LambdaQueryWrapper<>(); 59 | taskWrapper.like(DataTask::getName, taskName) 60 | .eq(DataTask::getDeleted, 0); 61 | List tasks = taskService.list(taskWrapper); 62 | 63 | if (!tasks.isEmpty()) { 64 | List taskIds = tasks.stream().map(DataTask::getId).collect(Collectors.toList()); 65 | wrapper.in(ExecutionRecord::getTaskId, taskIds); 66 | } else { 67 | // 如果没有找到匹配的任务,返回空结果 68 | return Result.success(new Page<>()); 69 | } 70 | } 71 | 72 | // 排序 73 | wrapper.orderByDesc(ExecutionRecord::getCreateTime); 74 | 75 | return Result.success(executionRecordService.page(page, wrapper)); 76 | } 77 | 78 | @GetMapping("/{id}") 79 | public Result getById(@PathVariable Long id) { 80 | return Result.success(executionRecordService.getById(id)); 81 | } 82 | 83 | @GetMapping("/task/{taskId}") 84 | public Result> getByTaskId(@PathVariable Long taskId, PageRequest pageRequest) { 85 | Page page = new Page<>(pageRequest.getPageNum(), pageRequest.getPageSize()); 86 | LambdaQueryWrapper wrapper = new LambdaQueryWrapper<>(); 87 | wrapper.eq(ExecutionRecord::getTaskId, taskId) 88 | .eq(ExecutionRecord::getDeleted, 0) 89 | .orderByDesc(ExecutionRecord::getCreateTime); 90 | return Result.success(executionRecordService.page(page, wrapper)); 91 | } 92 | 93 | @DeleteMapping("/{id}") 94 | public Result deleteById(@PathVariable Long id) 
{ 95 | return Result.success(executionRecordService.removeById(id)); 96 | } 97 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/controller/DataSourceController.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.controller; 2 | 3 | import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; 4 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 5 | import com.datagenerator.common.PageRequest; 6 | import com.datagenerator.common.Result; 7 | import com.datagenerator.entity.DataSource; 8 | import com.datagenerator.model.ApiResponse; 9 | import com.datagenerator.service.DataSourceService; 10 | import lombok.RequiredArgsConstructor; 11 | import org.springframework.http.HttpStatus; 12 | import org.springframework.http.ResponseEntity; 13 | import org.springframework.web.bind.annotation.*; 14 | 15 | import java.sql.SQLException; 16 | import java.util.List; 17 | import java.util.Map; 18 | 19 | @RestController 20 | @RequestMapping("/api/data-sources") 21 | @RequiredArgsConstructor 22 | public class DataSourceController { 23 | 24 | private final DataSourceService dataSourceService; 25 | 26 | @GetMapping("/page") 27 | public Result> page(PageRequest pageRequest) { 28 | Page page = new Page<>(pageRequest.getPageNum(), pageRequest.getPageSize()); 29 | LambdaQueryWrapper wrapper = new LambdaQueryWrapper<>(); 30 | wrapper.eq(DataSource::getDeleted, 0); 31 | if (pageRequest.getOrderBy() != null) { 32 | wrapper.orderBy(true, pageRequest.getAsc(), DataSource::getCreateTime); 33 | } 34 | return Result.success(dataSourceService.page(page, wrapper)); 35 | } 36 | 37 | @GetMapping("/{id}") 38 | public Result getById(@PathVariable Long id) { 39 | return Result.success(dataSourceService.getById(id)); 40 | } 41 | 42 | @PostMapping 43 | public Result save(@RequestBody DataSource dataSource) { 44 | return 
Result.success(dataSourceService.save(dataSource)); 45 | } 46 | 47 | @PutMapping 48 | public Result update(@RequestBody DataSource dataSource) { 49 | return Result.success(dataSourceService.updateById(dataSource)); 50 | } 51 | 52 | @DeleteMapping("/{id}") 53 | public Result delete(@PathVariable Long id) { 54 | DataSource dataSource = new DataSource(); 55 | dataSource.setId(id); 56 | dataSource.setDeleted(1); 57 | return Result.success(dataSourceService.updateById(dataSource)); 58 | } 59 | 60 | @PostMapping("/test") 61 | public ResponseEntity testConnection(@RequestBody DataSource dataSource) { 62 | try { 63 | dataSourceService.testConnection(dataSource); 64 | return ResponseEntity.ok(new ApiResponse(200, "连接测试成功")); 65 | } catch (SQLException e) { 66 | // 返回错误状态码和具体的错误信息 67 | return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) 68 | .body(new ApiResponse(500, e.getMessage())); 69 | } catch (Exception e) { 70 | return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) 71 | .body(new ApiResponse(500, e.getMessage())); 72 | } 73 | } 74 | 75 | @GetMapping("/{id}/tables") 76 | public Result> getTables(@PathVariable Long id) { 77 | return Result.success(dataSourceService.getTables(id)); 78 | } 79 | 80 | @GetMapping("/{id}/topics") 81 | public Result> getTopics(@PathVariable Long id) { 82 | return Result.success(dataSourceService.getTopics(id)); 83 | } 84 | 85 | @GetMapping("/{id}/tables/{tableName}/columns") 86 | public Result>> getTableColumns( 87 | @PathVariable Long id, 88 | @PathVariable String tableName) { 89 | return Result.success(dataSourceService.getTableColumns(id, tableName)); 90 | } 91 | 92 | /** 93 | * 获取表之间的依赖关系 94 | * @param id 数据源ID 95 | * @param tables 表名列表,逗号分隔 96 | * @return 表依赖关系,key为表名,value为该表依赖的表列表 97 | */ 98 | @GetMapping("/{id}/table-dependencies") 99 | public Result>> getTableDependencies( 100 | @PathVariable Long id, 101 | @RequestParam String tables) { 102 | String[] tableArray = tables.split(","); 103 | return 
package com.datagenerator.scheduler;

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.datagenerator.entity.DataTask;
import com.datagenerator.service.TaskService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledFuture;

/**
 * Schedules periodic execution of RUNNING data-generation tasks on a
 * {@link ThreadPoolTaskScheduler}.
 *
 * FIX: generic type arguments were stripped in the extracted source
 * (raw {@code Map}, {@code List}, {@code ScheduledFuture}); restored here.
 */
@Slf4j
@Component
public class TaskScheduler implements TaskSchedulingService {

    @Autowired
    @Lazy // lazy to break the TaskScheduler <-> TaskService injection cycle
    private TaskService taskService;

    @Autowired
    private ThreadPoolTaskScheduler threadPoolTaskScheduler;

    // taskId -> handle of that task's periodic job; concurrent because the
    // watchdog and user-triggered start/stop can touch it from different threads
    private final Map<Long, ScheduledFuture<?>> scheduledTasks = new ConcurrentHashMap<>();

    /**
     * On startup, re-schedule every non-deleted task whose status is RUNNING,
     * so schedules survive an application restart.
     */
    @PostConstruct
    public void init() {
        log.info("初始化任务调度器");
        List<DataTask> tasks = taskService.lambdaQuery()
                .eq(DataTask::getStatus, "RUNNING")
                .eq(DataTask::getDeleted, 0)
                .list();

        for (DataTask task : tasks) {
            scheduleTask(task);
        }
    }

    /**
     * Schedule the given task with a fixed delay of {@code frequency} seconds,
     * starting immediately. Any previous schedule for the same id is cancelled
     * first. Tasks that are not RUNNING, or are flagged deleted, are ignored.
     *
     * @param task task to schedule; id, name, status, deleted and frequency are read
     */
    @Override
    public void scheduleTask(DataTask task) {
        log.info("开始调度任务: {}, 状态: {}, 频率: {}秒", task.getName(), task.getStatus(), task.getFrequency());

        // NOTE(review): getDeleted() == 1 unboxes and would NPE on a null flag —
        // assumed non-null because the entity column defaults to 0; confirm.
        if (!"RUNNING".equals(task.getStatus()) || task.getDeleted() == 1) {
            log.warn("任务状态不正确或已删除,无法调度: {}, 状态: {}, 是否删除: {}",
                    task.getName(), task.getStatus(), task.getDeleted());
            return;
        }

        // If the task is already scheduled, cancel the old schedule first
        cancelTask(task.getId());

        try {
            Runnable runnable = () -> {
                try {
                    log.debug("开始执行任务: {}", task.getName());
                    // Execute data generation directly; task status is not touched here
                    taskService.executeTask(task.getId());
                    log.debug("任务执行完成: {}", task.getName());
                } catch (Exception e) {
                    log.error("任务执行异常: {}", task.getName(), e);
                }
            };

            // scheduleWithFixedDelay: the next run only starts after the
            // previous one has finished, so runs never overlap
            ScheduledFuture<?> future = threadPoolTaskScheduler.scheduleWithFixedDelay(
                    runnable,
                    new Date(),                  // start now
                    task.getFrequency() * 1000L  // delay between runs, in milliseconds
            );

            scheduledTasks.put(task.getId(), future);
            log.info("任务调度成功: {}, 数据生成频率: {}秒", task.getName(), task.getFrequency());
        } catch (Exception e) {
            log.error("任务调度失败: {}", task.getName(), e);
        }
    }

    /**
     * Cancel the schedule for the given task id, interrupting a run in
     * progress ({@code cancel(true)}). No-op if the task was not scheduled.
     */
    @Override
    public void cancelTask(Long taskId) {
        ScheduledFuture<?> future = scheduledTasks.remove(taskId);
        if (future != null) {
            future.cancel(true);
            log.info("取消任务调度: {}", taskId);
        } else {
            log.debug("任务未在调度中: {}", taskId);
        }
    }

    /**
     * Watchdog: every minute, re-schedule any RUNNING, non-deleted task that
     * has no entry in {@link #scheduledTasks}.
     */
    @Scheduled(fixedRate = 60000)
    public void checkTasks() {
        log.debug("开始检查任务状态");
        try {
            List<DataTask> tasks = taskService.lambdaQuery()
                    .eq(DataTask::getStatus, "RUNNING")
                    .eq(DataTask::getDeleted, 0)
                    .list();

            log.debug("当前运行中的任务数量: {}", tasks.size());

            for (DataTask task : tasks) {
                // Re-schedule tasks that lost their schedule (e.g. after an error)
                if (!scheduledTasks.containsKey(task.getId())) {
                    log.info("发现未调度的运行中任务,重新调度: {}", task.getName());
                    scheduleTask(task);
                }
            }
        } catch (Exception e) {
            log.error("检查任务状态时发生异常", e);
        }
    }

    /**
     * Run the task once, immediately, outside of its periodic schedule.
     */
    public void executeTask(Long taskId) {
        taskService.executeTask(taskId);
    }
}
# Package version
$VERSION = "1.0.0"
$PACKAGE_NAME = "data-generator-$VERSION-bin"

Write-Host "开始打包数据生成器应用 v$VERSION..."

# Recreate a clean staging directory
if (Test-Path $PACKAGE_NAME) {
    Remove-Item -Path $PACKAGE_NAME -Recurse -Force
}
New-Item -Path $PACKAGE_NAME -ItemType Directory | Out-Null
New-Item -Path "$PACKAGE_NAME\bin" -ItemType Directory | Out-Null
New-Item -Path "$PACKAGE_NAME\backend" -ItemType Directory | Out-Null
New-Item -Path "$PACKAGE_NAME\frontend" -ItemType Directory | Out-Null

# Copy top-level files; skip any that are missing
# (FIX: stop.sh is not present in the repository, the unconditional copy errored)
Write-Host "复制项目文件..."
foreach ($file in @("docker-compose.yml", "start.bat", "stop.bat", "start.sh", "stop.sh", "README.md")) {
    if (Test-Path $file) {
        Copy-Item -Path $file -Destination "$PACKAGE_NAME\" -Force
    }
}

# Generate wrapper start/stop scripts under bin/
Write-Host "创建启动和停止脚本..."

# Windows startup wrapper (CRLF line endings)
$startupBat = "@echo off`r`n"
$startupBat += "rem 获取脚本所在目录`r`n"
$startupBat += "set ""SCRIPT_DIR=%~dp0""`r`n"
$startupBat += "set ""BASE_DIR=%SCRIPT_DIR%..""`r`n"
$startupBat += "`r`n"
$startupBat += "rem 切换到项目根目录`r`n"
$startupBat += "cd /d ""%BASE_DIR%""`r`n"
$startupBat += "`r`n"
$startupBat += "rem 执行启动脚本`r`n"
$startupBat += "call start.bat`r`n"
# NOTE(review): ASCII encoding turns the Chinese comments in the generated
# scripts into '?'; kept to match original behaviour — consider UTF8.
[System.IO.File]::WriteAllText("$PACKAGE_NAME\bin\startup.bat", $startupBat, [System.Text.Encoding]::ASCII)

# Windows shutdown wrapper
$shutdownBat = "@echo off`r`n"
$shutdownBat += "rem 获取脚本所在目录`r`n"
$shutdownBat += "set ""SCRIPT_DIR=%~dp0""`r`n"
$shutdownBat += "set ""BASE_DIR=%SCRIPT_DIR%..""`r`n"
$shutdownBat += "`r`n"
$shutdownBat += "rem 切换到项目根目录`r`n"
$shutdownBat += "cd /d ""%BASE_DIR%""`r`n"
$shutdownBat += "`r`n"
$shutdownBat += "rem 执行停止脚本`r`n"
$shutdownBat += "call stop.bat`r`n"
[System.IO.File]::WriteAllText("$PACKAGE_NAME\bin\shutdown.bat", $shutdownBat, [System.Text.Encoding]::ASCII)

# Linux startup wrapper (LF line endings).
# FIX: the original escaped quotes/dollars as \" and \$ inside double-quoted
# PowerShell strings; backslash is not an escape character in PowerShell, so
# the script failed to parse. PowerShell escapes with the backtick: `" and `$.
$startupSh = "#!/bin/bash`n"
$startupSh += "`n"
$startupSh += "# 获取脚本所在目录的绝对路径`n"
$startupSh += "SCRIPT_DIR=`$(cd `"`$(dirname `"`$0`")`" && pwd)`n"
$startupSh += "BASE_DIR=`$(dirname `"`$SCRIPT_DIR`")`n"
$startupSh += "`n"
$startupSh += "# 切换到项目根目录`n"
$startupSh += "cd `"`$BASE_DIR`"`n"
$startupSh += "`n"
$startupSh += "# 添加执行权限`n"
$startupSh += "chmod +x start.sh`n"
$startupSh += "`n"
$startupSh += "# 执行启动脚本`n"
$startupSh += "./start.sh`n"
[System.IO.File]::WriteAllText("$PACKAGE_NAME\bin\startup.sh", $startupSh, [System.Text.Encoding]::ASCII)

# Linux shutdown wrapper (same escaping fix as above)
$shutdownSh = "#!/bin/bash`n"
$shutdownSh += "`n"
$shutdownSh += "# 获取脚本所在目录的绝对路径`n"
$shutdownSh += "SCRIPT_DIR=`$(cd `"`$(dirname `"`$0`")`" && pwd)`n"
$shutdownSh += "BASE_DIR=`$(dirname `"`$SCRIPT_DIR`")`n"
$shutdownSh += "`n"
$shutdownSh += "# 切换到项目根目录`n"
$shutdownSh += "cd `"`$BASE_DIR`"`n"
$shutdownSh += "`n"
$shutdownSh += "# 添加执行权限`n"
$shutdownSh += "chmod +x stop.sh`n"
$shutdownSh += "`n"
$shutdownSh += "# 执行停止脚本`n"
$shutdownSh += "./stop.sh`n"
[System.IO.File]::WriteAllText("$PACKAGE_NAME\bin\shutdown.sh", $shutdownSh, [System.Text.Encoding]::ASCII)

# Directories never copied into the staging area
$excludeDirs = @("node_modules", "target", ".git", ".idea", ".vscode")

# Copy one source tree into the staging dir, skipping excluded folders.
# Extracted from the two near-identical loops in the original; the prefix
# length is <cwd> + "\<dir>\" (SourceDir length + 2 separators).
function Copy-ProjectTree {
    param([string]$SourceDir)
    $prefixLen = (Get-Location).Path.Length + $SourceDir.Length + 2
    Get-ChildItem -Path $SourceDir -Recurse -File | ForEach-Object {
        $fullPath = $_.FullName
        foreach ($dir in $excludeDirs) {
            if ($fullPath -match [regex]::Escape("\$dir\")) { return }  # skip this file
        }
        $relativePath = $fullPath.Substring($prefixLen)
        $targetPath = "$PACKAGE_NAME\$SourceDir\$relativePath"
        $targetDir = Split-Path -Path $targetPath -Parent
        if (-not (Test-Path $targetDir)) {
            New-Item -Path $targetDir -ItemType Directory -Force | Out-Null
        }
        Copy-Item -Path $fullPath -Destination $targetPath -Force
    }
}

Write-Host "复制backend目录..."
Copy-ProjectTree -SourceDir "backend"

Write-Host "复制frontend目录..."
Copy-ProjectTree -SourceDir "frontend"

# Zip the staging directory and clean up
Write-Host "创建压缩包..."
Compress-Archive -Path "$PACKAGE_NAME\*" -DestinationPath "$PACKAGE_NAME.zip" -Force

Write-Host "清理临时文件..."
Remove-Item -Path $PACKAGE_NAME -Recurse -Force

Write-Host "打包完成!"
package com.datagenerator.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.datagenerator.entity.DataTask;
import com.datagenerator.entity.ExecutionRecord;
import com.datagenerator.mapper.TaskMapper;
import com.datagenerator.scheduler.TaskSchedulingService;
import com.datagenerator.service.DataGenerateService;
import com.datagenerator.service.ExecutionRecordService;
import com.datagenerator.service.TaskService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.time.Duration;
import java.time.LocalDateTime;
import java.util.Map;
import java.util.concurrent.*;

/**
 * Task lifecycle service: start/stop tasks and run a single generation pass.
 *
 * FIX: generic type arguments were stripped in the extracted source
 * (raw {@code ServiceImpl}, {@code LambdaUpdateWrapper}, ...); restored here.
 */
@Slf4j
@Service
public class TaskServiceImpl extends ServiceImpl<TaskMapper, DataTask> implements TaskService {

    @Autowired
    private DataGenerateService dataGenerateService;

    @Autowired
    @Lazy // breaks the circular dependency with the scheduler
    private TaskSchedulingService taskSchedulingService;

    @Autowired
    private ExecutionRecordService executionRecordService;

    // NOTE(review): currently unused in this class — presumably reserved for a
    // retry mechanism; confirm before removing.
    private static final int MAX_RETRY_ATTEMPTS = 3;
    private static final long RETRY_DELAY_MS = 1000;

    /**
     * Mark the task RUNNING and hand it to the scheduler.
     *
     * @param taskId id of the task to start
     * @return true if the status update succeeded; false if the task is missing
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public boolean startTask(Long taskId) {
        log.info("开始启动任务,taskId={}", taskId);
        DataTask task = getById(taskId);
        if (task == null) {
            log.error("任务不存在,taskId={}", taskId);
            return false;
        }
        task.setStatus("RUNNING");
        boolean result = updateById(task);
        if (result) {
            // Only schedule once the status change has been persisted
            taskSchedulingService.scheduleTask(task);
        }
        log.info("任务启动{},taskId={}", result ? "成功" : "失败", taskId);
        return result;
    }

    /**
     * Stop a task: persist STOPPED via a direct UPDATE, cancel its schedule,
     * close its Kafka producers, and mark its latest RUNNING execution record
     * as stopped. Secondary failures are logged but do not abort the stop.
     *
     * @param taskId id of the task to stop
     * @return true unless the primary status update itself threw
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public boolean stopTask(Long taskId) {
        log.info("开始停止任务,taskId={}", taskId);

        try {
            // 1. Update the status with a direct UPDATE so stale cached entities are bypassed
            getBaseMapper().update(null,
                    new LambdaUpdateWrapper<DataTask>()
                            .eq(DataTask::getId, taskId)
                            .set(DataTask::getStatus, "STOPPED")
                            .set(DataTask::getUpdateTime, LocalDateTime.now()));

            log.info("已直接更新数据库中的任务状态为STOPPED,taskId={}", taskId);

            // 2. Cancel the periodic schedule
            taskSchedulingService.cancelTask(taskId);
            log.info("已取消任务调度,taskId={}", taskId);

            // 3. Close Kafka connections (best effort)
            try {
                dataGenerateService.forceCloseKafkaConnections(taskId);
                log.info("已强制关闭Kafka连接,taskId={}", taskId);
            } catch (Exception e) {
                log.warn("强制关闭Kafka连接时发生异常,taskId={}", taskId, e);
            }

            // 4. Mark the most recent RUNNING execution record as stopped (best effort)
            try {
                LambdaQueryWrapper<ExecutionRecord> wrapper = new LambdaQueryWrapper<>();
                wrapper.eq(ExecutionRecord::getTaskId, taskId)
                        .eq(ExecutionRecord::getStatus, "RUNNING")
                        .orderByDesc(ExecutionRecord::getCreateTime)
                        .last("LIMIT 1");

                ExecutionRecord record = executionRecordService.getOne(wrapper);
                if (record != null) {
                    // status code 2 == STOPPED in updateStatus' convention
                    executionRecordService.updateStatus(record.getId(), 2, "任务被手动停止", record.getTotalCount());
                    log.info("已更新执行记录状态为STOPPED,recordId={}", record.getId());
                }
            } catch (Exception e) {
                log.warn("更新执行记录状态时发生异常,taskId={}", taskId, e);
            }

            // 5. NOTE(review): this read was labelled "clear cache", but
            // selectById only loads the row — it does not evict anything.
            // Kept for behaviour parity; confirm whether it can be dropped.
            getBaseMapper().selectById(taskId);

            return true;
        } catch (Exception e) {
            log.error("停止任务失败,taskId={}", taskId, e);
            return false;
        }
    }

    /**
     * Run one generation pass for the task if it is currently RUNNING.
     * Errors are logged and swallowed so the periodic schedule keeps going.
     *
     * @param id task id
     * @return true if a pass executed; false if skipped or failed
     */
    @Override
    @Transactional
    public boolean executeTask(Long id) {
        try {
            DataTask task = getById(id);
            if (task == null) {
                throw new RuntimeException("任务不存在");
            }

            // Skip unless RUNNING. (FIX: the original also tested
            // "STOPPING".equals(status) after !"RUNNING".equals(status),
            // which is unreachable — a STOPPING status already fails the first test.)
            if (!"RUNNING".equals(task.getStatus())) {
                log.info("任务状态不是运行中或正在停止中,跳过执行: taskId={}, status={}", id, task.getStatus());
                return false;
            }

            // Generate the data
            dataGenerateService.executeTask(task);

            // Record the last execution time without touching the status
            task.setUpdateTime(LocalDateTime.now());
            updateById(task);

            return true;
        } catch (Exception e) {
            log.error("数据生成失败,taskId={}", id, e);
            // Log the error but keep the task running
            return false;
        }
    }
}
@echo off
setlocal enabledelayedexpansion

REM Package version
set VERSION=1.0.0
set PACKAGE_NAME=data-generator-%VERSION%-bin

echo 开始打包数据生成器应用 v%VERSION%...

REM Recreate a clean staging directory
if exist %PACKAGE_NAME% rmdir /s /q %PACKAGE_NAME%
mkdir %PACKAGE_NAME%
mkdir %PACKAGE_NAME%\bin
mkdir %PACKAGE_NAME%\backend
mkdir %PACKAGE_NAME%\frontend

REM Copy top-level files
REM (FIX: stop.sh is not present in the repository, so guard its copy)
echo 复制项目文件...
copy docker-compose.yml %PACKAGE_NAME%\
copy start.bat %PACKAGE_NAME%\
copy stop.bat %PACKAGE_NAME%\
copy start.sh %PACKAGE_NAME%\
if exist stop.sh copy stop.sh %PACKAGE_NAME%\
copy README.md %PACKAGE_NAME%\

REM Generate wrapper start/stop scripts under bin\
echo 创建启动和停止脚本...

REM Windows startup wrapper
echo @echo off > %PACKAGE_NAME%\bin\startup.bat
echo rem 获取脚本所在目录 >> %PACKAGE_NAME%\bin\startup.bat
echo set "SCRIPT_DIR=%%~dp0" >> %PACKAGE_NAME%\bin\startup.bat
echo set "BASE_DIR=%%SCRIPT_DIR%%.." >> %PACKAGE_NAME%\bin\startup.bat
echo. >> %PACKAGE_NAME%\bin\startup.bat
echo rem 切换到项目根目录 >> %PACKAGE_NAME%\bin\startup.bat
echo cd /d "%%BASE_DIR%%" >> %PACKAGE_NAME%\bin\startup.bat
echo. >> %PACKAGE_NAME%\bin\startup.bat
echo rem 执行启动脚本 >> %PACKAGE_NAME%\bin\startup.bat
echo call start.bat >> %PACKAGE_NAME%\bin\startup.bat

REM Windows shutdown wrapper
echo @echo off > %PACKAGE_NAME%\bin\shutdown.bat
echo rem 获取脚本所在目录 >> %PACKAGE_NAME%\bin\shutdown.bat
echo set "SCRIPT_DIR=%%~dp0" >> %PACKAGE_NAME%\bin\shutdown.bat
echo set "BASE_DIR=%%SCRIPT_DIR%%.." >> %PACKAGE_NAME%\bin\shutdown.bat
echo. >> %PACKAGE_NAME%\bin\shutdown.bat
echo rem 切换到项目根目录 >> %PACKAGE_NAME%\bin\shutdown.bat
echo cd /d "%%BASE_DIR%%" >> %PACKAGE_NAME%\bin\shutdown.bat
echo. >> %PACKAGE_NAME%\bin\shutdown.bat
echo rem 执行停止脚本 >> %PACKAGE_NAME%\bin\shutdown.bat
echo call stop.bat >> %PACKAGE_NAME%\bin\shutdown.bat

REM Generate the Linux wrappers via PowerShell; here \" is the cmd-level
REM escape for a double quote inside the -Command argument, which is correct
REM (unlike the equivalent lines in simple-package.ps1).
powershell -Command "Set-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value '#!/bin/bash' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value '' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value '# 获取脚本所在目录的绝对路径' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value 'SCRIPT_DIR=$(cd \"$(dirname \"$0\")\" && pwd)' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value 'BASE_DIR=$(dirname \"$SCRIPT_DIR\")' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value '' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value '# 切换到项目根目录' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value 'cd \"$BASE_DIR\"' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value '' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value '# 添加执行权限' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value 'chmod +x start.sh' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value '' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value '# 执行启动脚本' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\startup.sh' -Value './start.sh' -Encoding ASCII"

powershell -Command "Set-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value '#!/bin/bash' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value '' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value '# 获取脚本所在目录的绝对路径' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value 'SCRIPT_DIR=$(cd \"$(dirname \"$0\")\" && pwd)' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value 'BASE_DIR=$(dirname \"$SCRIPT_DIR\")' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value '' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value '# 切换到项目根目录' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value 'cd \"$BASE_DIR\"' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value '' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value '# 添加执行权限' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value 'chmod +x stop.sh' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value '' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value '# 执行停止脚本' -Encoding ASCII"
powershell -Command "Add-Content -Path '%PACKAGE_NAME%\bin\shutdown.sh' -Value './stop.sh' -Encoding ASCII"

REM Copy the backend/frontend trees, excluding build and IDE folders
echo 使用PowerShell复制目录...
powershell -Command "Get-ChildItem -Path 'backend' -Recurse -File | Where-Object { $_.FullName -notlike '*\node_modules\*' -and $_.FullName -notlike '*\target\*' -and $_.FullName -notlike '*\.git\*' -and $_.FullName -notlike '*\.idea\*' -and $_.FullName -notlike '*\.vscode\*' } | ForEach-Object { $destPath = $_.FullName.Replace((Get-Location).Path + '\backend', '%PACKAGE_NAME%\backend'); $destDir = Split-Path -Path $destPath -Parent; if (-not (Test-Path $destDir)) { New-Item -Path $destDir -ItemType Directory -Force | Out-Null }; Copy-Item -Path $_.FullName -Destination $destPath -Force }"

powershell -Command "Get-ChildItem -Path 'frontend' -Recurse -File | Where-Object { $_.FullName -notlike '*\node_modules\*' -and $_.FullName -notlike '*\.git\*' -and $_.FullName -notlike '*\.idea\*' -and $_.FullName -notlike '*\.vscode\*' } | ForEach-Object { $destPath = $_.FullName.Replace((Get-Location).Path + '\frontend', '%PACKAGE_NAME%\frontend'); $destDir = Split-Path -Path $destPath -Parent; if (-not (Test-Path $destDir)) { New-Item -Path $destDir -ItemType Directory -Force | Out-Null }; Copy-Item -Path $_.FullName -Destination $destPath -Force }"

REM Zip the staging directory and clean up
echo 创建压缩包...
powershell -Command "Compress-Archive -Path '%PACKAGE_NAME%\*' -DestinationPath '%PACKAGE_NAME%.zip' -Force"

echo 清理临时文件...
rmdir /s /q %PACKAGE_NAME%

echo 打包完成!
package com.datagenerator.service;

import com.datagenerator.entity.DataTask;
import com.datagenerator.entity.DataSource;
import com.datagenerator.entity.ExecutionRecord;
import com.datagenerator.generator.DataGenerator;
import com.datagenerator.generator.DataGeneratorFactory;
import com.datagenerator.generator.impl.RelationalDatabaseGenerator;
import com.datagenerator.util.JsonUtil;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.BatchUpdateException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
import java.sql.SQLException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import java.util.Iterator;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.ProducerFactory;
import java.util.concurrent.TimeUnit;
import com.datagenerator.mapper.TaskMapper;
import javax.annotation.PostConstruct;
import java.time.LocalDateTime;
import java.util.Arrays;

/**
 * Executes data-generation tasks against Kafka or a relational database,
 * keeping one execution record per run.
 *
 * FIXES: generic type arguments restored (stripped in the extracted source);
 * Kafka sends now go through the task-scoped producer factory — the original
 * built a per-task factory but sent via the shared template, so
 * {@link #forceCloseKafkaConnections(Long)} reset a factory that had never
 * produced anything.
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class DataGenerateService {

    private final DataSourceService dataSourceService;
    // NOTE(review): static bridge to the injected bean — presumably for
    // non-Spring callers; confirm it is still needed.
    private static DataSourceService staticDataSourceService;
    private final ExecutionRecordService executionRecordService;
    // Shared template from the Spring context; per-task sends use the
    // task-scoped factories below so connections can be closed per task.
    private final KafkaTemplate<String, String> kafkaProducer;
    private final DataGeneratorFactory dataGeneratorFactory;
    private final TaskMapper taskMapper;
    private final RelationalDatabaseGenerator relationalDatabaseGenerator;

    // One producer factory per task id, so stopping a task closes only its connections.
    // Typed as DefaultKafkaProducerFactory because reset() is not on the interface.
    private final Map<Long, DefaultKafkaProducerFactory<String, String>> producerFactories = new ConcurrentHashMap<>();

    private final ObjectMapper objectMapper = new ObjectMapper();

    @PostConstruct
    public void init() {
        staticDataSourceService = dataSourceService;
    }

    /**
     * Run one generation pass for the task: dispatch to Kafka or the
     * relational generator, and record the outcome in an execution record.
     * Never throws; failures are persisted on the record and logged.
     */
    public void executeTask(DataTask task) {
        log.info("开始执行数据生成任务:{}", task.getId());

        // Only RUNNING tasks are executed
        if (!"RUNNING".equals(task.getStatus())) {
            log.warn("任务 {} 状态不是运行中状态,跳过执行", task.getId());
            return;
        }

        // Resolve the data source; skip if missing or soft-deleted
        DataSource dataSource = dataSourceService.getById(task.getDataSourceId());
        if (dataSource == null || dataSource.getDeleted() == 1) {
            log.error("任务 {} 的数据源不存在或已删除", task.getId());
            return;
        }

        // Open an execution record for this run
        ExecutionRecord record = new ExecutionRecord();
        record.setTaskId(task.getId());
        record.setStartTime(LocalDateTime.now());
        record.setStatus("RUNNING");
        executionRecordService.save(record);

        try {
            // Dispatch on data source type
            if ("KAFKA".equals(dataSource.getType())) {
                processKafkaTask(task, dataSource);
            } else {
                // Relational database: targetName holds a comma-separated table list
                String[] tables = task.getTargetName().split(",");
                relationalDatabaseGenerator.generateData(task, dataSource, tables);
            }

            record.setStatus("SUCCESS");
            record.setEndTime(LocalDateTime.now());
            executionRecordService.updateById(record);

            // Touch the task's update time without changing its status
            task.setUpdateTime(LocalDateTime.now());
            taskMapper.updateById(task);

            log.info("任务 {} 执行完成", task.getId());

        } catch (Exception e) {
            log.error("任务 {} 执行失败:{}", task.getId(), e.getMessage(), e);

            record.setStatus("FAILED");
            record.setEndTime(LocalDateTime.now());
            record.setErrorMessage(e.getMessage());
            executionRecordService.updateById(record);

            task.setUpdateTime(LocalDateTime.now());
            taskMapper.updateById(task);
        }
    }

    /**
     * Generate a batch of records and publish each one as JSON to the task's
     * target topic, using a producer factory scoped to this task.
     */
    private void processKafkaTask(DataTask task, DataSource dataSource) {
        log.info("开始处理Kafka任务:{}", task.getId());

        try {
            // Get or lazily create the task-scoped producer factory
            DefaultKafkaProducerFactory<String, String> producerFactory = producerFactories.computeIfAbsent(
                    task.getId(),
                    id -> {
                        Map<String, Object> configs = new HashMap<>();
                        configs.put("bootstrap.servers", dataSource.getUrl());
                        configs.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
                        configs.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
                        return new DefaultKafkaProducerFactory<>(configs);
                    }
            );

            // FIX: send through the task-scoped factory instead of the shared
            // kafkaProducer, so forceCloseKafkaConnections actually closes the
            // producers used by this task.
            KafkaTemplate<String, String> taskProducer = new KafkaTemplate<>(producerFactory);

            // Build the generator for the task's data format
            DataGenerator generator = dataGeneratorFactory.createGenerator(task.getDataFormat());

            // Generate one batch and publish each record as JSON
            List<Map<String, Object>> data = generator.generate(task.getTemplate(), task.getBatchSize());
            for (Map<String, Object> record : data) {
                String value = JsonUtil.toJson(record);
                taskProducer.send(task.getTargetName(), value);
            }

            log.info("Kafka任务 {} 数据生成完成,共生成 {} 条记录", task.getId(), data.size());

        } catch (Exception e) {
            log.error("Kafka任务处理失败:{}", e.getMessage(), e);
            throw e; // rethrow so executeTask marks the run FAILED
        }
    }

    /**
     * Close and discard the producer factory created for the given task.
     * Safe to call for tasks that never produced to Kafka.
     */
    public void forceCloseKafkaConnections(Long taskId) {
        DefaultKafkaProducerFactory<String, String> factory = producerFactories.remove(taskId);
        if (factory != null) {
            try {
                factory.reset(); // closes the cached producer(s)
                log.info("成功关闭任务 {} 的Kafka连接", taskId);
            } catch (Exception e) {
                log.error("关闭任务 {} 的Kafka连接时发生错误:{}", taskId, e.getMessage());
            }
        }
    }
}
`driver_class_name` varchar(100) DEFAULT NULL COMMENT '驱动类名', 33 | `description` varchar(500) DEFAULT NULL COMMENT '描述', 34 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 35 | `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 36 | `deleted` tinyint(4) NOT NULL DEFAULT '0' COMMENT '是否删除', 37 | PRIMARY KEY (`id`) 38 | ) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COMMENT='数据源配置表'; 39 | /*!40101 SET character_set_client = @saved_cs_client */; 40 | 41 | -- 42 | -- Table structure for table `data_task` 43 | -- 44 | 45 | DROP TABLE IF EXISTS `data_task`; 46 | /*!40101 SET @saved_cs_client = @@character_set_client */; 47 | /*!40101 SET character_set_client = utf8 */; 48 | CREATE TABLE `data_task` ( 49 | `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键ID', 50 | `name` varchar(100) NOT NULL COMMENT '任务名称', 51 | `data_source_id` bigint(20) NOT NULL COMMENT '数据源ID', 52 | `target_type` varchar(20) NOT NULL COMMENT '目标类型', 53 | `target_name` varchar(5000) NOT NULL COMMENT '目标名称', 54 | `write_mode` varchar(20) NOT NULL COMMENT '写入模式', 55 | `data_format` varchar(20) NOT NULL COMMENT '数据格式', 56 | `template` text COMMENT '数据生成模板', 57 | `batch_size` int(11) NOT NULL DEFAULT '1000' COMMENT '批量大小', 58 | `frequency` int(11) NOT NULL DEFAULT '1' COMMENT '生成频率(秒)', 59 | `concurrent_num` int(11) NOT NULL DEFAULT '1' COMMENT '并发数', 60 | `status` varchar(20) NOT NULL DEFAULT 'STOPPED' COMMENT '任务状态', 61 | `cron_expression` varchar(100) DEFAULT NULL COMMENT '定时任务表达式', 62 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 63 | `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 64 | `deleted` tinyint(4) NOT NULL DEFAULT '0' COMMENT '是否删除', 65 | PRIMARY KEY (`id`), 66 | KEY `data_source_id` (`data_source_id`), 67 | CONSTRAINT `data_task_ibfk_1` FOREIGN KEY (`data_source_id`) REFERENCES `data_source` (`id`) 68 | ) ENGINE=InnoDB 
AUTO_INCREMENT=6 DEFAULT CHARSET=utf8mb4 COMMENT='数据生成任务表'; 69 | /*!40101 SET character_set_client = @saved_cs_client */; 70 | 71 | -- 72 | -- Table structure for table `system_info` 73 | -- 74 | 75 | DROP TABLE IF EXISTS `system_info`; 76 | /*!40101 SET @saved_cs_client = @@character_set_client */; 77 | /*!40101 SET character_set_client = utf8 */; 78 | CREATE TABLE `system_info` ( 79 | `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键ID', 80 | `cpu_usage` double DEFAULT NULL COMMENT 'CPU使用率', 81 | `memory_usage` double DEFAULT NULL COMMENT '内存使用率', 82 | `disk_usage` double DEFAULT NULL COMMENT '磁盘使用率', 83 | `jvm_heap_usage` double DEFAULT NULL COMMENT 'JVM堆内存使用率', 84 | `jvm_non_heap_usage` double DEFAULT NULL COMMENT 'JVM非堆内存使用率', 85 | `uptime` bigint(20) DEFAULT NULL COMMENT '系统运行时间(毫秒)', 86 | `create_time` datetime DEFAULT NULL COMMENT '创建时间', 87 | PRIMARY KEY (`id`) 88 | ) ENGINE=InnoDB AUTO_INCREMENT=1248 DEFAULT CHARSET=utf8mb4 COMMENT='系统信息表'; 89 | /*!40101 SET character_set_client = @saved_cs_client */; 90 | 91 | -- 92 | -- Table structure for table `task_execution` 93 | -- 94 | 95 | DROP TABLE IF EXISTS `task_execution`; 96 | /*!40101 SET @saved_cs_client = @@character_set_client */; 97 | /*!40101 SET character_set_client = utf8 */; 98 | CREATE TABLE `task_execution` ( 99 | `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键ID', 100 | `task_id` bigint(20) NOT NULL COMMENT '任务ID', 101 | `start_time` datetime NOT NULL COMMENT '开始时间', 102 | `end_time` datetime DEFAULT NULL COMMENT '结束时间', 103 | `status` varchar(20) NOT NULL COMMENT '执行状态', 104 | `total_count` bigint(20) NOT NULL DEFAULT '0' COMMENT '总记录数', 105 | `success_count` bigint(20) NOT NULL DEFAULT '0' COMMENT '成功记录数', 106 | `error_count` bigint(20) NOT NULL DEFAULT '0' COMMENT '失败记录数', 107 | `error_message` text COMMENT '错误信息', 108 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 109 | `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP 
COMMENT '更新时间', 110 | `deleted` tinyint(4) NOT NULL DEFAULT '0' COMMENT '是否删除', 111 | PRIMARY KEY (`id`), 112 | KEY `task_id` (`task_id`), 113 | CONSTRAINT `task_execution_ibfk_1` FOREIGN KEY (`task_id`) REFERENCES `data_task` (`id`) 114 | ) ENGINE=InnoDB AUTO_INCREMENT=217 DEFAULT CHARSET=utf8mb4 COMMENT='任务执行记录表'; 115 | /*!40101 SET character_set_client = @saved_cs_client */; 116 | 117 | -- 118 | -- Dumping routines for database 'data_generator' 119 | -- 120 | /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; 121 | 122 | /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; 123 | /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; 124 | /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; 125 | /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; 126 | /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; 127 | /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; 128 | /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; 129 | 130 | -- Dump completed on 2025-03-11 21:35:50 131 | -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/impl/TemplateDataGenerator.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator.impl; 2 | 3 | import com.datagenerator.generator.DataGenerator; 4 | import com.datagenerator.generator.rule.DataRule; 5 | import com.datagenerator.generator.rule.DataRuleFactory; 6 | import com.datagenerator.metadata.ForeignKeyMetadata; 7 | import com.fasterxml.jackson.databind.ObjectMapper; 8 | import com.fasterxml.jackson.databind.JsonNode; 9 | import com.fasterxml.jackson.databind.DeserializationFeature; 10 | import com.fasterxml.jackson.databind.node.ObjectNode; 11 | import lombok.extern.slf4j.Slf4j; 12 | import org.springframework.stereotype.Component; 13 | 14 | import java.util.*; 15 | 16 | @Slf4j 17 | @Component 18 | public class TemplateDataGenerator implements DataGenerator { 19 | 20 | private 
final ObjectMapper objectMapper; 21 | private final DataRuleFactory ruleFactory; 22 | private Map rules; 23 | private List fields; 24 | private Map foreignKeyMetadataMap; 25 | 26 | public TemplateDataGenerator(DataRuleFactory ruleFactory) { 27 | this.objectMapper = new ObjectMapper() 28 | .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) 29 | .configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, true); 30 | this.ruleFactory = ruleFactory; 31 | this.rules = new HashMap<>(); 32 | this.fields = new ArrayList<>(); 33 | this.foreignKeyMetadataMap = new HashMap<>(); 34 | } 35 | 36 | public void setForeignKeyMetadata(Map foreignKeyMetadataMap) { 37 | this.foreignKeyMetadataMap = foreignKeyMetadataMap; 38 | } 39 | 40 | @Override 41 | public List> generate(String template, int count) { 42 | try { 43 | if (template == null || template.trim().isEmpty()) { 44 | throw new IllegalArgumentException("模板不能为空"); 45 | } 46 | 47 | log.info("开始解析模板,模板内容: {}", template); 48 | 49 | // 解析模板 50 | Map templateMap; 51 | try { 52 | // 先尝试解析为JsonNode以便于调试 53 | JsonNode jsonNode = objectMapper.readTree(template); 54 | log.info("模板JSON解析结果: {}", jsonNode.toPrettyString()); 55 | 56 | // 如果JSON解析成功,再转换为Map 57 | templateMap = objectMapper.convertValue(jsonNode, Map.class); 58 | log.info("模板Map解析结果: {}", templateMap); 59 | } catch (Exception e) { 60 | log.error("模板解析失败,请确保模板是有效的JSON格式。模板内容: {}, 错误: {}", template, e.getMessage(), e); 61 | throw new IllegalArgumentException("模板解析失败: " + e.getMessage()); 62 | } 63 | 64 | // 创建规则 65 | rules = new HashMap<>(); 66 | fields = new ArrayList<>(); 67 | 68 | for (Map.Entry entry : templateMap.entrySet()) { 69 | String field = entry.getKey(); 70 | Object value = entry.getValue(); 71 | 72 | log.info("处理字段 {}, 值类型: {}, 值内容: {}", field, 73 | value != null ? value.getClass().getName() : "null", 74 | value); 75 | 76 | if (!(value instanceof Map)) { 77 | String actualType = value != null ? 
value.getClass().getName() : "null"; 78 | String actualValue = String.valueOf(value); 79 | log.error("字段 {} 的配置无效。期望类型: Map, 实际类型: {}, 实际值: {}", 80 | field, actualType, actualValue); 81 | throw new IllegalArgumentException(String.format( 82 | "字段 %s 的配置无效,应为对象类型,实际为: %s,值: %s", 83 | field, actualType, actualValue)); 84 | } 85 | 86 | @SuppressWarnings("unchecked") 87 | Map ruleConfig = (Map) value; 88 | 89 | log.info("字段 {} 的规则配置: {}", field, ruleConfig); 90 | 91 | String type = (String) ruleConfig.get("type"); 92 | if (type == null) { 93 | log.error("字段 {} 缺少 type 属性,完整配置: {}", field, ruleConfig); 94 | throw new IllegalArgumentException("字段 " + field + " 缺少 type 属性"); 95 | } 96 | 97 | Object params = ruleConfig.get("params"); 98 | if (params == null) { 99 | log.error("字段 {} 缺少 params 属性,完整配置: {}", field, ruleConfig); 100 | throw new IllegalArgumentException("字段 " + field + " 缺少 params 属性"); 101 | } 102 | 103 | try { 104 | log.info("为字段 {} 创建规则,类型: {}, 参数: {}", field, type, params); 105 | DataRule rule = ruleFactory.createRule(type, params); 106 | rules.put(field, rule); 107 | fields.add(field); 108 | log.info("成功创建字段 {} 的规则: {}", field, rule); 109 | } catch (Exception e) { 110 | log.error("创建字段 {} 的规则失败。类型: {}, 参数: {}, 错误: {}", 111 | field, type, params, e.getMessage(), e); 112 | throw new IllegalArgumentException( 113 | String.format("创建字段 %s 的规则失败: %s", field, e.getMessage())); 114 | } 115 | } 116 | 117 | // 生成数据 118 | List> data = new ArrayList<>(); 119 | for (int i = 0; i < count; i++) { 120 | Map row = new HashMap<>(); 121 | for (String field : fields) { 122 | DataRule rule = rules.get(field); 123 | Object value = rule.generate(); 124 | row.put(field, value); 125 | if (i == 0) { 126 | log.debug("字段 {} 生成的第一个值: {}", field, value); 127 | } 128 | } 129 | data.add(row); 130 | } 131 | 132 | log.info("成功生成 {} 条数据", count); 133 | if (!data.isEmpty()) { 134 | log.debug("第一条数据示例: {}", data.get(0)); 135 | } 136 | 137 | return data; 138 | } catch (Exception e) { 139 | 
log.error("生成数据失败: {}", e.getMessage(), e); 140 | throw new RuntimeException("生成数据失败: " + e.getMessage(), e); 141 | } 142 | } 143 | 144 | @Override 145 | public List getFields() { 146 | return fields; 147 | } 148 | 149 | private Object generateValue(String fieldName, JsonNode fieldConfig) { 150 | String type = fieldConfig.path("type").asText(); 151 | ObjectNode params = (ObjectNode) fieldConfig.path("params"); 152 | 153 | try { 154 | switch (type.toLowerCase()) { 155 | case "foreignkey": 156 | String referencedTable = fieldConfig.path("referencedTable").asText(); 157 | String referencedColumn = fieldConfig.path("referencedColumn").asText(); 158 | String key = referencedTable + "." + referencedColumn; 159 | ForeignKeyMetadata fkMetadata = foreignKeyMetadataMap.get(key); 160 | if (fkMetadata == null || fkMetadata.getValidValues().isEmpty()) { 161 | throw new IllegalArgumentException("没有找到有效的外键值: " + key); 162 | } 163 | // 随机选择一个有效的外键值 164 | List validValues = new ArrayList<>(fkMetadata.getValidValues()); 165 | return validValues.get(new Random().nextInt(validValues.size())); 166 | default: 167 | DataRule rule = rules.get(fieldName); 168 | if (rule == null) { 169 | throw new IllegalArgumentException("未知的数据生成规则类型: " + type); 170 | } 171 | return rule.generate(); 172 | } 173 | } catch (Exception e) { 174 | log.error("生成字段 {} 的值失败: {}", fieldName, e.getMessage()); 175 | throw new IllegalArgumentException("创建字段 " + fieldName + " 的规则失败: " + e.getMessage()); 176 | } 177 | } 178 | } -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/generator/rule/impl/StringRule.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.generator.rule.impl; 2 | 3 | import com.datagenerator.generator.rule.DataRule; 4 | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; 5 | import lombok.Data; 6 | import lombok.extern.slf4j.Slf4j; 7 | 8 | import 
java.util.Random; 9 | import java.util.regex.Pattern; 10 | 11 | @Data 12 | @Slf4j 13 | @JsonIgnoreProperties(ignoreUnknown = true) 14 | public class StringRule implements DataRule { 15 | private String prefix = ""; 16 | private String suffix = ""; 17 | private int minLength = 5; 18 | private int maxLength = 10; 19 | private String charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; 20 | private String pattern; 21 | private boolean random = true; 22 | private int current = 0; 23 | private String defaultValue; 24 | private boolean nullable = true; 25 | 26 | @Override 27 | public Object generate() { 28 | try { 29 | if (defaultValue != null) { 30 | return defaultValue; 31 | } 32 | 33 | if (nullable && Math.random() < 0.1) { // 10%的概率生成null 34 | return null; 35 | } 36 | 37 | // 如果指定了模式,使用模式生成 38 | if (pattern != null && !pattern.isEmpty()) { 39 | return generateByPattern(); 40 | } 41 | 42 | // 否则使用常规方式生成 43 | return generateByLength(); 44 | } catch (Exception e) { 45 | log.error("生成字符串失败: {}", e.getMessage(), e); 46 | return generateDefault(); 47 | } 48 | } 49 | 50 | private String generateByPattern() { 51 | try { 52 | if (pattern == null || pattern.isEmpty()) { 53 | return generateByLength(); 54 | } 55 | 56 | // 检查是否是JSON模板 57 | if (pattern.startsWith("{") || pattern.startsWith("[")) { 58 | return processJsonTemplate(pattern); 59 | } 60 | 61 | // 如果是简单的正则表达式模式,直接返回 62 | return pattern; 63 | } catch (Exception e) { 64 | log.error("按模式生成字符串失败: {}", e.getMessage(), e); 65 | return generateDefault(); 66 | } 67 | } 68 | 69 | private String processJsonTemplate(String template) { 70 | try { 71 | log.debug("处理JSON模板: {}", template); 72 | 73 | // 处理枚举值 ${enum:value1|value2|value3} 74 | java.util.regex.Pattern enumPattern = java.util.regex.Pattern.compile("\\$\\{enum:([^}]+)\\}"); 75 | java.util.regex.Matcher enumMatcher = enumPattern.matcher(template); 76 | StringBuffer sb = new StringBuffer(); 77 | while (enumMatcher.find()) { 78 | String values = 
enumMatcher.group(1); 79 | String[] options = values.split("\\|"); 80 | int index = (int) (Math.random() * options.length); 81 | String replacement = options[index]; 82 | // 使用quoteReplacement确保替换值中的特殊字符被正确处理 83 | replacement = java.util.regex.Matcher.quoteReplacement(replacement); 84 | enumMatcher.appendReplacement(sb, replacement); 85 | } 86 | enumMatcher.appendTail(sb); 87 | template = sb.toString(); 88 | 89 | // 处理随机数 ${random:min-max} 90 | java.util.regex.Pattern randomPattern = java.util.regex.Pattern.compile("\\$\\{random:([0-9]+)-([0-9]+)\\}"); 91 | java.util.regex.Matcher randomMatcher = randomPattern.matcher(template); 92 | sb = new StringBuffer(); 93 | while (randomMatcher.find()) { 94 | long min = Long.parseLong(randomMatcher.group(1)); 95 | long max = Long.parseLong(randomMatcher.group(2)); 96 | long value = min + (long) (Math.random() * (max - min + 1)); 97 | // 使用quoteReplacement确保替换值中的特殊字符被正确处理 98 | String replacement = java.util.regex.Matcher.quoteReplacement(String.valueOf(value)); 99 | randomMatcher.appendReplacement(sb, replacement); 100 | } 101 | randomMatcher.appendTail(sb); 102 | template = sb.toString(); 103 | 104 | // 处理字符串模式 ${string:pattern} 105 | java.util.regex.Pattern stringPattern = java.util.regex.Pattern.compile("\\$\\{string:([^}]+)\\}"); 106 | java.util.regex.Matcher stringMatcher = stringPattern.matcher(template); 107 | sb = new StringBuffer(); 108 | while (stringMatcher.find()) { 109 | String strPattern = stringMatcher.group(1); 110 | String replacement = ""; 111 | 112 | // 简单处理一些常见的模式 113 | if (strPattern.contains("[a-zA-Z0-9]")) { 114 | int minLen = 8; 115 | int maxLen = 16; 116 | if (strPattern.contains("{")) { 117 | String[] parts = strPattern.split("\\{|\\}"); 118 | for (String part : parts) { 119 | if (part.contains(",")) { 120 | String[] range = part.split(","); 121 | minLen = Integer.parseInt(range[0]); 122 | maxLen = Integer.parseInt(range[1]); 123 | break; 124 | } else if (part.matches("\\d+")) { 125 | minLen = maxLen 
= Integer.parseInt(part); 126 | break; 127 | } 128 | } 129 | } 130 | 131 | // 使用安全的方法生成随机字符串 132 | replacement = generateRandomAlphanumeric(minLen + (int) (Math.random() * (maxLen - minLen + 1))); 133 | } 134 | // 处理特定前缀的模式 135 | else if (strPattern.startsWith("sku")) { 136 | replacement = "sku" + String.format("%06d", (int) (Math.random() * 1000000)); 137 | } else if (strPattern.startsWith("page")) { 138 | replacement = "page" + String.format("%03d", (int) (Math.random() * 1000)); 139 | } else if (strPattern.startsWith("act")) { 140 | replacement = "act" + String.format("%03d", (int) (Math.random() * 1000)); 141 | } else if (strPattern.startsWith("pos")) { 142 | replacement = "pos" + String.format("%03d", (int) (Math.random() * 1000)); 143 | } else if (strPattern.startsWith("ad")) { 144 | replacement = "ad" + generateRandomAlphanumeric(6); 145 | } else if (strPattern.startsWith("err")) { 146 | replacement = "err" + generateRandomAlphanumeric(6); 147 | } else { 148 | replacement = generateRandomAlphanumeric(minLength + (int)(Math.random() * (maxLength - minLength + 1))); 149 | } 150 | 151 | // 使用quoteReplacement确保替换值中的特殊字符被正确处理 152 | replacement = java.util.regex.Matcher.quoteReplacement(replacement); 153 | stringMatcher.appendReplacement(sb, replacement); 154 | } 155 | stringMatcher.appendTail(sb); 156 | template = sb.toString(); 157 | 158 | // 直接处理字符串值中的"}"符号 159 | // 这是一个更直接的方法,专门针对您提供的模板结构 160 | if (template.contains("\"")) { 161 | // 使用正则表达式匹配JSON字符串值 162 | java.util.regex.Pattern jsonStringPattern = java.util.regex.Pattern.compile("\"([^\"]*)\""); 163 | java.util.regex.Matcher jsonStringMatcher = jsonStringPattern.matcher(template); 164 | sb = new StringBuffer(); 165 | while (jsonStringMatcher.find()) { 166 | String stringValue = jsonStringMatcher.group(1); 167 | // 移除字符串值中的"}"符号 168 | stringValue = stringValue.replace("}", ""); 169 | // 使用quoteReplacement确保替换值中的特殊字符被正确处理 170 | String replacement = "\"" + stringValue + "\""; 171 | replacement = 
java.util.regex.Matcher.quoteReplacement(replacement); 172 | jsonStringMatcher.appendReplacement(sb, replacement); 173 | } 174 | jsonStringMatcher.appendTail(sb); 175 | template = sb.toString(); 176 | } 177 | 178 | log.debug("处理后的JSON模板: {}", template); 179 | return template; 180 | } catch (Exception e) { 181 | log.error("处理JSON模板失败: {}", e.getMessage(), e); 182 | // 出错时,尝试一个简单的修复:移除所有字符串值中的"}"符号 183 | try { 184 | // 使用正则表达式匹配JSON字符串值 185 | java.util.regex.Pattern jsonStringPattern = java.util.regex.Pattern.compile("\"([^\"]*)\""); 186 | java.util.regex.Matcher jsonStringMatcher = jsonStringPattern.matcher(template); 187 | StringBuffer sb = new StringBuffer(); 188 | while (jsonStringMatcher.find()) { 189 | String stringValue = jsonStringMatcher.group(1); 190 | // 移除字符串值中的"}"符号 191 | stringValue = stringValue.replace("}", ""); 192 | // 使用quoteReplacement确保替换值中的特殊字符被正确处理 193 | String replacement = "\"" + stringValue + "\""; 194 | replacement = java.util.regex.Matcher.quoteReplacement(replacement); 195 | jsonStringMatcher.appendReplacement(sb, replacement); 196 | } 197 | jsonStringMatcher.appendTail(sb); 198 | return sb.toString(); 199 | } catch (Exception ex) { 200 | log.error("简单修复也失败了: {}", ex.getMessage(), ex); 201 | return template; 202 | } 203 | } 204 | } 205 | 206 | /** 207 | * 生成只包含字母和数字的随机字符串 208 | */ 209 | private String generateRandomAlphanumeric(int length) { 210 | StringBuilder sb = new StringBuilder(); 211 | String chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; 212 | for (int i = 0; i < length; i++) { 213 | sb.append(chars.charAt((int) (Math.random() * chars.length()))); 214 | } 215 | return sb.toString(); 216 | } 217 | 218 | private String generateByLength() { 219 | int length = minLength; 220 | if (maxLength > minLength) { 221 | length += (int) (Math.random() * (maxLength - minLength + 1)); 222 | } 223 | 224 | StringBuilder sb = new StringBuilder(); 225 | 226 | if (prefix != null) { 227 | sb.append(prefix); 228 | } 229 | 230 
| if (random) { 231 | Random random = new Random(); 232 | for (int i = 0; i < length; i++) { 233 | sb.append(charset.charAt(random.nextInt(charset.length()))); 234 | } 235 | } else { 236 | for (int i = 0; i < length; i++) { 237 | sb.append(charset.charAt(current % charset.length())); 238 | current = (current + 1) % charset.length(); 239 | } 240 | } 241 | 242 | if (suffix != null) { 243 | sb.append(suffix); 244 | } 245 | 246 | return sb.toString(); 247 | } 248 | 249 | private String generateDefault() { 250 | return "default_" + System.currentTimeMillis(); 251 | } 252 | 253 | @Override 254 | public String getType() { 255 | return "string"; 256 | } 257 | 258 | @Override 259 | public Object getParams() { 260 | return this; 261 | } 262 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /backend/src/main/java/com/datagenerator/service/impl/DataSourceServiceImpl.java: -------------------------------------------------------------------------------- 1 | package com.datagenerator.service.impl; 2 | 3 | import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; 4 | import com.datagenerator.entity.DataSource; 5 | import com.datagenerator.mapper.DataSourceMapper; 6 | import com.datagenerator.service.DataSourceService; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.apache.kafka.clients.admin.AdminClient; 9 | import org.apache.kafka.clients.admin.ListTopicsOptions; 10 | import org.springframework.stereotype.Service; 11 | 12 | import java.sql.Connection; 13 | import java.sql.DatabaseMetaData; 14 | import java.sql.DriverManager; 15 | import java.sql.ResultSet; 16 | import java.sql.SQLException; 17 | import java.sql.Statement; 18 | import java.util.ArrayList; 19 | import java.util.HashMap; 20 | import 
java.util.List; 21 | import java.util.Map; 22 | import java.util.Properties; 23 | import java.util.Set; 24 | import java.util.concurrent.ConcurrentHashMap; 25 | import java.time.Duration; 26 | import javax.annotation.PreDestroy; 27 | 28 | @Slf4j 29 | @Service 30 | public class DataSourceServiceImpl extends ServiceImpl implements DataSourceService { 31 | 32 | private final Map adminClientMap = new ConcurrentHashMap<>(); 33 | 34 | @PreDestroy 35 | public void cleanup() { 36 | log.info("正在关闭所有Kafka AdminClient连接..."); 37 | adminClientMap.forEach((id, client) -> { 38 | try { 39 | client.close(Duration.ofSeconds(5)); 40 | log.info("成功关闭数据源ID={}的Kafka连接", id); 41 | } catch (Exception e) { 42 | log.error("关闭数据源ID={}的Kafka连接时发生错误", id, e); 43 | } 44 | }); 45 | adminClientMap.clear(); 46 | } 47 | 48 | private AdminClient getOrCreateAdminClient(DataSource dataSource) { 49 | return adminClientMap.computeIfAbsent(dataSource.getId(), id -> { 50 | Properties props = new Properties(); 51 | props.put("bootstrap.servers", dataSource.getUrl()); 52 | props.put("request.timeout.ms", "5000"); 53 | props.put("connections.max.idle.ms", "10000"); 54 | 55 | if (dataSource.getUsername() != null && !dataSource.getUsername().isEmpty()) { 56 | props.put("security.protocol", "SASL_PLAINTEXT"); 57 | props.put("sasl.mechanism", "PLAIN"); 58 | String jaasTemplate = "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";"; 59 | String jaasConfig = String.format(jaasTemplate, dataSource.getUsername(), dataSource.getPassword()); 60 | props.put("sasl.jaas.config", jaasConfig); 61 | } 62 | 63 | return AdminClient.create(props); 64 | }); 65 | } 66 | 67 | public void closeKafkaConnection(Long dataSourceId) { 68 | AdminClient client = adminClientMap.remove(dataSourceId); 69 | if (client != null) { 70 | try { 71 | client.close(Duration.ofSeconds(5)); 72 | log.info("成功关闭数据源ID={}的Kafka连接", dataSourceId); 73 | } catch (Exception e) { 74 | 
log.error("关闭数据源ID={}的Kafka连接时发生错误", dataSourceId, e); 75 | } 76 | } 77 | } 78 | 79 | @Override 80 | public void testConnection(DataSource dataSource) throws SQLException { 81 | log.info("开始测试数据源连接, 类型: {}, URL: {}", dataSource.getType(), dataSource.getUrl()); 82 | 83 | if ("KAFKA".equalsIgnoreCase(dataSource.getType())) { 84 | log.info("正在测试Kafka连接..."); 85 | try { 86 | AdminClient adminClient = getOrCreateAdminClient(dataSource); 87 | adminClient.listTopics(new ListTopicsOptions().timeoutMs(5000)).names().get(); 88 | log.info("Kafka连接测试成功"); 89 | } catch (Exception e) { 90 | log.error("Kafka连接测试失败", e); 91 | closeKafkaConnection(dataSource.getId()); 92 | throw new SQLException("Kafka连接失败: " + e.getMessage()); 93 | } 94 | } else { 95 | log.info("正在测试数据库连接..."); 96 | try { 97 | log.info("加载数据库驱动: {}", dataSource.getDriverClassName()); 98 | Class.forName(dataSource.getDriverClassName()); 99 | log.info("尝试建立数据库连接..."); 100 | Connection conn = DriverManager.getConnection( 101 | dataSource.getUrl(), 102 | dataSource.getUsername(), 103 | dataSource.getPassword() 104 | ); 105 | 106 | // 获取并记录当前数据库名称 107 | try (Statement stmt = conn.createStatement(); 108 | ResultSet rs = stmt.executeQuery("SELECT DATABASE()")) { 109 | if (rs.next()) { 110 | String dbName = rs.getString(1); 111 | log.info("成功连接到数据库: {}", dbName); 112 | } 113 | } catch (Exception e) { 114 | log.warn("无法获取当前数据库名称: {}", e.getMessage()); 115 | } 116 | 117 | conn.close(); 118 | log.info("数据库连接测试成功"); 119 | } catch (SQLException e) { 120 | log.error("数据库连接测试失败", e); 121 | throw e; 122 | } catch (ClassNotFoundException e) { 123 | log.error("数据库驱动加载失败", e); 124 | throw new SQLException("数据库驱动加载失败: " + e.getMessage()); 125 | } 126 | } 127 | } 128 | 129 | @Override 130 | public List getTables(Long dataSourceId) { 131 | DataSource dataSource = getById(dataSourceId); 132 | if (dataSource == null) { 133 | throw new RuntimeException("数据源不存在"); 134 | } 135 | 136 | if ("KAFKA".equalsIgnoreCase(dataSource.getType())) { 
137 | return getTopics(dataSourceId); 138 | } 139 | 140 | List tables = new ArrayList<>(); 141 | try { 142 | Class.forName(dataSource.getDriverClassName()); 143 | try (Connection conn = DriverManager.getConnection( 144 | dataSource.getUrl(), 145 | dataSource.getUsername(), 146 | dataSource.getPassword())) { 147 | DatabaseMetaData metaData = conn.getMetaData(); 148 | ResultSet rs = metaData.getTables(null, null, "%", new String[]{"TABLE"}); 149 | while (rs.next()) { 150 | tables.add(rs.getString("TABLE_NAME")); 151 | } 152 | } 153 | } catch (Exception e) { 154 | log.error("获取表列表失败", e); 155 | throw new RuntimeException("获取表列表失败: " + e.getMessage(), e); 156 | } 157 | return tables; 158 | } 159 | 160 | @Override 161 | public List getTopics(Long dataSourceId) { 162 | DataSource dataSource = getById(dataSourceId); 163 | if (dataSource == null) { 164 | throw new RuntimeException("数据源不存在"); 165 | } 166 | 167 | if (!"KAFKA".equalsIgnoreCase(dataSource.getType())) { 168 | throw new RuntimeException("不支持的数据源类型:" + dataSource.getType()); 169 | } 170 | 171 | List topics = new ArrayList<>(); 172 | try { 173 | AdminClient adminClient = getOrCreateAdminClient(dataSource); 174 | Set topicNames = adminClient.listTopics(new ListTopicsOptions().timeoutMs(5000)) 175 | .names() 176 | .get(); 177 | topics.addAll(topicNames); 178 | log.info("成功获取到 {} 个主题", topics.size()); 179 | } catch (Exception e) { 180 | log.error("获取Kafka主题列表失败", e); 181 | closeKafkaConnection(dataSourceId); 182 | throw new RuntimeException("获取Kafka主题列表失败: " + e.getMessage(), e); 183 | } 184 | 185 | return topics; 186 | } 187 | 188 | @Override 189 | public List> getTableColumns(Long dataSourceId, String tableName) { 190 | DataSource dataSource = getById(dataSourceId); 191 | if (dataSource == null) { 192 | throw new RuntimeException("数据源不存在"); 193 | } 194 | 195 | List> columns = new ArrayList<>(); 196 | try { 197 | Class.forName(dataSource.getDriverClassName()); 198 | try (Connection conn = 
DriverManager.getConnection( 199 | dataSource.getUrl(), 200 | dataSource.getUsername(), 201 | dataSource.getPassword())) { 202 | DatabaseMetaData metaData = conn.getMetaData(); 203 | ResultSet rs = metaData.getColumns(null, null, tableName, null); 204 | while (rs.next()) { 205 | Map column = new HashMap<>(); 206 | column.put("name", rs.getString("COLUMN_NAME")); 207 | column.put("type", rs.getString("TYPE_NAME")); 208 | column.put("comment", rs.getString("REMARKS")); 209 | columns.add(column); 210 | } 211 | } 212 | } catch (Exception e) { 213 | log.error("获取表结构失败", e); 214 | throw new RuntimeException("获取表结构失败", e); 215 | } 216 | return columns; 217 | } 218 | 219 | @Override 220 | public Map> getTableDependencies(Long dataSourceId, String[] tables) { 221 | log.info("获取表依赖关系,dataSourceId={}, tables={}", dataSourceId, String.join(",", tables)); 222 | Map> dependencies = new HashMap<>(); 223 | 224 | // 初始化依赖图 225 | for (String table : tables) { 226 | dependencies.put(table, new ArrayList<>()); 227 | } 228 | 229 | DataSource dataSource = getById(dataSourceId); 230 | if (dataSource == null) { 231 | log.error("数据源不存在,id={}", dataSourceId); 232 | return dependencies; 233 | } 234 | 235 | try (Connection conn = getConnection(dataSource)) { 236 | DatabaseMetaData metaData = conn.getMetaData(); 237 | 238 | // 获取每个表的外键依赖 239 | for (String table : tables) { 240 | try (ResultSet foreignKeys = metaData.getImportedKeys(conn.getCatalog(), null, table)) { 241 | while (foreignKeys.next()) { 242 | String pkTable = foreignKeys.getString("PKTABLE_NAME"); 243 | String fkColumn = foreignKeys.getString("FKCOLUMN_NAME"); 244 | String pkColumn = foreignKeys.getString("PKCOLUMN_NAME"); 245 | 246 | log.info("发现依赖关系: {}.{} 依赖于 {}.{}", table, fkColumn, pkTable, pkColumn); 247 | 248 | // 只考虑在当前处理列表中的表 249 | if (containsIgnoreCase(tables, pkTable) && !dependencies.get(table).contains(pkTable)) { 250 | dependencies.get(table).add(pkTable); 251 | } 252 | } 253 | } 254 | } 255 | 256 | return 
dependencies; 257 | } catch (Exception e) { 258 | log.error("获取表依赖关系时发生错误", e); 259 | return dependencies; 260 | } 261 | } 262 | 263 | /** 264 | * 忽略大小写检查数组中是否包含某个字符串 265 | */ 266 | private boolean containsIgnoreCase(String[] array, String target) { 267 | for (String item : array) { 268 | if (item.equalsIgnoreCase(target)) { 269 | return true; 270 | } 271 | } 272 | return false; 273 | } 274 | 275 | /** 276 | * 获取数据库连接 277 | */ 278 | private Connection getConnection(DataSource dataSource) throws SQLException { 279 | String url = dataSource.getUrl(); 280 | String username = dataSource.getUsername(); 281 | String password = dataSource.getPassword(); 282 | 283 | if ("MYSQL".equals(dataSource.getType())) { 284 | return DriverManager.getConnection(url, username, password); 285 | } else if ("POSTGRESQL".equals(dataSource.getType())) { 286 | return DriverManager.getConnection(url, username, password); 287 | } else if ("ORACLE".equals(dataSource.getType())) { 288 | return DriverManager.getConnection(url, username, password); 289 | } else { 290 | throw new SQLException("不支持的数据库类型: " + dataSource.getType()); 291 | } 292 | } 293 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 通用数据生成器 (Universal Data Generator) 2 | 3 | ## 项目概述 4 | 通用数据生成器是一个灵活且强大的工具,用于生成模拟数据并支持多种数据源。该工具可以帮助开发人员、测试人员和数据分析师快速生成符合特定业务场景的测试数据。目前版本支持MySQL和Kafka数据源。 5 | 6 | ## 核心功能 7 | 8 | ### 1. 多数据源支持 9 | - **关系型数据库** 10 | - MySQL(当前版本已支持) 11 | - Oracle(计划支持) 12 | - PostgreSQL(计划支持) 13 | - SQL Server(计划支持) 14 | - 其他支持JDBC的数据库(计划支持) 15 | 16 | - **消息队列** 17 | - Apache Kafka(当前版本已支持) 18 | - RabbitMQ(计划支持) 19 | - RocketMQ(计划支持) 20 | - 其他支持的消息中间件(计划支持) 21 | 22 | ### 2. 数据源配置 23 | - 支持配置多个数据源连接 24 | - 支持连接池管理 25 | - 支持SSL/TLS加密连接 26 | - 支持代理服务器配置 27 | 28 | ### 3. 
数据生成规则 29 | - **表/主题选择** 30 | - 支持选择指定数据库的所有表 31 | - 支持选择指定数据库的部分表 32 | - 支持选择Kafka主题 33 | - 支持创建新的Kafka主题 34 | 35 | - **字段配置** 36 | - 支持自定义字段生成规则 37 | - 内置多种数据类型生成器: 38 | - 数值类型(整数、浮点数) 39 | - 字符串类型(随机字符串、姓名、地址等) 40 | - 日期时间类型 41 | - 布尔类型 42 | - JSON类型 43 | - 数组类型 44 | - 自定义类型 45 | 46 | - **数据关联** 47 | - 支持表间关联关系 48 | - 支持外键约束 49 | - 支持数据一致性维护 50 | 51 | ### 4. 数据输出控制 52 | - **写入模式** 53 | - 覆盖模式:清空目标表后写入 54 | - 追加模式:保留现有数据,追加新数据 55 | - 更新模式:根据主键更新现有数据 56 | 57 | - **批量控制** 58 | - 支持配置批量写入大小 59 | - 支持配置写入频率 60 | - 支持配置并发写入数 61 | 62 | ### 5. 数据格式配置 63 | - **Kafka消息格式** 64 | - JSON格式(支持自定义结构) 65 | - Avro格式 66 | - Protobuf格式 67 | - 自定义格式 68 | 69 | - **数据模板** 70 | - 支持自定义数据模板 71 | - 支持模板变量替换 72 | - 支持条件判断和循环 73 | 74 | ### 6. 任务管理 75 | - 支持创建多个数据生成任务 76 | - 支持任务调度(定时执行) 77 | - 支持任务暂停/恢复 78 | - 支持任务监控和统计 79 | 80 | ### 7. 监控和日志 81 | - 实时监控数据生成进度 82 | - 详细的执行日志记录 83 | - 错误告警和通知 84 | - 性能指标统计 85 | 86 | ## 技术架构 87 | 88 | ### 1. 核心组件 89 | - 数据源连接管理器 90 | - 数据生成引擎 91 | - 任务调度器 92 | - 监控系统 93 | - 配置管理系统 94 | 95 | ### 2. 扩展性设计 96 | - 插件化架构 97 | - 自定义数据生成器接口 98 | - 自定义数据源适配器 99 | - 自定义输出格式处理器 100 | 101 | ## 使用场景 102 | 103 | ### 1. 测试数据生成 104 | - 单元测试 105 | - 集成测试 106 | - 性能测试 107 | - 压力测试 108 | 109 | ### 2. 开发环境搭建 110 | - 快速构建开发环境 111 | - 模拟生产环境数据 112 | - 数据迁移测试 113 | 114 | ### 3. 数据分析 115 | - 数据可视化测试 116 | - 报表开发测试 117 | - 数据挖掘测试 118 | 119 | ## 部署要求 120 | 121 | ### 1. 系统要求 122 | - JDK 11或更高版本 123 | - 最小内存:4GB 124 | - 推荐内存:8GB或更高 125 | - 磁盘空间:根据数据量配置 126 | 127 | ### 2. 依赖组件 128 | - 数据库驱动 129 | - 消息队列客户端 130 | - 配置中心(可选) 131 | - 监控系统(可选) 132 | 133 | ## 安全特性 134 | 135 | ### 1. 数据安全 136 | - 敏感数据脱敏 137 | - 数据加密传输 138 | - 访问权限控制 139 | 140 | ### 2. 系统安全 141 | - 用户认证 142 | - 角色授权 143 | - 操作审计 144 | - 安全日志 145 | 146 | ## 后续规划 147 | 148 | ### 1. 功能增强 149 | - 支持更多数据源 150 | - 增强数据生成规则 151 | - 优化性能 152 | - 提供Web管理界面 153 | 154 | ### 2. 生态集成 155 | - 支持容器化部署 156 | - 支持云平台集成 157 | - 支持CI/CD集成 158 | - 支持监控系统集成 159 | 160 | ## 贡献指南 161 | 欢迎提交Issue和Pull Request来帮助改进项目。在提交代码前,请确保: 162 | 1. 
代码符合项目规范 163 | 2. 添加必要的测试用例 164 | 3. 更新相关文档 165 | 4. 提供清晰的提交信息 166 | 167 | ## 许可证 168 | 本项目采用 MIT 许可证 169 | 170 | # 数据生成器应用 171 | 172 | 这是一个用于生成模拟数据的应用程序,**当前版本仅支持MySQL和Kafka数据源**。 173 | 174 | ## 系统要求 175 | 176 | - Docker 177 | - Docker Compose 178 | 179 | ## 开发环境搭建 180 | 181 | ### 1. 数据库初始化 182 | 183 | 在开始使用应用前,需要先创建数据库和相关表结构: 184 | 185 | ```sql 186 | CREATE TABLE `data_source` ( 187 | `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键ID', 188 | `name` varchar(100) NOT NULL COMMENT '数据源名称', 189 | `type` varchar(20) NOT NULL COMMENT '数据源类型', 190 | `url` varchar(500) NOT NULL COMMENT '连接URL', 191 | `username` varchar(100) NOT NULL COMMENT '用户名', 192 | `password` varchar(100) NOT NULL COMMENT '密码', 193 | `driver_class_name` varchar(100) DEFAULT NULL COMMENT '驱动类名', 194 | `description` varchar(500) DEFAULT NULL COMMENT '描述', 195 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 196 | `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 197 | `deleted` tinyint(4) NOT NULL DEFAULT '0' COMMENT '是否删除', 198 | PRIMARY KEY (`id`) 199 | ) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COMMENT='数据源配置表'; 200 | 201 | CREATE TABLE `data_task` ( 202 | `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键ID', 203 | `name` varchar(100) NOT NULL COMMENT '任务名称', 204 | `data_source_id` bigint(20) NOT NULL COMMENT '数据源ID', 205 | `target_type` varchar(20) NOT NULL COMMENT '目标类型', 206 | `target_name` varchar(5000) NOT NULL COMMENT '目标名称', 207 | `write_mode` varchar(20) NOT NULL COMMENT '写入模式', 208 | `data_format` varchar(20) NOT NULL COMMENT '数据格式', 209 | `template` text COMMENT '数据生成模板', 210 | `batch_size` int(11) NOT NULL DEFAULT '1000' COMMENT '批量大小', 211 | `frequency` int(11) NOT NULL DEFAULT '1' COMMENT '生成频率(秒)', 212 | `concurrent_num` int(11) NOT NULL DEFAULT '1' COMMENT '并发数', 213 | `status` varchar(20) NOT NULL DEFAULT 'STOPPED' COMMENT '任务状态', 214 | `cron_expression` varchar(100) DEFAULT NULL 
COMMENT '定时任务表达式', 215 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 216 | `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 217 | `deleted` tinyint(4) NOT NULL DEFAULT '0' COMMENT '是否删除', 218 | PRIMARY KEY (`id`), 219 | KEY `data_source_id` (`data_source_id`), 220 | CONSTRAINT `data_task_ibfk_1` FOREIGN KEY (`data_source_id`) REFERENCES `data_source` (`id`) 221 | ) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8mb4 COMMENT='数据生成任务表'; 222 | 223 | CREATE TABLE `system_info` ( 224 | `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键ID', 225 | `cpu_usage` double DEFAULT NULL COMMENT 'CPU使用率', 226 | `memory_usage` double DEFAULT NULL COMMENT '内存使用率', 227 | `disk_usage` double DEFAULT NULL COMMENT '磁盘使用率', 228 | `jvm_heap_usage` double DEFAULT NULL COMMENT 'JVM堆内存使用率', 229 | `jvm_non_heap_usage` double DEFAULT NULL COMMENT 'JVM非堆内存使用率', 230 | `uptime` bigint(20) DEFAULT NULL COMMENT '系统运行时间(毫秒)', 231 | `create_time` datetime DEFAULT NULL COMMENT '创建时间', 232 | PRIMARY KEY (`id`) 233 | ) ENGINE=InnoDB AUTO_INCREMENT=1248 DEFAULT CHARSET=utf8mb4 COMMENT='系统信息表'; 234 | 235 | CREATE TABLE `task_execution` ( 236 | `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键ID', 237 | `task_id` bigint(20) NOT NULL COMMENT '任务ID', 238 | `start_time` datetime NOT NULL COMMENT '开始时间', 239 | `end_time` datetime DEFAULT NULL COMMENT '结束时间', 240 | `status` varchar(20) NOT NULL COMMENT '执行状态', 241 | `total_count` bigint(20) NOT NULL DEFAULT '0' COMMENT '总记录数', 242 | `success_count` bigint(20) NOT NULL DEFAULT '0' COMMENT '成功记录数', 243 | `error_count` bigint(20) NOT NULL DEFAULT '0' COMMENT '失败记录数', 244 | `error_message` text COMMENT '错误信息', 245 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 246 | `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 247 | `deleted` tinyint(4) NOT NULL DEFAULT '0' COMMENT '是否删除', 248 | PRIMARY KEY 
(`id`), 249 | KEY `task_id` (`task_id`), 250 | CONSTRAINT `task_execution_ibfk_1` FOREIGN KEY (`task_id`) REFERENCES `data_task` (`id`) 251 | ) ENGINE=InnoDB AUTO_INCREMENT=217 DEFAULT CHARSET=utf8mb4 COMMENT='任务执行记录表'; 252 | 253 | ``` 254 | 255 | 您可以将上述SQL保存为`init.sql`文件,然后执行: 256 | 257 | ```bash 258 | mysql -u用户名 -p密码 < init.sql 259 | ``` 260 | 261 | 或者在MySQL客户端中直接执行这些SQL语句。 262 | 263 | ### 2. 后端开发环境启动 264 | 265 | #### 环境要求 266 | - JDK 11+ 267 | - Maven 3.6+ 268 | - MySQL 5.7+ 269 | - Kafka 2.8+ 270 | 271 | #### 步骤 272 | 273 | 1. 进入后端目录 274 | ```bash 275 | cd backend 276 | ``` 277 | 278 | 2. 编译项目 279 | ```bash 280 | mvn clean package -DskipTests 281 | ``` 282 | 283 | 3. 启动应用 284 | ```bash 285 | java -jar target/data-generator-backend-1.0.0.jar 286 | ``` 287 | 288 | 或者在开发IDE中直接运行`com.datagenerator.DataGeneratorApplication`类。 289 | 290 | #### 配置说明 291 | 292 | 后端配置文件位于`backend/src/main/resources/application.yml`,您可以根据需要修改数据库连接、Kafka配置等。 293 | 294 | ### 3. 前端开发环境启动 295 | 296 | #### 环境要求 297 | - Node.js 14+ 298 | - npm 6+ 或 yarn 1.22+ 299 | 300 | #### 步骤 301 | 302 | 1. 进入前端目录 303 | ```bash 304 | cd frontend 305 | ``` 306 | 307 | 2. 安装依赖 308 | ```bash 309 | npm install 310 | # 或 311 | yarn install 312 | ``` 313 | 314 | 3. 启动开发服务器 315 | ```bash 316 | npm run serve 317 | # 或 318 | yarn serve 319 | ``` 320 | 321 | 4. 
构建生产版本 322 | ```bash 323 | npm run build 324 | # 或 325 | yarn build 326 | ``` 327 | 328 | #### 配置说明 329 | 330 | 前端API配置文件位于`frontend/src/config/index.js`,您可以根据需要修改API地址等配置。 331 | 332 | ## 项目结构 333 | 334 | - `frontend/`: 前端Vue应用 335 | - `backend/`: 后端Spring Boot应用 336 | - `docker-compose.yml`: Docker Compose配置文件 337 | - `start.sh`/`start.bat`: 启动脚本 338 | - `stop.sh`/`stop.bat`: 停止脚本 339 | 340 | ## 配置说明 341 | 342 | ### 数据库配置(MySQL) 343 | 344 | 默认的MySQL数据库配置: 345 | 346 | - 数据库名:data_generator 347 | - 用户名:datagenerator 348 | - 密码:datagenerator123 349 | 350 | 如需修改,请编辑`docker-compose.yml`文件中的相关配置。 351 | 352 | ### Kafka配置 353 | 354 | 默认创建的主题:test 355 | 356 | 如需添加更多主题,请修改`docker-compose.yml`文件中的`KAFKA_CREATE_TOPICS`配置。 357 | 358 | ## 故障排除 359 | 360 | 如果应用启动失败,可以查看Docker日志: 361 | 362 | ```bash 363 | docker-compose logs 364 | ``` 365 | 366 | 针对特定服务的日志: 367 | 368 | ```bash 369 | docker-compose logs backend 370 | docker-compose logs frontend 371 | docker-compose logs mysql 372 | docker-compose logs kafka 373 | ``` 374 | 375 | ### 常见问题 376 | 377 | 1. **数据库连接失败** 378 | - 检查数据库服务是否正常运行 379 | - 验证连接信息是否正确 380 | - 确认数据库用户是否有足够权限 381 | 382 | 2. **Kafka连接问题** 383 | - 检查Kafka服务是否正常运行 384 | - 验证主题是否已创建 385 | - 检查网络连接是否通畅 386 | 387 | 3. **前端无法连接后端API** 388 | - 确认后端服务是否正常运行 389 | - 检查前端配置中的API地址是否正确 390 | - 检查是否存在跨域问题 391 | 392 | # 数据生成器项目部署文档 393 | 394 | ## 项目结构 395 | 396 | ``` 397 | data-generator/ 398 | ├── backend/ # 后端项目目录 399 | │ ├── src/ # 源代码 400 | │ ├── pom.xml # Maven配置文件 401 | │ └── Dockerfile # 后端Docker构建文件 402 | ├── frontend/ # 前端项目目录 403 | │ ├── src/ # 源代码 404 | │ ├── package.json # npm配置文件 405 | │ ├── nginx.conf # nginx配置文件 406 | │ └── Dockerfile # 前端Docker构建文件 407 | ├── docker-compose.yml # Docker Compose配置文件 408 | ├── deploy.sh # 一键部署脚本 409 | └── README.md # 项目说明文档 410 | ``` 411 | 412 | ## 环境要求 413 | 414 | - Docker 20.10.0+ 415 | - Docker Compose 2.0.0+ 416 | - 操作系统:Linux/MacOS/Windows 417 | - 内存:至少4GB RAM 418 | - 磁盘空间:至少10GB可用空间 419 | 420 | ## 快速开始 421 | 422 | 1. 
克隆项目: 423 | ```bash 424 | git clone <项目地址> 425 | cd data-generator 426 | ``` 427 | 428 | 2. 运行部署脚本: 429 | ```bash 430 | chmod +x deploy.sh # 给脚本添加执行权限(Linux/MacOS) 431 | ./deploy.sh # 运行部署脚本 432 | ``` 433 | 434 | 3. 访问应用: 435 | - 前端界面:http://localhost 436 | - 后端API:http://localhost:8080 437 | - 数据库:localhost:3306 438 | 439 | ## 配置说明 440 | 441 | ### 数据库配置 442 | - 数据库:MySQL 8.0 443 | - 用户名:data_generator 444 | - 密码:data_generator 445 | - 数据库名:data_generator 446 | 447 | ### 端口配置 448 | - 前端:80 449 | - 后端:8080 450 | - MySQL:3306 451 | 452 | ## 目录说明 453 | 454 | ### 后端项目 455 | - `backend/src/`: 后端源代码目录 456 | - `backend/pom.xml`: Maven项目配置文件 457 | - `backend/Dockerfile`: 后端Docker镜像构建文件 458 | 459 | ### 前端项目 460 | - `frontend/src/`: 前端源代码目录 461 | - `frontend/package.json`: npm项目配置文件 462 | - `frontend/nginx.conf`: nginx服务器配置文件 463 | - `frontend/Dockerfile`: 前端Docker镜像构建文件 464 | 465 | ## 部署说明 466 | 467 | ### 手动部署步骤 468 | 469 | 1. 构建镜像: 470 | ```bash 471 | docker-compose build 472 | ``` 473 | 474 | 2. 启动服务: 475 | ```bash 476 | docker-compose up -d 477 | ``` 478 | 479 | 3. 查看服务状态: 480 | ```bash 481 | docker-compose ps 482 | ``` 483 | 484 | 4. 查看服务日志: 485 | ```bash 486 | docker-compose logs 487 | ``` 488 | 489 | ### 停止服务 490 | 491 | ```bash 492 | docker-compose down 493 | ``` 494 | 495 | ## 常见问题 496 | 497 | 1. 端口冲突 498 | - 问题:服务启动失败,提示端口被占用 499 | - 解决:修改`docker-compose.yml`中的端口映射配置 500 | 501 | 2. 数据库连接失败 502 | - 问题:后端服务无法连接到数据库 503 | - 解决:检查数据库配置和网络连接 504 | 505 | 3. 前端访问后端API失败 506 | - 问题:前端页面无法加载数据 507 | - 解决:检查nginx配置中的代理设置 508 | 509 | ## 维护和更新 510 | 511 | 1. 更新应用: 512 | ```bash 513 | git pull # 获取最新代码 514 | ./deploy.sh # 重新部署 515 | ``` 516 | 517 | 2. 查看容器日志: 518 | ```bash 519 | docker-compose logs -f # 实时查看所有服务的日志 520 | docker-compose logs backend # 查看后端服务日志 521 | docker-compose logs frontend # 查看前端服务日志 522 | ``` 523 | 524 | 3. 
备份数据: 525 | ```bash 526 | docker exec data-generator-mysql mysqldump -u root -p data_generator > backup.sql 527 | ``` 528 | 529 | ## 技术栈 530 | 531 | - 后端:Spring Boot 532 | - 前端:Vue.js 533 | - 数据库:MySQL 534 | - 服务器:Nginx 535 | - 容器化:Docker & Docker Compose 536 | 537 | ## 注意事项 538 | 539 | 1. 生产环境部署 540 | - 修改数据库密码 541 | - 配置HTTPS 542 | - 启用数据库备份 543 | - 配置监控告警 544 | 545 | 2. 安全建议 546 | - 定期更新依赖 547 | - 限制数据库远程访问 548 | - 使用安全的密码 549 | - 配置防火墙规则 550 | -------------------------------------------------------------------------------- /frontend/src/views/Executions.vue: -------------------------------------------------------------------------------- 1 | 152 | 153 | 452 | 453 | --------------------------------------------------------------------------------