├── .github
│   └── workflows
│       ├── dead-link-checker.yml
│       └── dlc.json
├── .gitignore
├── LANGS.md
├── README-ZH.md
├── README.md
├── en_US
│   ├── Design_Documentation
│   │   ├── FlowExecution
│   │   │   ├── README.md
│   │   │   └── images
│   │   │       ├── flowexecution.drawio.png
│   │   │       └── workflow_execution_uml.png
│   │   ├── Orchestrator
│   │   │   ├── README.md
│   │   │   └── images
│   │   │       ├── Create_an_orchestration_sequence_diagram.png
│   │   │       ├── Import_Orchestration_Sequence_Diagram.png
│   │   │       ├── orchestrator_arch.png
│   │   │       └── orchestrator_uml.png
│   │   ├── UserGuide
│   │   │   ├── DSS-UserGuide_Module_Design.md
│   │   │   └── images
│   │   │       ├── 1653309535303.png
│   │   │       ├── 1653309930194.png
│   │   │       └── 16559707626688.png
│   │   ├── Workspace
│   │   │   ├── README.md
│   │   │   └── images
│   │   │       └── workspace_uml.drawio.png
│   │   ├── appconn
│   │   │   ├── DSS_Access_Scheduling_System.md
│   │   │   ├── appconn.md
│   │   │   └── images
│   │   │       ├── appconn_class_uml.png
│   │   │       ├── appconn_load_process.png
│   │   │       └── appconn_structure.png
│   │   ├── labelRoute
│   │   │   └── DSS_label-based_routing_forwarding.md
│   │   ├── project
│   │   │   ├── DSS_Engineering_Module_Design_Documentation.md
│   │   │   └── images
│   │   │       ├── project-create.png
│   │   │       ├── project-edit.png
│   │   │       └── project.png
│   │   ├── publish
│   │   │   ├── Workflow_Release_Design.md
│   │   │   └── images
│   │   │       └── workflow-publish.png
│   │   └── workflow
│   │       ├── DSS_Workflow_Architecture_Design.md
│   │       └── images
│   │           └── workflow.png
│   ├── Development_Documentation
│   │   ├── AppConn_Development_Guide.md
│   │   ├── Compilation_Documentation.md
│   │   ├── Front-end_compilation_documentation.md
│   │   ├── How_to_Add_Workflow_Node_in_DSS_Workflow.md
│   │   ├── How_to_add_script_types_in_Scriptis.md
│   │   └── Third-party_System_Access_Development_Guide.md
│   ├── Images
│   │   ├── Apiservice
│   │   │   ├── 105c29559c6ff03db92efc7cc0b7d15d.png
│   │   │   ├── 150207df2ea89d2b0c2f3ccb8d46577b.png
│   │   │   ├── 50bd31ef9795efe92aa333f7ac8518c6.png
│   │   │   ├── b7f2ba78f56e660c9345895205cd47ed.png
│   │   │   ├── c44c3ee7da22fdd19eb62379271a8410.png
│   │   │   ├── c65f98286cd82f3d9100f602a31f4302.png
│   │   │   ├── d61d36f82ef7fc7111ea37574dd0db22.png
│   │   │   ├── dfbf44e1fe710b76883fe0eb24346707.png
│   │   │   ├── ff7b18b4eec06f5dfd2da1e3693e2e59.png
│   │   │   ├── postman01.png
│   │   │   └── postman02.png
│   │   ├── Development_doc
│   │   │   └── Third-part_System_Access_Development_Guide
│   │   │       ├── DSS_frame_design.png
│   │   │       ├── Organizational_Structure_Specification.png
│   │   │       ├── SSO_password-free_jump.png
│   │   │       └── Schedulis_implements_DSS_Level1_specification.png
│   │   ├── Install_and_Deploy
│   │   │   ├── DSS&Linkis_one-click_deployment_document_stand-alone_version
│   │   │   │   └── eureka.png
│   │   │   ├── DSSUserGuide_Deploy
│   │   │   │   └── userguide_1.png
│   │   │   ├── DSS_Debug_Documentation
│   │   │   │   ├── img.png
│   │   │   │   ├── img_1.png
│   │   │   │   ├── img_2.png
│   │   │   │   ├── img_3.png
│   │   │   │   └── img_4.png
│   │   │   └── DolphinschedulerAppConn_deployment
│   │   │       ├── img.png
│   │   │       ├── img_1.png
│   │   │       ├── img_10.png
│   │   │       ├── img_11.png
│   │   │       ├── img_12.png
│   │   │       ├── img_13.png
│   │   │       ├── img_14.png
│   │   │       ├── img_2.png
│   │   │       ├── img_3.png
│   │   │       ├── img_4.png
│   │   │       ├── img_5.png
│   │   │       ├── img_6.png
│   │   │       ├── img_7.png
│   │   │       ├── img_8.png
│   │   │       └── img_9.png
│   │   ├── Using_Document
│   │   │   ├── Scriptis
│   │   │   │   ├── hive-6.png
│   │   │   │   ├── hive1.png
│   │   │   │   ├── hive2.png
│   │   │   │   ├── hive3.png
│   │   │   │   ├── hive4.png
│   │   │   │   ├── hive5.png
│   │   │   │   ├── hive7.png
│   │   │   │   ├── home.png
│   │   │   │   ├── udf-3.png
│   │   │   │   ├── udf1.png
│   │   │   │   └── udf2.png
│   │   │   └── workspace
│   │   │       ├── ws_img1.png
│   │   │       ├── ws_img2.png
│   │   │       ├── ws_img3.png
│   │   │       ├── ws_img5.png
│   │   │       └── ws_img6.png
│   │   ├── apiservice.png
│   │   ├── applicationshop.png
│   │   ├── loginpage.png
│   │   ├── projectpage.png
│   │   ├── runworkflow.png
│   │   ├── scriptis.png
│   │   ├── workflow.png
│   │   └── workspacepage.png
│   ├── Installation_and_Deployment
│   │   ├── DSS&Linkis_one-click_deployment_document_stand-alone_version.md
│   │   ├── DSSUserGuide_Deploy_documentation.md
│   │   ├── DSS_1.0.1_upgrade_to_1.1.0_using_documentation.md
│   │   ├── DSS_Debug_Documentation.md
│   │   ├── DolphinScheduler_Plugin_Installation_Documentation.md
│   │   ├── SchedulisAppConn_Plugin_Installation_Documentation.md
│   │   └── Schedulis_Linkis_JobType_Installation_Documentation.md
│   ├── README.md
│   ├── SUMMARY.md
│   ├── User_Manual
│   │   ├── DSS_How_To_Add_Users.md
│   │   ├── Data_Service_User_Manual.md
│   │   ├── Introduction_to_Data_Services.md
│   │   ├── Schedule_Center_Documentation.md
│   │   ├── Scriptis_Usage_Documentation.md
│   │   ├── Super_Administrator_Function.md
│   │   ├── User_Documentation.md
│   │   ├── Workflow_Usage_Documentation.md
│   │   └── images
│   │       ├── Create_Departments_And_Users.png
│   │       ├── Create_a_workspace.png
│   │       ├── Schedule_Center
│   │       │   ├── Cycle_Instance_Completion.png
│   │       │   ├── Edit_Scheduled_Tasks.png
│   │       │   ├── Page_Overview.png
│   │       │   ├── Process_Status_Statistics.png
│   │       │   ├── Process_Status_Statistics_By_Date.png
│   │       │   ├── Process_Time_Ranking.png
│   │       │   ├── Run_A_Workflow_On_A_Schedule.png
│   │       │   ├── Run_The_Workflow.png
│   │       │   ├── Task_Instance.png
│   │       │   ├── Timed_Page.png
│   │       │   ├── View_Logs.png
│   │       │   ├── Workflow_Definition.png
│   │       │   ├── Workflow_Instance.png
│   │       │   └── Workflow_Instance_And_Success_Rate_Statistics.png
│   │       ├── Super_Administrator_Function.png
│   │       ├── apiservicepage.png
│   │       ├── createapiservice.png
│   │       ├── createapiservice_param.png
│   │       ├── loginpage.png
│   │       ├── modifyapiservice.png
│   │       ├── postman1.png
│   │       ├── postman2.png
│   │       ├── project.png
│   │       ├── runworkflow.png
│   │       ├── scriptis.png
│   │       ├── scriptis_database.png
│   │       ├── scriptis_hdfs.png
│   │       ├── scriptis_summary.png
│   │       ├── scriptis_workspace.png
│   │       ├── scriptis_workspace_dir.png
│   │       ├── scriptis_workspace_file.png
│   │       ├── useapiservice.png
│   │       ├── workflow.png
│   │       └── workspace.png
│   └── Using_Document
│       ├── DSS_Interface_Summary.md
│       ├── DSS_User_Manual.md
│       ├── DataApiService_Usage_Documentation.md
│       ├── Scriptis
│       │   ├── Scriptis_User_Tests1_Scala.md
│       │   ├── Scriptis_User_Tests2_Hive.md
│       │   └── Scriptis_User_Tests3_SparkSQL.md
│       ├── User_login_authentication_system.md
│       └── Workspace_User_Manual.md
└── zh_CN
    ├── Images
    │   ├── apiservice.png
    │   ├── apiservice
    │   │   ├── 105c29559c6ff03db92efc7cc0b7d15d.png
    │   │   ├── 150207df2ea89d2b0c2f3ccb8d46577b.png
    │   │   ├── 50bd31ef9795efe92aa333f7ac8518c6.png
    │   │   ├── b7f2ba78f56e660c9345895205cd47ed.png
    │   │   ├── c44c3ee7da22fdd19eb62379271a8410.png
    │   │   ├── c65f98286cd82f3d9100f602a31f4302.png
    │   │   ├── d61d36f82ef7fc7111ea37574dd0db22.png
    │   │   ├── dfbf44e1fe710b76883fe0eb24346707.png
    │   │   ├── ff7b18b4eec06f5dfd2da1e3693e2e59.png
    │   │   ├── postman01.png
    │   │   └── postman02.png
    │   ├── applicationshop.png
    │   ├── loginpage.png
    │   ├── projectpage.png
    │   ├── runworkflow.png
    │   ├── scriptis.png
    │   ├── workflow.png
    │   ├── workspacepage.png
    │   ├── 使用文档
    │   │   ├── Scriptis
    │   │   │   ├── hive-6.png
    │   │   │   ├── hive1.png
    │   │   │   ├── hive2.png
    │   │   │   ├── hive3.png
    │   │   │   ├── hive4.png
    │   │   │   ├── hive5.png
    │   │   │   ├── hive7.png
    │   │   │   ├── home.png
    │   │   │   ├── udf-3.png
    │   │   │   ├── udf1.png
    │   │   │   └── udf2.png
    │   │   └── workspace
    │   │       ├── ws_img1.png
    │   │       ├── ws_img2.png
    │   │       ├── ws_img3.png
    │   │       ├── ws_img5.png
    │   │       └── ws_img6.png
    │   ├── 安装部署
    │   │   ├── DSS&Linkis一键部署文档单机版
    │   │   │   └── eureka.png
    │   │   ├── DSSUserGuide部署
    │   │   │   └── userguide_1.png
    │   │   ├── DSS调试
    │   │   │   ├── img.png
    │   │   │   ├── img_1.png
    │   │   │   ├── img_2.png
    │   │   │   ├── img_3.png
    │   │   │   └── img_4.png
    │   │   └── DolphinschedulerAppConn部署
    │   │       ├── img.png
    │   │       ├── img_1.png
    │   │       ├── img_10.png
    │   │       ├── img_11.png
    │   │       ├── img_12.png
    │   │       ├── img_13.png
    │   │       ├── img_14.png
    │   │       ├── img_2.png
    │   │       ├── img_3.png
    │   │       ├── img_4.png
    │   │       ├── img_5.png
    │   │       ├── img_6.png
    │   │       ├── img_7.png
    │   │       ├── img_8.png
    │   │       └── img_9.png
    │   └── 开发文档
    │       └── 第三方系统如何接入DSS
    │           ├── AppConn架构图.png
    │           ├── AppConn调用举例.png
    │           ├── DSS框架设计.png
    │           ├── SSO免密跳转.png
    │           ├── Schedulis实现DSS一级规范.png
    │           ├── 开发流程规范.png
    │           └── 组织结构规范.png
    ├── README.md
    ├── SUMMARY.md
    ├── 使用文档
    │   ├── DSS接口汇总.md
    │   ├── DSS用户手册.md
    │   ├── DataApiService使用文档.md
    │   ├── Scriptis
    │   │   ├── Scriptis_User_Tests1_Scala.md
    │   │   ├── Scriptis_User_Tests2_Hive.md
    │   │   └── Scriptis_User_Tests3_SparkSQL.md
    │   ├── 工作空间使用手册.md
    │   └── 用户登录认证体系.md
    ├── 安装部署
    │   ├── DSS&Linkis一键部署文档单机版.md
    │   ├── DSS1.0.1到1.1.0升级文档.md
    │   ├── DSS1.1.0到1.1.1升级文档.md
    │   ├── DSSUserGuide部署文档.md
    │   ├── DSS工作流内容从0.X版本迁移1.0版本说明文档.md
    │   ├── DSS调试文档.md
    │   ├── DSS部署文档单机版.md
    │   ├── DolphinScheduler插件安装文档.md
    │   ├── SchedulisAppConn插件安装文档.md
    │   └── Schedulis_Linkis_JobType安装文档.md
    ├── 开发文档
    │   ├── AppConn开发指南.md
    │   ├── DSS工作流如何新增工作流节点.md
    │   ├── DSS编译文档.md
    │   ├── Scriptis如何新增脚本类型.md
    │   ├── 前端编译文档.md
    │   └── 第三方系统接入DSS开发指南.md
    ├── 用户手册
    │   ├── DSS新增用户方式.md
    │   ├── Scriptis使用文档.md
    │   ├── images
    │   │   ├── apiservicepage.png
    │   │   ├── createapiservice.png
    │   │   ├── createapiservice_param.png
    │   │   ├── loginpage.png
    │   │   ├── modifyapiservice.png
    │   │   ├── postman1.png
    │   │   ├── postman2.png
    │   │   ├── project.png
    │   │   ├── runworkflow.png
    │   │   ├── scriptis.png
    │   │   ├── scriptis_database.png
    │   │   ├── scriptis_hdfs.png
    │   │   ├── scriptis_summary.png
    │   │   ├── scriptis_workspace.png
    │   │   ├── scriptis_workspace_dir.png
    │   │   ├── scriptis_workspace_file.png
    │   │   ├── useapiservice.png
    │   │   ├── workflow.png
    │   │   ├── workspace.png
    │   │   ├── 创建工作空间.png
    │   │   ├── 创建部门和用户.png
    │   │   ├── 新增用户功能-LDAP界面.png
    │   │   ├── 调度中心
    │   │   │   ├── 任务实例.png
    │   │   │   ├── 周期实例完成情况.png
    │   │   │   ├── 定时运行工作流.png
    │   │   │   ├── 定时页面.png
    │   │   │   ├── 工作流定义.png
    │   │   │   ├── 工作流实例.png
    │   │   │   ├── 工作流实例与成功率统计.png
    │   │   │   ├── 按日期进行流程状态统计.png
    │   │   │   ├── 查看日志.png
    │   │   │   ├── 流程状态统计.png
    │   │   │   ├── 流程耗时排名.png
    │   │   │   ├── 编辑定时任务.png
    │   │   │   ├── 运行工作流.png
    │   │   │   └── 页面概述.png
    │   │   └── 超级管理员功能.png
    │   ├── 工作流使用文档.md
    │   ├── 数据服务使用手册.md
    │   ├── 数据服务简介.md
    │   ├── 用户使用文档.md
    │   ├── 调度中心使用文档.md
    │   └── 超级管理员功能.md
    └── 设计文档
        ├── FlowExecution
        │   ├── README.md
        │   └── images
        │       ├── flowexecution.drawio.png
        │       └── 工作流执行uml.png
        ├── Orchestrator
        │   ├── README.md
        │   └── images
        │       ├── orchestrator_arch.png
        │       ├── orchestrator_uml.png
        │       ├── 创建编排时序图.png
        │       └── 导入编排时序图.png
        ├── UserGuide
        │   ├── DSS-UserGuide模块设计.md
        │   └── images
        │       ├── 1653309535303.png
        │       ├── 数据结构.png
        │       └── 核心流程图.png
        ├── Workspace
        │   ├── README.md
        │   └── images
        │       └── workspace_uml.drawio.png
        ├── appconn
        │   ├── DSS调度系统接入.md
        │   ├── appconn.md
        │   └── images
        │       ├── appconn_class_uml.png
        │       ├── appconn_load_process.png
        │       └── appconn_structure.png
        ├── labelRoute
        │   └── DSS标签路由转发.md
        ├── project
        │   ├── DSS工程模块设计文档.md
        │   └── images
        │       ├── project-create.png
        │       ├── project-edit.png
        │       └── project.png
        ├── publish
        │   ├── images
        │   │   └── workflow-publish.png
        │   └── 工作流发布设计文档.md
        └── workflow
            ├── DSS工作流架构设计.md
            └── images
                └── workflow.png
/.github/workflows/dead-link-checker.yml:
--------------------------------------------------------------------------------
1 | name: Link checker
2 |
3 | on:
4 | pull_request:
5 | push:
6 | schedule:
7 | - cron: "0 5 * * *"
8 |
9 | jobs:
10 | CheckDeadLinks:
11 | runs-on: ubuntu-latest
12 | timeout-minutes: 60
13 | steps:
14 | - uses: actions/checkout@v3
15 | - uses: gaurav-nelson/github-action-markdown-link-check@v1
16 | with:
17 | use-quiet-mode: 'no'
18 | use-verbose-mode: 'yes'
19 | folder-path: '../'
20 | config-file: '.github/workflows/dlc.json'
--------------------------------------------------------------------------------
/.github/workflows/dlc.json:
--------------------------------------------------------------------------------
1 | {
2 | "ignorePatterns": [
3 | {
4 | "pattern": "https://dolphinscheduler.apache.org/"
5 | },
6 | {
7 | "pattern": "https://web.mit.edu/"
8 | }
9 | ],
10 | "timeout": "10s",
11 | "retryOn429": true,
12 | "retryCount": 10,
13 | "fallbackRetryDelay": "1000s",
14 | "aliveStatusCodes": [
15 | 200,
16 | 401,
17 | 403
18 | ]
19 | }
20 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .idea
--------------------------------------------------------------------------------
/LANGS.md:
--------------------------------------------------------------------------------
1 | * [English](en_US)
2 | * [中文](zh_CN)
--------------------------------------------------------------------------------
/en_US/Design_Documentation/FlowExecution/images/flowexecution.drawio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/FlowExecution/images/flowexecution.drawio.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/FlowExecution/images/workflow_execution_uml.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/FlowExecution/images/workflow_execution_uml.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/Orchestrator/images/Create_an_orchestration_sequence_diagram.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/Orchestrator/images/Create_an_orchestration_sequence_diagram.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/Orchestrator/images/Import_Orchestration_Sequence_Diagram.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/Orchestrator/images/Import_Orchestration_Sequence_Diagram.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/Orchestrator/images/orchestrator_arch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/Orchestrator/images/orchestrator_arch.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/Orchestrator/images/orchestrator_uml.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/Orchestrator/images/orchestrator_uml.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/UserGuide/DSS-UserGuide_Module_Design.md:
--------------------------------------------------------------------------------
1 | # DSS-UserGuide module design
2 |
3 | ### Introduction
4 |
5 | The DSS user guide module is a new function module in DSS 1.0 that provides user guidance for DSS users. It collects many problems and solutions encountered while using DSS, as well as descriptions of individual function points, so that users can search for solutions to their problems by themselves. Later on it can also be associated with error codes, so that when an error code pops up, the solution already entered in the knowledge base can be located directly. The guide module stores documents as html in table fields, so the md files must be parsed and converted to html. Since some documents contain links that need to be jumped to, a gitbook is built to display and manage these documents. To synchronize the dss-guide-user module efficiently, the files on GitLab are packaged, uploaded and decompressed to a specified directory on the server where gitbook is located, and guide-user scans that directory periodically to achieve synchronization.
6 |
7 | ## Introduction to the main modules of dss_guide
8 |
9 | The DSS_Guide module mainly contains the definitions of Restful, Service, Dao, and Entity.
10 |
11 | ### GuideGroupService
12 |
13 | Covers adding, querying, modifying, saving and deleting GuideGroups, and also synchronizes SUMMARY.md. The guide module parses this file and, following the directory paths configured at each level in it, locates the files to read and writes them to the database on a schedule, completing the synchronization. When the service runs on other servers, to avoid installing gitbook repeatedly, the guide module needs the ip of the server holding the configuration file, and then automatically synchronizes the files to the server where the guide module is located for display.
14 |
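To make the synchronization flow above concrete, here is a minimal sketch, assuming a hypothetical sync directory and the usual gitbook SUMMARY.md entry format; it is illustrative only, not the actual dss-guide code:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class GuideSyncSketch {
    // A SUMMARY.md entry looks like: * [Chapter title](path/to/doc.md)
    private static final Pattern ENTRY = Pattern.compile("\\*\\s*\\[(.+?)\\]\\((.+?\\.md)\\)");

    public static void main(String[] args) {
        Path gitbookRoot = Paths.get("/appcom/gitbook");   // hypothetical directory scanned by guide-user
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        // Re-scan the directory on a fixed schedule, as the guide module does.
        scheduler.scheduleAtFixedRate(() -> sync(gitbookRoot), 0, 10, TimeUnit.MINUTES);
    }

    static void sync(Path root) {
        try {
            List<Path> chapters = new ArrayList<>();
            for (String line : Files.readAllLines(root.resolve("SUMMARY.md"))) {
                Matcher m = ENTRY.matcher(line);
                if (m.find()) {
                    chapters.add(root.resolve(m.group(2))); // locate the md file behind each entry
                }
            }
            // In DSS the chapter's md content would now be converted to html and written to the table fields.
            chapters.forEach(p -> System.out.println("would sync: " + p));
        } catch (IOException e) {
            e.printStackTrace();                            // a failed scan just waits for the next round
        }
    }
}
```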
15 | ### GuideContentService
16 |
17 | It is used to handle the save, query, update and delete operations of GuideContent.
18 |
19 | ### GuideChapterService
20 |
21 | It is used to deal with the specific content of manual chapters, including chapter search, ID-based query, deletion, saving, etc.
22 |
23 | ### GuideCatalogService
24 |
25 | It is used to synchronize the knowledge base, support batch insertion of directory content, and implement operations such as saving, deleting, and querying the directory structure classification.
26 |
27 |
28 | ### Core flow chart
29 |
30 | 
31 |
32 |
33 | ### Data structure
34 |
35 | 
36 |
37 | ### dss_guide_group
38 |
39 | Defines the grouping of dss_guide, including group_id, path (access path), title, etc.
40 |
41 | ### dss_guide_chapter
42 |
43 | Stores the detailed content of dss_guide chapters, including catalog_id, title, content and content_html, and is associated with the content of dss_guide_catalog.
44 |
45 | ### dss_guide_content
46 |
47 | Stores the description content of each group, placed under the corresponding group. Contains title, type, content, content_html, etc.
48 |
49 | ### dss_guide_catalog
50 |
51 | It is used to classify the content of dss_guide, which is equivalent to the directory structure of the knowledge base and has a hierarchical directory relationship.
52 |
--------------------------------------------------------------------------------
/en_US/Design_Documentation/UserGuide/images/1653309535303.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/UserGuide/images/1653309535303.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/UserGuide/images/1653309930194.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/UserGuide/images/1653309930194.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/UserGuide/images/16559707626688.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/UserGuide/images/16559707626688.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/Workspace/images/workspace_uml.drawio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/Workspace/images/workspace_uml.drawio.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/appconn/appconn.md:
--------------------------------------------------------------------------------
1 | DSS-AppConn Design Documentation
2 | ------
3 | ## Introduction
4 | The principle of the original AppJoint was to define a top-level interface, AppJoint. A third party implements this interface and stores its connection information in a DSS table, and a "proxy service" that communicates with the third-party system is implemented inside DSS. During initialization, an instance of that service is created through the reflection mechanism; using the connection information in the table, DSS can then use the "proxy service" to establish HTTP communication with the third-party system and thereby invoke it. However, the AppJoint design has shortcomings: every connected application instance needs its own AppJoint instance, and different instances of the same application are not logically associated. AppConn, which describes the application instances of each system, is the top-level interface of DSS 1.0; in DSS 1.0, its own orchestration modes, workflows, single-task nodes and so on are all AppConn instances. In addition, third-party systems that access DSS need to implement the AppConn interface, so that DSS can integrate with them and call third-party applications. Logically, AppConn is a higher abstraction than AppJoint: AppConn is similar to a class, while AppJoint is similar to an instance.
5 |
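As a rough illustration of the reflection-based loading described above — a minimal sketch, assuming a simplified AppConn interface; the real definitions live in dss-appconn-core and dss-appconn-loader:

```java
import java.util.Map;

// Hypothetical top-level interface standing in for the real AppConn definition.
interface AppConn {
    void init(Map<String, String> connectionInfo);
}

public class AppConnLoaderSketch {
    /**
     * Create an AppConn instance by reflection, the same idea the text describes:
     * the implementation class name and connection info are read from a DSS table,
     * so the framework never hard-codes the concrete type.
     */
    public static AppConn load(String className, Map<String, String> connectionInfo) throws Exception {
        Class<?> clazz = Class.forName(className);  // e.g. resolved from the AppConn's own classloader
        AppConn appConn = (AppConn) clazz.getDeclaredConstructor().newInstance();
        appConn.init(connectionInfo);               // hand over base URLs, credentials, etc.
        return appConn;
    }
}
```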
6 | ### Introduction to related modules
7 | |Level 1 Module | Level 2 Module | Function Introduction|
8 | |-------------|-----------|----------------|
9 | |dss-appconn|appconns|Implementation code of the AppConn specifications for systems that access DSS|
10 | | |dss-appconn-core|Appconn interface and basic class definition|
11 | | |dss-appconn-loader|Instantiation, loading and assembly of the AppConn compiled package of the connected application|
12 | | |dss-appconn-manager|Interact with framework modules and manage related AppConn instance information|
13 | | |dss-scheduler-appconn|Abstract AppConn Definition for Scheduling System Implementation|
14 | | |linkis-appconn-engineplugin|Implement the relevant specifications of linkis appconn and open up the interaction between DSS AppConn and Linkis|
15 |
16 |
17 |
18 | | Core interface/class | Core functions |
19 | |---------------------------|------------------------------|
20 | | DSSDictionaryRestful, DSSDictionaryServiceImpl | Provide dictionary information acquisition interface, query corresponding records from dictionary table through parameter key or parentKey |
21 | | DSSWorkspacePrivRestful, DSSWorkspacePrivServiceImpl | Provides viewing and editing functions for the permission information of the menu component of the workspace role |
22 | | DSSWorkspaceRestful, DSSWorkspaceServiceImpl | Provides basic functional interfaces of workspace, such as creating workspace, obtaining workspace list, obtaining permission information of menu components, etc. |
23 | | DSSWorkspaceRoleRestful, DSSWorkspaceRoleServiceImpl | Provides query and creation interfaces for workspace roles |
24 | | DSSWorkspaceUserRestful, DSSWorkspaceUserServiceImpl | Provides an interface for adding, deleting, modifying, and querying workspace users |
25 |
26 | ### AppConn Architecture Diagram
27 | 
28 | 
29 | 
30 |
--------------------------------------------------------------------------------
/en_US/Design_Documentation/appconn/images/appconn_class_uml.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/appconn/images/appconn_class_uml.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/appconn/images/appconn_load_process.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/appconn/images/appconn_load_process.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/appconn/images/appconn_structure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/appconn/images/appconn_structure.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/project/DSS_Engineering_Module_Design_Documentation.md:
--------------------------------------------------------------------------------
1 | # DSS Engineering
2 |
3 |
4 |
5 | In actual development and production, projects are often used to manage and develop a type of data application. A project can be an actual type of data application, including workflow, single task, etc. The projects under each workspace are isolated from each other.
6 |
7 |
8 |
9 | ## 1. Architecture Design
10 |
11 | - DSS itself can create and manage projects. Including create, view, modify, delete and other functions. At the same time, DSS provides engineering integration specifications to complete the docking operation with external components.
12 | - DSS projects are created and bound to each other synchronously with projects of external systems (or entities at the same level) through project integration specifications.
13 | - The external system obtains the project corresponding to the user in the DSS through the project integration specification, and completes the unified management of the underlying entities.
14 | - The external system obtains the user's project authority in DSS through the project integration specification, and further restricts the operation authority of the native project.
15 |
16 | 
17 |
18 |
19 |
20 | ### 2.1.1 Project creation
21 |
22 | Brief process description: create a DSS project → create the third-party application projects → save the project's user permission relationships
23 |
24 | flow chart:
25 |
26 | 
27 |
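A minimal sketch of this creation flow, assuming hypothetical in-memory stand-ins for the real DSS services (which persist to MySQL and call each AppConn over HTTP):

```java
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

/** Illustrative only; all names here are hypothetical, not the DSS API. */
public class ProjectCreationSketch {
    static final AtomicLong IDS = new AtomicLong(1);
    // dss_appconn_project_relation: dss project id -> (appconn name -> external project id)
    static final Map<Long, Map<String, Long>> RELATIONS = new LinkedHashMap<>();

    public static void main(String[] args) {
        long dssProjectId = createDssProject("sales_report");            // 1. create the DSS project
        for (String appConn : new String[]{"schedulis", "visualis"}) {   // 2. create it in each third-party system
            long externalId = createThirdPartyProject(appConn, "sales_report");
            RELATIONS.computeIfAbsent(dssProjectId, k -> new LinkedHashMap<>())
                     .put(appConn, externalId);                          // 3. save the relation
        }
        System.out.println("relations: " + RELATIONS);
    }

    static long createDssProject(String name) { return IDS.getAndIncrement(); }

    static long createThirdPartyProject(String appConn, String name) {
        // In DSS this is a call through the AppConn's project integration specification.
        return IDS.getAndIncrement();
    }
}
```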
28 | ### 2.1.2 Project editing
29 |
30 | Brief process description: edit the user permission relationships → edit the third-party project information → edit the DSS project's basic information
31 |
32 |
33 |
34 | flow chart:
35 |
36 | 
37 |
38 | ### 2.1.3 Project deletion
39 |
40 | Brief process description: check whether the user has delete permission → delete the third-party application projects → delete the DSS project
41 |
42 |
43 |
44 | ## 3. Database table structure design
45 |
46 | ```
47 | -- dss project basic information table
48 | dss_project:
49 | id
50 | name
51 | source
52 | description
53 | user_id
54 | username
55 | workspace_id
56 | create_time
57 | create_by
58 | update_time
59 | update_by
60 | org_id
61 | visibility
62 | is_transfer
63 | initial_org_id
64 | isArchive
65 | pic
66 | star_num
67 | product
68 | application_area
69 | business
70 | is_personal
71 | create_by_str
72 | update_by_str
73 | dev_process
74 | orchestrator_mod
75 | visible
76 |
77 | -- dss project and user permission relation table
78 | dss_project_user:
79 | id
80 | project_id
81 | username
82 | workspace_id
83 | priv
84 | last_update_time
85 |
86 | -- relation table between third-party application projects and dss projects
87 | dss_appconn_project_relation:
88 | id
89 | project_id
90 | appconn_instance_id
91 | appconn_instance_project_id
92 |
93 | ```
94 |
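For illustration, one row of dss_appconn_project_relation can be mirrored by a simple entity class — hypothetical; the real mapping lives in DSS's persistence layer:

```java
/**
 * Sketch of one row of dss_appconn_project_relation (hypothetical entity;
 * field names mirror the columns listed above).
 */
public class AppConnProjectRelation {
    private final Long id;
    private final Long projectId;                // dss_project.id
    private final Long appconnInstanceId;        // which connected application instance
    private final Long appconnInstanceProjectId; // the project's id inside that external system

    public AppConnProjectRelation(Long id, Long projectId,
                                  Long appconnInstanceId, Long appconnInstanceProjectId) {
        this.id = id;
        this.projectId = projectId;
        this.appconnInstanceId = appconnInstanceId;
        this.appconnInstanceProjectId = appconnInstanceProjectId;
    }

    @Override
    public String toString() {
        return "relation(project=" + projectId + " -> appconnInstance=" + appconnInstanceId
                + ", externalProject=" + appconnInstanceProjectId + ")";
    }
}
```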
95 |
96 |
97 |
--------------------------------------------------------------------------------
/en_US/Design_Documentation/project/images/project-create.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/project/images/project-create.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/project/images/project-edit.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/project/images/project-edit.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/project/images/project.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/project/images/project.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/publish/Workflow_Release_Design.md:
--------------------------------------------------------------------------------
1 | ## Workflow release design
2 |
3 | ## 1. Overview
4 |
5 | In actual production applications, the development center is responsible for debugging business-related workflows. After debugging is completed, the workflows are released to the scheduling system, which runs them as scheduled batch jobs to automate the business.
6 |
7 | ### 2. Workflow release architecture design
8 |
9 | Workflow publishing is a relatively complex process, which involves functions such as importing, exporting, and publishing workflows.
10 |
11 | ##### 1. Publishing process call link diagram:
12 |
13 | 
14 |
15 | ##### 2. Description of important steps
16 |
17 | - Export: export from the development center (dev), covering both workflow export and third-party node export. The generated workflow json file is compressed into a zip package through the bml service and uploaded to the bml file service center (see the sketch after this list).
18 | - Import: import into the production center (prod). Download the zip saved in the bml file service center, parse the json file to obtain the workflow orchestration information, and save it to the database.
19 | - Publish: convert the dss workflow orchestration information obtained by the import into orchestration information usable by the scheduling system, compress it into a zip package, and publish it to the wtss scheduling system.
20 |
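A toy sketch of the export step's packaging, assuming the orchestration is already serialized to json; the upload to the bml file service center is omitted:

```java
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class WorkflowExportSketch {
    public static void main(String[] args) throws IOException {
        String workflowJson = "{\"name\":\"demo_flow\",\"nodes\":[]}"; // serialized orchestration info
        Path zip = Paths.get("demo_flow.zip");
        try (ZipOutputStream out = new ZipOutputStream(Files.newOutputStream(zip))) {
            out.putNextEntry(new ZipEntry("demo_flow.json"));          // one json entry per workflow
            out.write(workflowJson.getBytes(StandardCharsets.UTF_8));
            out.closeEntry();
        }
        // The resulting zip would then be uploaded to the bml file service center.
        System.out.println("wrote " + zip.toAbsolutePath());
    }
}
```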
--------------------------------------------------------------------------------
/en_US/Design_Documentation/publish/images/workflow-publish.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/publish/images/workflow-publish.png
--------------------------------------------------------------------------------
/en_US/Design_Documentation/workflow/DSS_Workflow_Architecture_Design.md:
--------------------------------------------------------------------------------
1 | ## DSS workflow architecture design
2 |
3 | Workflow: refers to "the automation of part or the whole of a business process in a computer application environment". It is an abstract, generalized description of a business process and the business rules between its operation steps. Using workflows can greatly facilitate data development work.
4 |
5 | ### 1. Workflow Architecture
6 |
7 | 
8 |
9 |
10 |
11 | ### 2. Introduction to the secondary modules
12 |
13 | ##### dss-workflow-server
14 |
15 | The workflow core module includes the workflow CRUD functions, the workflow publishing function, the CRUD functions of the workflow appconn node, the workflow cs service function, and the RPC external service function.
16 |
17 | | Core service | Core function |
18 | | --------- | -------- |
19 | | DSSFlowService | Contains workflow and sub-workflow CRUD, as well as workflow version management methods |
20 | | WorkflowNodeService | Contains workflow node CRUD, copy, import, export and other methods |
21 | | PublishService | Provides functions related to workflow publishing |
22 | | DSSWorkflowReceiver | RPC Task Call Receiver |
23 | | DSSWorkflowChooser | RPC Task Invocation Selector |
24 |
25 | ##### dss-flow-execution-server
26 |
27 | The workflow execution module includes functions related to workflow execution, including real-time workflow execution, selected execution, rerun on failure, and workflow kill.
28 |
29 | ##### dss-workflow-sdk
30 |
31 | The workflow toolkit module provides external workflow resource file parsing functions.
32 |
33 | | Core Class | Core Function |
34 | | ---------------- | -------------------------------------------- |
35 | | FlowToJsonParser | Used to parse a flow into a DSSJsonFlow that DSS can use |
36 | | json2flow | Used to parse the workflow json into the required workflow object |
37 |
38 | ##### dss-workflow-common
39 |
40 | The basic common module of workflow; shared entity classes are abstracted into and kept in this module.
41 |
42 | ##### dss-linkis-node-execution
43 |
44 | The module through which dss calls linkis to execute nodes; it implements the task-execution-related interfaces provided by linkis.
45 |
46 | | Core Class | Core Function |
47 | | ----------------------- | ------------------------------------------------------------ |
48 | | LinkisNodeExecution | Contains core methods such as task running, task status, task results, task cancellation, and task logs |
49 | | LinkisExecutionListener | Monitor task execution |
50 | | DefaultRetryHandler | Provide a retry mechanism |
51 |
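The retry mechanism in the table can be pictured with a small generic wrapper — a sketch only; the actual signature of DefaultRetryHandler in dss-linkis-node-execution may differ:

```java
import java.util.concurrent.Callable;

public class RetrySketch {
    /** Runs the task, retrying up to maxRetries times with a fixed delay between attempts. */
    static <T> T withRetry(Callable<T> task, int maxRetries, long delayMs) throws Exception {
        Exception last = null;
        for (int attempt = 0; attempt <= maxRetries; attempt++) {
            try {
                return task.call();
            } catch (Exception e) {
                last = e;                                   // e.g. a transient task submission failure
                if (attempt < maxRetries) Thread.sleep(delayMs);
            }
        }
        throw last;                                         // all attempts exhausted
    }

    public static void main(String[] args) throws Exception {
        System.out.println(withRetry(() -> "task submitted", 3, 1000));
    }
}
```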
52 | ##### dss-workflow-conversion-standard
53 |
54 | The workflow transformation specification module defines the specification for transforming the DSS workflow into a workflow that can be used by other external applications.
55 |
56 | | Core Class | Core Function |
57 | | ------------------------------------- | -------------- |
58 | | ProjectConversionIntegrationStandard | Project Conversion Specification |
59 | | WorkflowConversionIntegrationStandard | Workflow Transformation Specification |
60 |
61 |
--------------------------------------------------------------------------------
/en_US/Design_Documentation/workflow/images/workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Design_Documentation/workflow/images/workflow.png
--------------------------------------------------------------------------------
/en_US/Development_Documentation/Compilation_Documentation.md:
--------------------------------------------------------------------------------
1 | ## 1. Compile DSS as a whole:
2 |
3 | After pulling the project code from git, use Maven to build the project installation package.
4 |
5 | (1) You can modify the versions of Linkis, Java, Scala, Maven and other software in the top-level pom.xml file to suit your company's big data environment, as follows:
6 |
7 | ```xml
8 | <properties>
9 |     <dss.version>1.1.1</dss.version>
10 |     <linkis.version>1.1.1</linkis.version>
11 |     <scala.version>2.11.12</scala.version>
12 |     <jdk.compile.version>1.8</jdk.compile.version>
13 |     <maven.version>3.3.3</maven.version>
14 | </properties>
15 |
16 | ```
17 |
18 | (2) **If you are using it locally for the first time, you must execute the following command in the directory where the outermost project pom.xml is located**:
19 |
20 | ```bash
21 | mvn -N install
22 | ```
23 |
24 | (3) Execute the following command in the directory where the outermost project pom.xml is located
25 |
26 | ```bash
27 | mvn clean install
28 | ```
29 |
30 |
31 |
32 | (4) Get the installation package from the assembly/target directory of the project:
33 |
34 | ```
35 | wedatasphere-dss-x.x.x-dist.tar.gz
36 | ```
37 |
38 |
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/en_US/Development_Documentation/Front-end_compilation_documentation.md:
--------------------------------------------------------------------------------
1 | # DSS front-end compilation documentation
2 |
3 | ## Startup process
4 |
5 | ### 1. Install Node.js
6 |
7 | Download Node.js to your computer and install it.
8 |
9 | Download address: [http://nodejs.cn/download/](http://nodejs.cn/download/) (the latest stable version may have compatibility problems; the tested versions v10.16.2 and v14.15.0 are known to work). Back-end developers are advised to do this under Linux.
10 |
11 | **This step is only required for the first use.**
12 |
13 | ### 2. Install the project
14 |
15 | Execute the following command in the terminal command line:
16 |
17 | ```shell script
18 | cd DataSphereStudio/web
19 | lerna bootstrap
20 | ```
21 |
22 | Instruction introduction:
23 | 1. Go to the root directory of the project package: `cd DataSphereStudio/web`
24 | 2. Dependencies required to install the project: `lerna bootstrap`
25 | 3. If the lerna command is not installed, you can install it through the `npm install lerna -g` command
26 |
27 | ### 3. Package the project
28 |
29 | You can package the project and generate the compressed code by executing the following command on the terminal command line:
30 |
31 | ```shell script
32 | npm run build
33 | ```
34 |
35 | After the command is successfully executed, a folder named "dist" will appear in the project root directory, which is the packaged code. You can put this folder directly into your static server.
36 |
37 | ### 4. Run the project
38 |
39 | If you want to run the project on a local browser and change the code to see the effect, you need to execute the following command in the terminal command line:
40 |
41 | ```shell script
42 | npm run serve
43 | ```
44 |
45 | Access the app in a browser (Chrome is recommended) via the link: ```http://localhost:8080/```
46 |
47 | When you run the project in this way, the effect of your code changes will be dynamically reflected on the browser.
48 |
49 | **Note: Because the project adopts front-end and back-end separate development, when running on the local browser, you need to set the browser to cross-domain to access the back-end interface:**
50 |
51 | Here is how the chrome browser sets up cross-domain:
52 |
53 | - Cross-domain configuration under windows system:
54 | 1. Close all chrome browsers.
55 | 2. Create a new chrome shortcut, right-click "Properties", select "Target" in the "Shortcut" tab, and append ```--args --disable-web-security --user-data-dir=C:\MyChromeDevUserData```
56 | 3. Open chrome browser via shortcut
57 |
58 | - Cross-domain configuration under mac system:
59 |
60 | Execute the following command in the terminal command line (replace yourname in the path; if it does not work, check the location of the MyChromeDevUserData folder on your machine and put that path after ```--user-data-dir=``` in the command below):
61 |
62 | ```shell script
63 | open -n /Applications/Google\ Chrome.app/ --args --disable-web-security --user-data-dir=/Users/yourname/MyChromeDevUserData/
64 | ```
65 | #### Method II
66 | Use vue's devServer proxy configuration: open vue.config.js under the dss package in web/packages and configure:
67 | ```js
68 | devServer: {
69 | proxy: {
70 | '/api': {
71 | target: 'http://127.0.0.1:9001', // set this to your back-end address
72 | changeOrigin: true,
73 | pathRewrite: {
74 | '^/api': '/api'
75 | }
76 | }
77 | }
78 | }
79 | ```
--------------------------------------------------------------------------------
/en_US/Development_Documentation/How_to_add_script_types_in_Scriptis.md:
--------------------------------------------------------------------------------
1 | ## 1. Back-end changes
2 |
3 | In ```linkis-computation-governance\linkis-manager\label-common\src\main\java\com\webank\wedatasphere\linkis\manager\label\entity\engine\EngineType.scala```,
4 | Added a new enumeration engine type.
5 |
6 | For how to implement a new Linkis engine, please refer to: [How to implement a new engine](https://linkis.apache.org/docs/latest/development/new-engine-conn).
7 |
8 | ## 2. Front-end changes
9 |
10 | Find the ```web/src/common/config/scriptis.js``` file and add a new record to its array.
11 |
12 | The record is of type object and can be configured with the following properties:
13 |
14 | - rule: regular expression, used to match file name suffixes;
15 | - executable: whether the script type can be executed;
16 | - lang: Which language is used in the monaco editor, corresponding to its highlighting, associative words and other functions. You can refer to the languages supported by the official website of monaco editor, or customize the language;
17 | - application: corresponds to an engine type of Linkis and is used to pass the parameter executeApplicationName during websocket or http polling;
18 | - runType: corresponds to the script type of a Linkis engine, used to pass the parameter runType during websocket or http polling;
19 | - ext: the file suffix of the script type, such as: ```.hql```;
20 | - scriptType: The script type supported by the system, which is used for judgment when creating a new script. This attribute is used to distinguish script types at the front end because both application and runType may be duplicated;
21 | - abbr: the suffix name of the script type (this attribute is deprecated and will be deleted later);
22 | - label: The name of the script type displayed on the UI interface of the user's new script, with the first letter capitalized;
23 | - logo: The script's icon. The storage path is: ```web/src/apps/scriptis/assets/styles/home.scss```, please add a new LOGO at the end of the file.
24 | - isCanBeNew: Whether it is allowed to be newly created, which is used to display in scenarios where new scripts can be created, such as workspaces;
25 | - isCanBeOpen: Whether it is allowed to be opened; it can be opened by double-click or right-click in modules such as the workspace (this property takes effect on the front end, and the back end also checks whether the script can be opened);
26 | - flowType: The type used in the workflow.
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/105c29559c6ff03db92efc7cc0b7d15d.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/105c29559c6ff03db92efc7cc0b7d15d.png
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/150207df2ea89d2b0c2f3ccb8d46577b.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/150207df2ea89d2b0c2f3ccb8d46577b.png
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/50bd31ef9795efe92aa333f7ac8518c6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/50bd31ef9795efe92aa333f7ac8518c6.png
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/b7f2ba78f56e660c9345895205cd47ed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/b7f2ba78f56e660c9345895205cd47ed.png
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/c44c3ee7da22fdd19eb62379271a8410.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/c44c3ee7da22fdd19eb62379271a8410.png
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/c65f98286cd82f3d9100f602a31f4302.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/c65f98286cd82f3d9100f602a31f4302.png
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/d61d36f82ef7fc7111ea37574dd0db22.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/d61d36f82ef7fc7111ea37574dd0db22.png
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/dfbf44e1fe710b76883fe0eb24346707.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/dfbf44e1fe710b76883fe0eb24346707.png
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/ff7b18b4eec06f5dfd2da1e3693e2e59.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/ff7b18b4eec06f5dfd2da1e3693e2e59.png
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/postman01.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/postman01.png
--------------------------------------------------------------------------------
/en_US/Images/Apiservice/postman02.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Apiservice/postman02.png
--------------------------------------------------------------------------------
/en_US/Images/Development_doc/Third-part_System_Access_Development_Guide/DSS_frame_design.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Development_doc/Third-part_System_Access_Development_Guide/DSS_frame_design.png
--------------------------------------------------------------------------------
/en_US/Images/Development_doc/Third-part_System_Access_Development_Guide/Organizational_Structure_Specification.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Development_doc/Third-part_System_Access_Development_Guide/Organizational_Structure_Specification.png
--------------------------------------------------------------------------------
/en_US/Images/Development_doc/Third-part_System_Access_Development_Guide/SSO_password-free_jump.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Development_doc/Third-part_System_Access_Development_Guide/SSO_password-free_jump.png
--------------------------------------------------------------------------------
/en_US/Images/Development_doc/Third-part_System_Access_Development_Guide/Schedulis_implements_DSS_Level1_specification.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Development_doc/Third-part_System_Access_Development_Guide/Schedulis_implements_DSS_Level1_specification.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DSS&Linkis_one-click_deployment_document_stand-alone_version/eureka.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DSS&Linkis_one-click_deployment_document_stand-alone_version/eureka.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DSSUserGuide_Deploy/userguide_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DSSUserGuide_Deploy/userguide_1.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DSS_Debug_Documentation/img.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DSS_Debug_Documentation/img.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DSS_Debug_Documentation/img_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DSS_Debug_Documentation/img_1.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DSS_Debug_Documentation/img_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DSS_Debug_Documentation/img_2.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DSS_Debug_Documentation/img_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DSS_Debug_Documentation/img_3.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DSS_Debug_Documentation/img_4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DSS_Debug_Documentation/img_4.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_1.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_10.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_10.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_11.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_11.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_12.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_12.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_13.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_13.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_14.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_14.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_2.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_3.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_4.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_5.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_6.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_7.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_8.png
--------------------------------------------------------------------------------
/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Install_and_Deploy/DolphinschedulerAppConn_deployment/img_9.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/hive-6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/hive-6.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/hive1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/hive1.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/hive2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/hive2.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/hive3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/hive3.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/hive4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/hive4.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/hive5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/hive5.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/hive7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/hive7.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/home.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/home.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/udf-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/udf-3.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/udf1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/udf1.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/Scriptis/udf2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/Scriptis/udf2.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/workspace/ws_img1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/workspace/ws_img1.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/workspace/ws_img2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/workspace/ws_img2.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/workspace/ws_img3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/workspace/ws_img3.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/workspace/ws_img5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/workspace/ws_img5.png
--------------------------------------------------------------------------------
/en_US/Images/Using_Document/workspace/ws_img6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/Using_Document/workspace/ws_img6.png
--------------------------------------------------------------------------------
/en_US/Images/apiservice.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/apiservice.png
--------------------------------------------------------------------------------
/en_US/Images/applicationshop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/applicationshop.png
--------------------------------------------------------------------------------
/en_US/Images/loginpage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/loginpage.png
--------------------------------------------------------------------------------
/en_US/Images/projectpage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/projectpage.png
--------------------------------------------------------------------------------
/en_US/Images/runworkflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/runworkflow.png
--------------------------------------------------------------------------------
/en_US/Images/scriptis.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/scriptis.png
--------------------------------------------------------------------------------
/en_US/Images/workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/workflow.png
--------------------------------------------------------------------------------
/en_US/Images/workspacepage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/Images/workspacepage.png
--------------------------------------------------------------------------------
/en_US/Installation_and_Deployment/DSS_1.0.1_upgrade_to_1.1.0_using_documentation.md:
--------------------------------------------------------------------------------
1 | # DataSphere Studio 1.0.1 upgrade to 1.1.0 documentation (applies to 1.0.1 through 1.1.1)
2 |
3 | ### The upgrade consists mainly of the following steps:
4 | - Stop services
5 | - Execute the database upgrade script
6 | - Replace the dss deployment directory with the new version package
7 | - Add and modify configuration files
8 | - Start services
9 |
10 | #### 1. Stop services
11 | Go to the dss deployment directory and run the following command there to stop all dss services:
12 | ```shell
13 | cd ${DSS_DEPLOY_PATH}
14 |
15 | sh sbin/dss-stop-all.sh
16 | ```
17 | #### 2. Execute the database upgrade sql script
18 |
19 | How to get the upgrade sql script:
20 |
21 | - From the decompressed dss 1.1.0 installation package: db/version_update/from_v101_to_v110.sql
22 | - Download from the github page; the address is: (to be uploaded and added)
23 |
24 | Then log in to the dss database and execute the source command:
25 |
26 | ```shell
27 | source ${your_path}/from_v101_to_v110.sql
28 | ```
29 | Under normal circumstances the script executes successfully.
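For reference, the full session might look like this (host, user name, and script path are examples):

```shell
## Log in to the MySQL instance hosting the dss database (credentials are examples)
mysql -h 127.0.0.1 -u dss_user -p

## Inside the mysql client: switch to the dss database, then run the upgrade script
mysql> use dss;
mysql> source /tmp/from_v101_to_v110.sql;
```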
30 |
31 | #### 3. Replace the dss deployment directory with the new version package
32 |
33 | **(Important: it is best to back up the database of the old version of dss first)**
34 |
35 | - Back up the following tables, whose structures have changed, using the mysqldump command (a backup sketch follows the table lists below):
36 |
37 | dss_appconn, dss_appconn_instance, dss_workflow_node, dss_onestop_user_favorites, dss_component_role, dss_onestop_menu_application
38 |
39 |
40 |
41 | The following tables only had their names changed, but can be backed up as well:
42 |
43 | dss_dictionary
44 | dss_role
45 | dss_admin_dept
46 | dss_download_audit
47 | dss_flow_relation
48 | dss_flow_edit_lock
49 | dss_onestop_menu
50 | dss_menu_role
51 |
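A minimal backup sketch with mysqldump (user name and database name are examples):

```shell
## Dump the changed tables into a single backup file
mysqldump -u dss_user -p dss \
  dss_appconn dss_appconn_instance dss_workflow_node \
  dss_onestop_user_favorites dss_component_role dss_onestop_menu_application \
  > dss_v101_table_backup.sql
```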
52 | - Back up the old dss deployment directory, taking /appcom/Install/DSSInstall as an example:
53 | ```shell
54 | mv /appcom/Install/DSSInstall /appcom/Install/DSSInstall-bak
55 | ```
56 |
57 | Put the dss 1.1.0 installation package in a temporary directory and decompress it:
58 | ```shell
59 | mkdir /tmp/dss-pkg
60 | mv wedatasphere-dss-1.1.0-dist.tar.gz /tmp/dss-pkg/
61 | cd /tmp/dss-pkg
62 | tar zxvf wedatasphere-dss-1.1.0-dist.tar.gz
63 | ```
64 | The directory structure after decompression is as follows:
65 | 
66 |
67 | Then copy the required files from the dss-1.1.0 directory into the dss installation directory (recreating it first, since it was moved aside above):
68 | ```shell
69 | mkdir -p /appcom/Install/DSSInstall
70 | cd dss-1.1.0 && cp -r lib dss-appconns sbin /appcom/Install/DSSInstall/
71 | ```
72 |
73 | Copy the configuration file from the previous version:
74 | ```shell
75 | cp -r /appcom/Install/DSSInstall-bak/conf /appcom/Install/DSSInstall/
76 | ```
77 |
78 | #### 4. Add and modify configuration
79 |
80 | The new version adds two configuration files: dss-scriptis-server.properties and dss-guide-server.properties.
81 | Copy them directly from the dss-1.1.0/conf directory:
82 |
83 | ```shell
84 | cp -r conf/dss-scriptis-server.properties /appcom/Install/DSSInstall/conf/
85 | cp -r conf/dss-guide-server.properties /appcom/Install/DSSInstall/conf/
86 | ```
87 |
88 | Configuration modification:
89 |
90 | 1. Add to the dss.properties configuration file:
91 | ```properties
92 | ### If an appconn does not implement all development specifications (node update, delete, copy, import and export operations), add it to this list to skip the check
93 | wds.dss.appconn.checker.development.ignore.list=workflow,sendemail
94 | ### If an appconn does not implement all project specifications (add, delete, modify, query), add it to this list to skip the check
95 | wds.dss.appconn.checker.project.ignore.list=
96 | ```
97 |
98 | #### 5. Start services
99 | Now you can start the new version of the dss services. Run the following command in the **dss deployment directory** to start all services:
100 |
101 | ```shell
102 | sh sbin/dss-start-all.sh
103 | ```
104 |
105 |
106 |
107 |
108 |
--------------------------------------------------------------------------------
/en_US/Installation_and_Deployment/DSS_Debug_Documentation.md:
--------------------------------------------------------------------------------
1 |
2 | # Debug related
3 |
4 | > No programmer writes bug-free code in one go, so many programmers spend a considerable part of their time debugging. Debugging is a job every programmer must face. The following will guide you through DSS remote debugging (based on DSS 1.1.0).
5 |
6 | ## step 1 Prepare DSS source code and compile
7 |
8 | ```shell
9 | git clone https://github.com/WeBankFinTech/DataSphereStudio.git
10 | cd DataSphereStudio
11 | #If necessary, you can switch to the corresponding branch
12 | #git checkout dev-xxx
13 | mvn -N install
14 | mvn clean install
15 | ```
16 |
17 | ## step 2 Deploy the DSS service on the server
18 | If DSS has not been deployed, please refer to the deployment document: [DSS single-machine deployment document](DSS&Linkis_one-click_deployment_document_stand-alone_version.md)
19 |
20 | ## step 3 Open debug port
21 |
22 | First, identify the service that contains the code you want to debug, based on where that code lives.
23 |
24 | Then enter the ${DSS_HOME}/sbin/ext directory under the dss deployment directory and modify the startup script of the service to be debugged to open a remote debugging port (taking the workflow-server service as an example):
25 |
26 | ```shell
27 | cd ${DSS_HOME}/sbin/ext
28 |
29 | vim dss-workflow-server
30 | ```
31 | Find the DEBUG_PORT keyword in the script and set the port number to open **(it must be reachable from your local machine)**:
32 |
33 | 
34 |
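For orientation, the debug setting generally boils down to the standard JDWP agent flag; a minimal sketch (the variable names are illustrative, the actual script may differ):

```shell
## Expose a JDWP remote-debug port keyed off DEBUG_PORT (illustrative names)
DEBUG_PORT=10092   # any free port reachable from your local machine
JAVA_OPTS="$JAVA_OPTS -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=$DEBUG_PORT"
## On Java 9+, use address=*:$DEBUG_PORT to accept connections from other hosts
```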
35 | Then you need to restart the corresponding service to make it take effect:
36 |
37 | ```shell
38 | sh sbin/dss-daemon.sh restart workflow-server
39 | ```
40 | Note: If you are not sure of the service name, you can look it up in the ${DSS_HOME}/sbin/common.sh script;
41 | entering the service's keyword is enough to start the corresponding service:
42 |
43 | 
44 |
45 |
46 | ## step 4 Configure remote debugging in IDEA
47 | Open the window shown below and configure the remote debugging port, service, and module:
48 |
49 | 
50 |
51 | ## step 5 start debugging
52 |
53 | Click the debug button in the upper right corner of IDEA to start debugging:
54 |
55 | 
56 |
57 | ## step 6 Replace jar package
58 |
59 | After modifying the code locally, package the jar of the corresponding module, replace the corresponding jar under lib on the server, and restart the service.
60 |
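For example, a minimal build sketch with Maven (the module path below is only an example; substitute the module you changed):

```shell
## Build a single module plus the local modules it depends on, skipping tests
mvn clean package -pl dss-orchestrator/dss-orchestrator-common -am -DskipTests
```

Then upload the jar and replace it on the server: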
61 | ```shell
62 | cd ${DSS_HOME}
63 |
64 | ## Upload the jar package to the server
65 | rz -bye
66 |
67 | ## Overwrite the original jar with the uploaded one
68 | cp ${your_jar_name} lib/${jar_path}
69 | ```
70 |
71 | Note: If you don't know which service's lib directory contains the jar, you can search for all of its locations with the following command:
72 | ```shell
73 | cd ${DSS_HOME}
74 |
75 | ## Search all dss-orchestrator-common-*.jar packages in the lib directory
76 | find lib/ -name "*dss-orchestrator-common*"
77 | ```
78 |
79 | 
--------------------------------------------------------------------------------
/en_US/Installation_and_Deployment/Schedulis_Linkis_JobType_Installation_Documentation.md:
--------------------------------------------------------------------------------
1 | # Schedulis Linkis JobType installation documentation
2 |
3 | > This article mainly introduces the automatic deployment and installation steps of Schedulis' Linkis JobType. If you install it manually, please refer to Azkaban's JobType [installation steps](https://azkaban.github.io/azkaban/docs/latest/#job-types)
4 |
5 | 1. Go to the Schedulis directory
6 |
7 | ```shell
8 | ## Users first need to go to the installation directory of Schedulis. The specific commands are as follows:
9 | cd xx/schedulis_0.7.0_exec/plugins/jobtypes/linkis/bin
10 | ```
11 |
12 | 2. Modify config.sh configuration
13 |
14 | ```shell
15 | ## Linkis gateway url
16 | LINKIS_GATEWAY_URL=http://127.0.0.1:9001 ## GateWay address for linkis
17 |
18 | ## Linkis gateway token default WS-AUTH
19 | LINKIS_GATEWAY_TOKEN=WS-AUTH ## The proxy token of Linkis, this parameter can use the default value
20 |
21 | ## Azkaban executor host
22 | AZKABAN_EXECUTOR_HOST=127.0.0.1 ## This IP is the machine IP if Schedulis is a stand-alone installation, or the Schedulis executor machine IP if it is a distributed installation
23 |
24 | ## SSH Port
25 | SSH_PORT=22 ## SSH port
26 |
27 | ## Azkaban executor dir
28 | AZKABAN_EXECUTOR_DIR=xx/schedulis_0.7.0_exec ## For a stand-alone installation this is the Schedulis installation directory; for a distributed installation it is the executor's installation directory. Note: do not append a trailing /.
29 |
30 | ## Azkaban executor plugin reload url
31 | AZKABAN_EXECUTOR_URL=http://$AZKABAN_EXECUTOR_HOST:12321/executor?action=reloadJobTypePlugins ## You only need to modify the IP and port here.
32 | ```
33 |
34 | 3. Execute the installation script
35 |
36 | ```shell
37 | sh install.sh
38 | ```
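After the script finishes, you can manually hit the reload endpoint configured in AZKABAN_EXECUTOR_URL to confirm the executor picks up the new JobType (host and port are the values from config.sh):

```shell
## Ask the Azkaban executor to reload its jobtype plugins
curl "http://127.0.0.1:12321/executor?action=reloadJobTypePlugins"
```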
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/en_US/README.md:
--------------------------------------------------------------------------------
1 | ## Introduction
2 |
3 | DataSphere Studio (DSS for short) is a data application development and management integration framework developed by WeBank.
4 |
5 | Under a unified UI, DataSphere Studio offers a workflow-based, graphical drag-and-drop development experience that covers the full-process scenario requirements of data application development: from data exchange, desensitization and cleaning, analysis and mining, quality inspection, and visual display to timing scheduling and data output applications.
6 |
7 | ## Document list
8 | * [Installation and Deployment](Installation_and_Deployment)
9 | * [DSS Single Machine Deployment Documentation](Installation_and_Deployment/DSS&Linkis_one-click_deployment_document_stand-alone_version.md)
10 | * [Exchangis AppConn Plugin installation documentation](https://github.com/WeBankFinTech/Exchangis/blob/master/docs/en_US/ch1/exchangis_appconn_deploy_en.md)
11 | * [Qualitis AppConn Plugin installation documentation](https://github.com/WeBankFinTech/Qualitis/blob/master/docs/zh_CN/ch1/%E6%8E%A5%E5%85%A5%E5%B7%A5%E4%BD%9C%E6%B5%81%E6%8C%87%E5%8D%97.md)
12 | * [Schedulis AppConn Plugin installation documentation](Installation_and_Deployment/SchedulisAppConn_Plugin_Installation_Documentation.md)
13 | * [Visualis AppConn Plugin installation documentation](https://github.com/WeBankFinTech/Visualis/blob/master/visualis_docs/en_US/Visualis_appconn_install_en.md)
14 | * [Streamis AppConn Plugin installation documentation](https://github.com/WeBankFinTech/Streamis/blob/main/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md)
15 | * [Prophecis AppConn Plugin installation documentation](https://github.com/WeBankFinTech/Prophecis/blob/master/docs/zh_CN/Deployment_Documents/Prophecis%20Appconn%E5%AE%89%E8%A3%85%E6%96%87%E6%A1%A3.md)
16 | * [DolphinScheduler AppConn Plugin installation documentation](Installation_and_Deployment/DolphinScheduler_Plugin_Installation_Documentation.md)
17 | * [Using Document](Using_Document)
18 | * [DSS User Manual](Using_Document/DSS_User_Manual.md)
19 | * [DataApiService Using Document](Using_Document/DataApiService_Usage_Documentation.md)
20 | * [Development_documentation](Development_Documentation)
21 | * [Compilation Documentation](Development_Documentation/Compilation_Documentation.md)
22 | * [Third-party system access development guide](Development_Documentation/Third-party_System_Access_Development_Guide.md)
23 |
24 | Stay tuned for more documentation updates...
--------------------------------------------------------------------------------
/en_US/SUMMARY.md:
--------------------------------------------------------------------------------
1 | * [Installation and Deployment](Installation_and_Deployment)
2 | * [DSS Single Machine Deployment Documentation](Installation_and_Deployment/DSS&Linkis_one-click_deployment_document_stand-alone_version.md)
3 | * [Exchangis AppConn Plugin installation documentation](https://github.com/WeBankFinTech/Exchangis/blob/master/docs/en_US/ch1/exchangis_appconn_deploy_en.md)
4 | * [Qualitis AppConn Plugin installation documentation](https://github.com/WeBankFinTech/Qualitis/blob/master/docs/zh_CN/ch1/%E6%8E%A5%E5%85%A5%E5%B7%A5%E4%BD%9C%E6%B5%81%E6%8C%87%E5%8D%97.md)
5 | * [Schedulis AppConn Plugin installation documentation](Installation_and_Deployment/SchedulisAppConn_Plugin_Installation_Documentation.md)
6 | * [Visualis AppConn Plugin installation documentation](https://github.com/WeBankFinTech/Visualis/blob/master/visualis_docs/en_US/Visualis_appconn_install_en.md)
7 | * [Streamis AppConn Plugin installation documentation](https://github.com/WeBankFinTech/Streamis/blob/main/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md)
8 | * [Prophecis AppConn Plugin installation documentation](https://github.com/WeBankFinTech/Prophecis/blob/master/docs/zh_CN/Deployment_Documents/Prophecis%20Appconn%E5%AE%89%E8%A3%85%E6%96%87%E6%A1%A3.md)
9 | * [DolphinScheduler AppConn Plugin installation documentation](Installation_and_Deployment/DolphinScheduler_Plugin_Installation_Documentation.md)
10 | * [Using Document](Using_Document)
11 | * [DSS User Manual](Using_Document/DSS_User_Manual.md)
12 | * [DataApiService Using Document](Using_Document/DataApiService_Usage_Documentation.md)
13 | * [Development_documentation](Development_Documentation)
14 | * [Compilation Documentation](Development_Documentation/Compilation_Documentation.md)
15 | * [Third-party system access development guide](Development_Documentation/Third-party_System_Access_Development_Guide.md)
16 |
17 | Stay tuned for more documentation updates...
--------------------------------------------------------------------------------
/en_US/User_Manual/DSS_How_To_Add_Users.md:
--------------------------------------------------------------------------------
1 | # DataSphere Studio How to add users
2 |
3 | > DSS only provides an administrator account by default. User login authentication relies on Linkis' LDAP user login authentication system. This article will introduce in detail how to add a new DSS user.
4 |
5 | ## 1. Basic introduction
6 |
7 | The user name of the DSS super administrator is the deployment user name. If the deployment user is hadoop, the administrator's user name and password are hadoop/hadoop. For details, see the [DSS single-machine deployment document](../Installation_and_Deployment/DSS&Linkis_one-click_deployment_document_stand-alone_version.md).
8 |
9 | Adding a DSS user mainly involves the following steps:
10 |
11 | - Add an LDAP user
12 | - Complete the environment setup for the new user
13 |
14 | ## 2. Add LDAP user
15 |
16 | The DSS super administrator can create departments and users on the homepage, establishing the company's hierarchy and personnel management system. As shown below:
17 |
18 | 
19 |
20 | #### Create department and user:
21 |
22 | On the home page of the super administrator, click [Management Desk] to enter the management desk page.
23 |
24 | The super administrator can create, modify, and delete departments (note: at most four department levels can be created, including the meta-company); the meta-company cannot be deleted, only its information can be modified.
25 |
26 | The super administrator can also create and modify users and reset user passwords.
27 |
28 | As shown below:
29 |
30 | 
31 |
32 | After you create a user on the page, the DSS background will automatically request LDAP and create a user with the same name in LDAP for you.
33 |
34 | ## 3. Complete the environment setup for new users
35 |
36 | 1. Because DSS & Linkis enforce top-down multi-tenant isolation, a corresponding Linux user must be created on the Linux servers before logged-in users can use DSS normally. The specific steps are as follows:
37 |
38 | - Create corresponding Linux users on all Linkis & DSS servers.
39 | - If Hadoop is used, a corresponding Linux user needs to be created on the NameNode of Hadoop.
40 | - Ensure that the Linux user on the Linkis & DSS servers can run commands such as `hdfs dfs -ls /` normally, and can execute shell commands such as `spark-sql -e` and `hive -e` normally.
41 | - Since each user's workspace is strictly isolated, you also need to create a workspace and HDFS directory for the user, as follows:
42 |
43 | ```shell
44 | ## Create user workspace directory and authorization
45 | mkdir $WORKSPACE_USER_ROOT_PATH/${NEW_USER}
46 | chmod 750 $WORKSPACE_USER_ROOT_PATH/${NEW_USER}
47 |
48 | ## Create user HDFS directory and authorize
49 | hdfs dfs -mkdir $HDFS_USER_ROOT_PATH/${NEW_USER}
50 | hdfs dfs -chown ${NEW_USER}:${NEW_USER} $HDFS_USER_ROOT_PATH/${NEW_USER}
51 | hdfs dfs -chmod 750 $HDFS_USER_ROOT_PATH/${NEW_USER}
52 | ```
53 |
54 | `WORKSPACE_USER_ROOT_PATH` and `HDFS_USER_ROOT_PATH` are the workspace and HDFS root paths set when you install DSS with one click.
55 |
56 | If you did not set them, they default to:
57 |
58 | ```shell
59 | WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis
60 | HDFS_USER_ROOT_PATH=hdfs:///tmp/linkis
61 | ```
62 |
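A worked example for a hypothetical new user `alice` under the default paths above (note the `file://` scheme is dropped for the local `mkdir`):

```shell
## Local workspace directory for the new user
mkdir /tmp/linkis/alice
chmod 750 /tmp/linkis/alice

## HDFS directory for the new user
hdfs dfs -mkdir /tmp/linkis/alice
hdfs dfs -chown alice:alice /tmp/linkis/alice
hdfs dfs -chmod 750 /tmp/linkis/alice
```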
--------------------------------------------------------------------------------
/en_US/User_Manual/Data_Service_User_Manual.md:
--------------------------------------------------------------------------------
1 | Data Service
2 | ----------
3 |
4 | DSS currently supports publishing SQL scripts as data service APIs and sharing them with other users. By simply setting parameters, business users can execute data service scripts and browse or download platform data directly, without writing code and without a big data platform account.
5 | A data service is a service that users with database table permissions provide to users without such permissions. This article explains data services from two perspectives: users who publish data services (publishing users) and users who consume them (consuming users).
6 | Due to business needs, data services are subject to the following restrictions:
7 | * Only Spark SQL queries are supported
8 | * Multiple result-set queries are not supported
9 | * Only SELECT queries are supported
10 | * Only one SQL statement is allowed
11 | * A semicolon is not allowed at the end of the SQL statement
12 |
13 | As an exception to the above restrictions, the following notation is allowed:
14 | ```sql
15 | USE default;
16 | SELECT * FROM default.book
17 | ```
18 |
19 | **1. Create a data service**
20 |
21 | Due to business needs, a data service is created and others are authorized to use it. The publishing user enters Scriptis, edits a new script file, and writes Spark SQL statements, embedding variables in the SQL so that business personnel can later obtain data simply by setting the parameters themselves.
22 | After saving the Spark SQL script, you can click "Publish as Data API" at the top of the script editing bar (only some users have this permission; for users without it, the button is not visible), then fill in the new API information as shown in the figure below.
23 | 
24 |
25 | Click Next to set the variable information.
26 | 
27 |
28 |
29 | On the workspace homepage, publishing users can open the "Data Service" application through the application tools to use data services, and click "More" on the data service tab to enter the data service management interface.
30 |
31 | 
32 |
33 |
34 |
35 | **2. Use of data services**
36 |
37 | After entering the data service page, you can see the list of data services available to the user, where "default" shows all data services. The user can click the corresponding label to filter out the data services they need, and can filter by name, status, and submitter in the search box.
38 |
39 | Users can click "Enter Use" to set the values of the parameters in the filter conditions. The data the user obtains is a subset of the publishing user's data set.
40 |
41 | 
42 |
43 | **3. Modify data service**
44 |
45 | A data service may need to be modified due to business needs. After the publishing user has modified the data service's script, he can click "Update Data API".
46 | When updating the data service, other data services can be selected to be bound.
47 | 
48 |
49 | **4. Use postman to access data services**
50 |
51 | After a data service is released, it can be accessed through its API, which other systems can call directly. Submit a query as shown below:
52 |
53 | 
54 |
55 | A task execution ID is returned, from which the task's execution progress, logs, result set, etc. can be obtained.
56 |
57 | 
58 |
59 | Description: The token of the data service is returned by the /queryById? interface (triggered when you click to use the service), in the userToken field. Access to all interfaces must be authenticated by the GateWay; the data service token can only be used for the data service management process. To authenticate with postman, use the page cookie or linkis-gateway's key authentication: in the request headers, add Token-Code: XXX (the login key of linkis-gateway) and Token-User: XXX (the login user of linkis-gateway).
60 |
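For illustration, a minimal request sketch (the gateway address and API path are placeholders; the headers are the ones described above):

```shell
## Call a published data service API with linkis-gateway key authentication
curl -H "Token-Code: XXX" -H "Token-User: hadoop" \
  "http://${GATEWAY_HOST}:9001/${DATA_SERVICE_API_PATH}?param1=value1"
```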
61 |
--------------------------------------------------------------------------------
/en_US/User_Manual/Scriptis_Usage_Documentation.md:
--------------------------------------------------------------------------------
1 | Scriptis usage documentation
2 | ------
3 | ## Introduction
4 | Scriptis lets users leverage the big data platform's computing engines, such as Spark, Hive, and HBase, for interactive query and analysis, supporting the daily work of data mining and analysis staff. It provides a graphical, multi-style interface that makes data analysis, script editing, testing, and querying more convenient and simple.
5 | 
6 |
7 | ## workspace
8 | The workspace is a file directory over which the user has full permissions to perform file management operations. It generally corresponds to a file directory deployed on the Linkis server; each login user has a corresponding directory that stores files such as user scripts and result sets.
9 | 
10 | Right-clicking the workspace folder offers functions that mainly include copy path, new directory, new script, and refresh.
11 | 
12 | Right-clicking a file under the workspace folder brings up the script context menu, whose functions mainly include open to the side, copy path, rename, delete, import to hive (for csv, txt, and excel files), and import to hdfs.
13 | 
14 | ## database
15 | The database module lists the hive databases the logged-in user has permission on. Right-clicking a database mainly offers refreshing the database, tables, and field information. Right-clicking a table offers querying the table (quickly generating a temporary hive script for data viewing), copying the table name, copying the table fields, and deleting the table; it can also show the table structure, including field details, table details, and table partition information.
16 | 
17 | ## UDFs and functions
18 | The UDF module makes it convenient for users to classify and display UDFs and to manage personal functions. UDF configuration management has been moved to the Linkis management console; for the related configuration, please refer to the Linkis UDF documentation.
19 |
20 | ## HDFS
21 | After Linkis is installed, each user is provided with an HDFS path by default to store user resource files. Scriptis displays the user's HDFS folder, which you can add to, delete from, and modify via right-click; the files under this path can be managed the same way.
22 | 
23 |
--------------------------------------------------------------------------------
/en_US/User_Manual/Super_Administrator_Function.md:
--------------------------------------------------------------------------------
1 | ## Workspace concept
2 |
3 | In data development and management, a workspace is similar to a team. A company (i.e., a master account) can create multiple workspaces. A workspace is the basic unit for managing tasks and members and for assigning roles and permissions; collaborative development, operation and maintenance, analysis, and other work take place within a workspace.
4 |
5 | The organization's master account is by default the workspace administrator, responsible for managing the organization's workspace console, planning the workspace structure, and creating and deleting workspaces.
6 |
7 | Super administrators can click [Create Workspace] on the administrator homepage to create a workspace.
8 |
9 | 
10 |
11 | #### Workspace Type:
12 |
13 | Workspaces are divided into two categories: project-oriented and department-oriented:
14 |
15 | - Members of a project-oriented workspace form a vertical structure and can come from different departments;
16 | - Members of a department-oriented workspace form a horizontal structure and come from the same department.
17 |
18 | In the project-oriented workspace, members of any department can be added;
19 |
20 | In a department-oriented workspace, only members of this department can be added.
21 |
22 | #### Workspace management:
23 |
24 | The default role of a workspace creator is owner, who can delegate the management of the workspace to one or more administrators.
25 |
26 | Only the administrator of the workspace can enter the [Workspace Management] module to manage the basic information and permission information of the workspace accordingly.
--------------------------------------------------------------------------------
/en_US/User_Manual/User_Documentation.md:
--------------------------------------------------------------------------------
1 | ## DSS User Document
2 |
3 | ## Motivation
4 | DSS 1.x is a milestone version that carries out extensive optimization and refactoring on top of DSS 0.x. Due to limited space, this document can only cover the basic usage flow of DSS; we hope to improve and refine the finer operational details together with community partners. If you have any questions or suggestions during use, feel free to contact the open source staff of WeBank's big data platform community at any time. We are committed to building a better one-stop big data suite and contributing to the big data open source ecosystem.
5 |
6 | ## Foreword
7 | DSS 1.x refactors and optimizes the front-end page interaction. This document is the DSS user manual and covers the basic usage flow of DSS 1.0; for more operational details, please refer to the documentation of each module.
8 | User documentation is mainly divided into the following documents:
9 | 1. [Scriptis usage documentation]()
10 | 2. [Workflow usage document]()
11 | 3. [Data service usage document]()
12 | 4. [Linkis console user manual]()
13 |
14 | ## Management Module Introduction
15 | ## Login to the homepage
16 | For convenience, the system defaults to Linkis' Linux deployment user for login. For example, for Linkis and DSS deployed by the hadoop user, you can log in directly with user: hadoop, password: hadoop (the password is the username). Open the DSS front-end address (for example 127.0.0.1:8088) and enter the username and password hadoop/hadoop to log in. The login page verifies DSS user access permissions.
17 | 
18 | * Note: To support multi-user login, DSS relies on Linkis for user login, which must be configured in linkis-GateWay. Linkis-GateWay supports LDAP by default. *
19 |
20 | ## workspace
21 | Go to the workspace page to create and manage workspaces. The workspace is the top-level concept of DSS: a workspace can be a department, a business line, or an organization, and is used to manage data applications, including personnel, projects, and components. With the workspace administrator role, you can manage the workspace and control its component and personnel permissions.
22 | 
23 |
24 | ## Engineering Management
25 | After entering a workspace, you are taken to the project homepage, where you can create projects. In actual development and production, a project is often used to manage and develop one type of data application, including workflows, single tasks, etc. The projects under each workspace are isolated from each other; in practice, creating one project per data application is ideal.
26 | 
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Create_Departments_And_Users.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Create_Departments_And_Users.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Create_a_workspace.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Create_a_workspace.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Cycle_Instance_Completion.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Cycle_Instance_Completion.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Edit_Scheduled_Tasks.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Edit_Scheduled_Tasks.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Page_Overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Page_Overview.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Process_Status_Statistics.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Process_Status_Statistics.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Process_Status_Statistics_By_Date.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Process_Status_Statistics_By_Date.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Process_Time_Ranking.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Process_Time_Ranking.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Run_A_Workflow_On_A_Schedule.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Run_A_Workflow_On_A_Schedule.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Run_The_Workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Run_The_Workflow.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Task_Instance.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Task_Instance.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Timed_Page.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Timed_Page.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/View_Logs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/View_Logs.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Workflow_Definition.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Workflow_Definition.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Workflow_Instance.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Workflow_Instance.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Schedule_Center/Workflow_Instance_And_Success_Rate_Statistics.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Schedule_Center/Workflow_Instance_And_Success_Rate_Statistics.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/Super_Administrator_Function.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/Super_Administrator_Function.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/apiservicepage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/apiservicepage.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/createapiservice.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/createapiservice.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/createapiservice_param.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/createapiservice_param.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/loginpage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/loginpage.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/modifyapiservice.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/modifyapiservice.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/postman1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/postman1.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/postman2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/postman2.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/project.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/project.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/runworkflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/runworkflow.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/scriptis.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/scriptis.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/scriptis_database.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/scriptis_database.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/scriptis_hdfs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/scriptis_hdfs.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/scriptis_summary.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/scriptis_summary.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/scriptis_workspace.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/scriptis_workspace.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/scriptis_workspace_dir.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/scriptis_workspace_dir.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/scriptis_workspace_file.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/scriptis_workspace_file.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/useapiservice.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/useapiservice.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/workflow.png
--------------------------------------------------------------------------------
/en_US/User_Manual/images/workspace.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/en_US/User_Manual/images/workspace.png
--------------------------------------------------------------------------------
/en_US/Using_Document/DataApiService_Usage_Documentation.md:
--------------------------------------------------------------------------------
1 | Data Service
2 | ----------
3 |
4 | DSS currently supports publishing SQL scripts as data service APIs and sharing them with other users. By simply setting parameters, business users can execute data service scripts and browse or download platform data directly, without writing code and without a big data platform account.
5 |
6 | 
7 |
8 | **1、Create a data service**
9 |
10 | Due to business needs, a data service is created and others are authorized to use it. The publishing user enters Scriptis, edits a new script file, and writes SQL statements, embedding variables in the SQL so that business personnel can later obtain data simply by setting the parameters themselves.
11 |
12 | 
13 |
14 |
15 | After saving the Spark SQL script, you can click "Publish as Data API" at the top of the script editing bar (only some users have this permission; for users without it, the button is not visible). After clicking, fill in the API creation information (fields marked with a red
16 | \* are required), as shown in the figure below.
17 |
18 | 
19 |
20 | Click Next to set the variable information.
21 | 
22 |
23 |
24 | Publishing users can enter data services from the workspace homepage through "Enter Data Service Development" or "More Application Tools", and click "More" on the Data Service tab to enter the data service management interface.
25 |
26 | 
27 |
28 |
29 |
30 | **2、Use data services**
31 |
32 | After entering the data service page, you can see the list of data services available to the user, where "default" shows all data services by default; you can filter by name, status, and submitter. The details page is shown in the figure below.
33 |
34 | 
35 |
36 | The user clicks "Enter Use" to set the values of the parameters in the filter conditions. As can be seen, the user's data is a subset of the publishing user's data set.
37 |
38 | 
39 |
40 | **3、Modify data service**
41 |
42 | A data service may need to be modified due to business needs. After the publishing user modifies the data service's script, he can click "Update Data API".
43 | When updating the data service, other data services can be selected to be bound.
44 |
45 | 
46 |
47 | **4、Access data services using postman**
48 |
49 | After a data service is released, it can be accessed through its API, which other systems can call directly. Submit a query as shown below:
50 |
51 | 
52 |
53 | A task execution ID is returned, from which the task's execution progress, logs, result set, etc. can be obtained.
54 |
55 | 
56 |
57 | Description: The token of the data service is returned by the /queryById? interface (triggered when you click to enter and use the service), in the userToken field. Access to all interfaces must be authenticated by the GateWay; the data service token can only be used for the data service management process. To authenticate with postman, use the page cookie or linkis-gateway's key authentication: in the request headers, add Token-Code: XXX (the login key of linkis-gateway) and Token-User: XXX (the login user of linkis-gateway).
58 |
59 |
60 |
--------------------------------------------------------------------------------
/en_US/Using_Document/Scriptis/Scriptis_User_Tests1_Scala.md:
--------------------------------------------------------------------------------
1 | # DSS User Test Example 1: Scala
2 |
3 | The purpose of the DSS user test samples is to provide a set of test samples for new users of the platform, to familiarize them with common DSS operations and to verify the correctness of the DSS platform.
4 |
5 | 
6 |
7 | ## 1.1 Spark Core (entry object sc)
8 |
9 | In Scriptis, SparkContext is already registered for you by default, so just use sc directly:
10 |
11 | ### 1.1.1 Single Value operator (mapValues operator as an example)
12 |
13 | ```scala
14 | val rddMap = sc.makeRDD(Array((1,"a"),(1,"d"),(2,"b"),(3,"c")),4)
15 | val res = rddMap.mapValues(data=>{data+"||||"})
16 | res.collect().foreach(data=>println(data._1+","+data._2))
17 | ```
18 |
19 | ### 1.1.2 Double Value operator (union operator as an example)
20 |
21 | ```scala
22 | val rdd1 = sc.makeRDD(1 to 5)
23 | val rdd2 = sc.makeRDD(6 to 10)
24 | val rddCustom = rdd1.union(rdd2)
25 | rddCustom.collect().foreach(println)
26 | ```
27 |
28 | ### 1.1.3 K-V operator (reduceByKey operator as an example)
29 |
30 | ```scala
31 | val rdd1 = sc.makeRDD(List(("female",1),("male",2),("female",3),("male",4)))
32 | val rdd2 = rdd1.reduceByKey((x,y)=>x+y)
33 | rdd2.collect().foreach(println)
34 | ```
35 |
36 | ### 1.1.4 Action operator (the collect operator above as an example)
37 |
38 | ### 1.1.5 Read a file from hdfs and process it simply
39 |
40 | ```scala
41 | import spark.implicits._
42 | case class Person(name: String, age: String)
43 | val file = sc.textFile("/test.txt")
44 | val person = file.map(line => {
45 |   val values = line.split(",")
46 |   Person(values(0), values(1))
47 | })
48 | val df = person.toDF()
49 | df.select($"name").show()
50 | ```
51 |
52 |
53 |
54 | ## 1.2 UDF function test
55 |
56 | ### 1.2.1 Function definition
57 |
58 |
59 |
60 | ```scala
61 | def ScalaUDF3(str: String): String = "hello, " + str + ", this is a third attempt"
62 | ```
63 |
64 | ### 1.2.2 Register function
65 |
66 | Functions -> Personal Functions -> right-click "Add Spark Function" -> the registration method is the same as in regular Spark development
67 |
68 | 
69 |
70 | ## 1.3 UDAF function test
71 |
72 | ### 1.3.1 Jar package upload
73 |
74 | Develop a UDAF function for averaging in IDEA, package it into a jar (wordcount), and upload it to the DSS jar folder. A sketch of such a function is shown below the figure.
75 |
76 | 
77 |
78 | ### 1.3.2 Register function
79 |
80 | Functions -> Personal Functions -> right-click "Add General Function" -> the registration method is the same as in regular Spark development
81 |
82 | 
--------------------------------------------------------------------------------
/en_US/Using_Document/Scriptis/Scriptis_User_Tests2_Hive.md:
--------------------------------------------------------------------------------
1 | # DSS User Test Example 2: Hive
2 |
3 | The purpose of the DSS user test samples is to provide a set of test samples for new users of the platform to familiarize themselves with the common operations of DSS and to verify the correctness of the DSS platform.
4 |
5 | 
6 |
7 | ## 2.1 Data warehouse table creation
8 |
9 | Enter the "Database" page, click "+", enter the table information, table structure and partition information in turn to create a database table:
10 |
11 | 
12 |
13 | 
14 |
15 | Through the above process, create the department table dept, the employee table emp, and the partitioned employee table emp_partition respectively. The table creation statements are as follows:
16 |
17 | ```sql
18 | create external table if not exists default.dept(
19 | deptno int,
20 | dname string,
21 | loc int
22 | )
23 | row format delimited fields terminated by '\t';
24 |
25 | create external table if not exists default.emp(
26 | empno int,
27 | ename string,
28 | job string,
29 | mgr int,
30 | hiredate string,
31 | sal double,
32 | comm double,
33 | deptno int
34 | )
35 | row format delimited fields terminated by '\t';
36 |
37 | create table if not exists default.emp_partition(
38 | empno int,
39 | ename string,
40 | job string,
41 | mgr int,
42 | hiredate string,
43 | sal double,
44 | comm double,
45 | deptno int
46 | )
47 | partitioned by (month string)
48 | row format delimited fields terminated by '\t';
49 | ```
50 |
51 | **Import Data**
52 |
53 | Currently, data must be imported in batches manually through the backend; small amounts of data can also be inserted from the page using insert statements.
54 |
55 | ```sql
56 | load data local inpath 'dept.txt' into table default.dept;
57 | load data local inpath 'emp.txt' into table default.emp;
58 | load data local inpath 'emp1.txt' into table default.emp_partition partition (month='202001');
59 | load data local inpath 'emp2.txt' into table default.emp_partition partition (month='202002');
60 | load data local inpath 'emp2.txt' into table default.emp_partition partition (month='202003');
61 | ```
62 |
63 | Other data is imported according to the above statement. The path of the sample data file is: `examples\ch3`
64 |
65 | ## 2.2 Basic SQL syntax test
66 |
67 | ### 2.2.1 Simple query
68 |
69 | ```sql
70 | select * from dept;
71 | ```
72 |
73 | ### 2.2.2 Join connection
74 |
75 | ```sql
76 | select * from emp
77 | left join dept
78 | on emp.deptno = dept.deptno;
79 | ```
80 |
81 | ### 2.2.3 Aggregate function
82 |
83 | ```sql
84 | select dept.dname, avg(sal) as avg_salary
85 | from emp left join dept
86 | on emp.deptno = dept.deptno
87 | group by dept.dname;
88 | ```
89 |
90 | ### 2.2.4 Built-in function
91 |
92 | ```sql
93 | select ename, job,sal,
94 | rank() over(partition by job order by sal desc) sal_rank
95 | from emp;
96 | ```
97 |
98 | ### 2.2.5 Partitioned table simple query
99 |
100 | ```sql
101 | show partitions emp_partition;
102 | select * from emp_partition where month='202001';
103 | ```
104 |
105 | ### 2.2.6 Partitioned table union query
106 |
107 | ```sql
108 | select * from emp_partition where month='202001'
109 | union
110 | select * from emp_partition where month='202002'
111 | union
112 | select * from emp_partition where month='202003';
113 | ```
114 |
115 | ## 2.3 UDF function test
116 |
117 | ### 2.3.1 Jar package upload
118 |
119 | After entering the Scriptis page, right-click the directory path to upload the jar package:
120 |
121 | 
122 |
123 | The test example jar is in `examples\ch3\rename.jar`
124 |
125 | ### 2.3.2 Custom function
126 |
127 | Enter the "UDF Function" option (such as 1), right-click the "Personal Function" directory, and select "Add Function":
128 |
129 | 
130 |
131 | Enter the function name, select the jar package, and fill in the registration format and input and output format to create a function:
132 |
133 | 
134 |
135 | 
136 |
137 | The obtained function is as follows:
138 |
139 | 
140 |
141 | ### 2.3.3 SQL query with a custom function
142 |
143 | After completing the function registration, you can enter the workspace page to create a .hql file to use the function:
144 |
145 | ```sql
146 | select deptno,ename, rename(ename) as new_name
147 | from emp;
148 | ```
149 |
--------------------------------------------------------------------------------
/en_US/Using_Document/Scriptis/Scriptis_User_Tests3_SparkSQL.md:
--------------------------------------------------------------------------------
1 | # DSS User Test Example 3: SparkSQL
2 |
3 | The purpose of the DSS user test samples is to provide a set of test samples for new users of the platform to familiarize themselves with the common operations of DSS and to verify the correctness of the DSS platform.
4 |
5 | 
6 |
7 | ## 3.1 RDD and DataFrame conversion
8 |
9 | ### 3.1.1 RDD to DataFrame
10 |
11 | ```scala
12 | case class MyList(id:Int)
13 |
14 | val lis = List(1,2,3,4)
15 |
16 | val listRdd = sc.makeRDD(lis)
17 | import spark.implicits._
18 | val df = listRdd.map(value => MyList(value)).toDF()
19 |
20 | df.show()
21 | ```
22 |
23 | ### 3.1.2 DataFrame to RDD
24 |
25 | ```scala
26 | case class MyList(id:Int)
27 |
28 | val lis = List(1,2,3,4)
29 | val listRdd = sc.makeRDD(lis)
30 | import spark.implicits._
31 | val df = listRdd.map(value => MyList(value)).toDF()
32 | println("------------------")
33 |
34 | val dfToRdd = df.rdd
35 |
36 | dfToRdd.collect().foreach(print(_))
37 | ```
38 |
39 | ## 3.2 DSL syntax style implementation
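40 |
41 | Sections 3.2 and 3.3 assume that df1 and df2 are two DataFrames with the same schema, each containing a department column, for example (an illustrative setup):
42 |
43 | ```scala
44 | import spark.implicits._
45 | case class Employee(name: String, department: String)
46 | val df1 = sc.makeRDD(List(Employee("Alice", "sales"), Employee("Bob", "IT"))).toDF()
47 | val df2 = sc.makeRDD(List(Employee("Carol", "HR"))).toDF()
48 | ```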
40 |
41 | ```scala
42 | val df = df1.union(df2)
43 | val dfSelect = df.select($"department")
44 | dfSelect.show()
45 | ```
46 |
47 | ## 3.3 SQL syntax style implementation (entry function sqlContext)
48 |
49 | ```scala
50 | val df = df1.union(df2)
51 |
52 | df.createOrReplaceTempView("dfTable")
53 | val innerSql = """
54 | SELECT department
55 | FROM dfTable
56 | """
57 | val sqlDF = sqlContext.sql(innerSql)
58 | sqlDF.show()
59 | ```
60 |
61 |
--------------------------------------------------------------------------------
/en_US/Using_Document/Workspace_User_Manual.md:
--------------------------------------------------------------------------------
1 | # 1. Introduction to Workspace Role Permissions
2 | In a project-oriented workspace, members of any department can be added; in a department-oriented workspace, only members of this department can be added.
3 | **Management console:**
4 | - The default role of a workspace creator is owner, who can delegate the management of the workspace to one or more administrators.
5 | - Only the administrator of the workspace can enter the [Administrator] module to manage the basic information and permission information of the workspace accordingly.
6 | - The default roles of the workspace include owner, administrator, development user, operation and maintenance user, analysis user, operation user, data service user, and guest.
7 | - Owner: has the highest authority, has the operation authority of all functions in the workspace, and can delete the workspace;
8 | - Administrator: has the operation authority of all functions in the workspace;
9 | - Development users: have permissions for data access, data quality, application development, test environment task release, production environment task submission, etc.;
10 | - Operation and maintenance users: have permissions for data quality, resource allocation, test environment task release, production task approval and release, etc.;
11 | - Analysis users: have data access, data analysis and other permissions;
12 | - Operational users: have data analysis and other permissions;
13 | - Data service users: have data access, data service and other rights;
14 | - Guest: has read-only access to all modules in the workspace, cannot edit or execute;
15 |
16 | # 2. Workspace page introduction
17 | The workspace page contains four parts: **Project List**, **Application Development Process**, **Menu**, and **FAQ**.
18 | - Project list: After entering DSS, users can choose different workspaces in which to create their own projects; selecting a workspace displays the list of projects created in it.
19 | 
20 | - Application development process: It includes **requirements** (unavailable), **design** (unavailable), **development**, **debugging**, and **production**. Click the button under the corresponding function to jump to that function or view the corresponding example.
21 | 
22 | - Menu: Click UDF Management to enter the Linkis console for resource management, where you can add functions or UDFs.
23 | 
24 |
25 | # 3. Create project
26 | In the project space list, click Create Project to configure and add a new project according to your own needs.
27 | 
28 | The basic information of a project can be modified by clicking the project's management button; only the administrator can delete a project.
29 | 
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/105c29559c6ff03db92efc7cc0b7d15d.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/105c29559c6ff03db92efc7cc0b7d15d.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/150207df2ea89d2b0c2f3ccb8d46577b.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/150207df2ea89d2b0c2f3ccb8d46577b.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/50bd31ef9795efe92aa333f7ac8518c6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/50bd31ef9795efe92aa333f7ac8518c6.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/b7f2ba78f56e660c9345895205cd47ed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/b7f2ba78f56e660c9345895205cd47ed.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/c44c3ee7da22fdd19eb62379271a8410.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/c44c3ee7da22fdd19eb62379271a8410.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/c65f98286cd82f3d9100f602a31f4302.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/c65f98286cd82f3d9100f602a31f4302.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/d61d36f82ef7fc7111ea37574dd0db22.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/d61d36f82ef7fc7111ea37574dd0db22.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/dfbf44e1fe710b76883fe0eb24346707.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/dfbf44e1fe710b76883fe0eb24346707.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/ff7b18b4eec06f5dfd2da1e3693e2e59.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/ff7b18b4eec06f5dfd2da1e3693e2e59.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/postman01.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/postman01.png
--------------------------------------------------------------------------------
/zh_CN/Images/apiservice/postman02.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/apiservice/postman02.png
--------------------------------------------------------------------------------
/zh_CN/Images/applicationshop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/applicationshop.png
--------------------------------------------------------------------------------
/zh_CN/Images/loginpage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/loginpage.png
--------------------------------------------------------------------------------
/zh_CN/Images/projectpage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/projectpage.png
--------------------------------------------------------------------------------
/zh_CN/Images/runworkflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/runworkflow.png
--------------------------------------------------------------------------------
/zh_CN/Images/scriptis.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/scriptis.png
--------------------------------------------------------------------------------
/zh_CN/Images/workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/workflow.png
--------------------------------------------------------------------------------
/zh_CN/Images/workspacepage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/workspacepage.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/hive-6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/hive-6.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/hive1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/hive1.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/hive2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/hive2.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/hive3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/hive3.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/hive4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/hive4.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/hive5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/hive5.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/hive7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/hive7.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/home.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/home.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/udf-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/udf-3.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/udf1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/udf1.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/Scriptis/udf2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/Scriptis/udf2.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/workspace/ws_img1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/workspace/ws_img1.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/workspace/ws_img2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/workspace/ws_img2.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/workspace/ws_img3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/workspace/ws_img3.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/workspace/ws_img5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/workspace/ws_img5.png
--------------------------------------------------------------------------------
/zh_CN/Images/使用文档/workspace/ws_img6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/使用文档/workspace/ws_img6.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DSS&Linkis一键部署文档单机版/eureka.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DSS&Linkis一键部署文档单机版/eureka.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DSSUserGuide部署/userguide_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DSSUserGuide部署/userguide_1.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DSS调试/img.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DSS调试/img.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DSS调试/img_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DSS调试/img_1.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DSS调试/img_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DSS调试/img_2.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DSS调试/img_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DSS调试/img_3.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DSS调试/img_4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DSS调试/img_4.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_1.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_10.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_10.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_11.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_11.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_12.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_12.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_13.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_13.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_14.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_14.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_2.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_3.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_4.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_5.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_6.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_7.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_8.png
--------------------------------------------------------------------------------
/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/安装部署/DolphinschedulerAppConn部署/img_9.png
--------------------------------------------------------------------------------
/zh_CN/Images/开发文档/第三方系统如何接入DSS/AppConn架构图.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/开发文档/第三方系统如何接入DSS/AppConn架构图.png
--------------------------------------------------------------------------------
/zh_CN/Images/开发文档/第三方系统如何接入DSS/AppConn调用举例.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/开发文档/第三方系统如何接入DSS/AppConn调用举例.png
--------------------------------------------------------------------------------
/zh_CN/Images/开发文档/第三方系统如何接入DSS/DSS框架设计.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/开发文档/第三方系统如何接入DSS/DSS框架设计.png
--------------------------------------------------------------------------------
/zh_CN/Images/开发文档/第三方系统如何接入DSS/SSO免密跳转.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/开发文档/第三方系统如何接入DSS/SSO免密跳转.png
--------------------------------------------------------------------------------
/zh_CN/Images/开发文档/第三方系统如何接入DSS/Schedulis实现DSS一级规范.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/开发文档/第三方系统如何接入DSS/Schedulis实现DSS一级规范.png
--------------------------------------------------------------------------------
/zh_CN/Images/开发文档/第三方系统如何接入DSS/开发流程规范.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/开发文档/第三方系统如何接入DSS/开发流程规范.png
--------------------------------------------------------------------------------
/zh_CN/Images/开发文档/第三方系统如何接入DSS/组织结构规范.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/Images/开发文档/第三方系统如何接入DSS/组织结构规范.png
--------------------------------------------------------------------------------
/zh_CN/README.md:
--------------------------------------------------------------------------------
1 | ## 引言
2 |
3 | DataSphere Studio(简称 DSS)是微众银行自研的数据应用开发管理集成框架。
4 |
5 | 在统一的UI下,DataSphere Studio 以工作流式的图形化拖拽开发体验,将满足从数据交换、脱敏清洗、分析挖掘、质量检测、可视化展现、定时调度到数据输出应用等,数据应用开发全流程场景需求。
6 |
7 | ## 文档列表
8 |
9 | * [安装部署文档](安装部署)
10 | * [DSS 单机部署文档](安装部署/DSS&Linkis一键部署文档单机版.md)
11 | * [Exchangis AppConn 插件安装文档](https://github.com/WeBankFinTech/Exchangis/blob/master/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md)
12 | * [Qualitis AppConn 插件安装文档](https://github.com/WeBankFinTech/Qualitis/blob/master/docs/zh_CN/ch1/%E6%8E%A5%E5%85%A5%E5%B7%A5%E4%BD%9C%E6%B5%81%E6%8C%87%E5%8D%97.md)
13 | * [Schedulis AppConn 插件安装文档](安装部署/SchedulisAppConn插件安装文档.md)
14 | * [Visualis AppConn 插件安装文档](https://github.com/WeBankFinTech/Visualis/blob/master/visualis_docs/zh_CN/Visualis_appconn_install_cn.md)
15 | * [Streamis AppConn 插件安装文档](https://github.com/WeBankFinTech/Streamis/blob/main/docs/zh_CN/0.2.0/development/StreamisAppConn%E5%AE%89%E8%A3%85%E6%96%87%E6%A1%A3.md)
16 | * [Prophecis AppConn 插件安装文档](https://github.com/WeBankFinTech/Prophecis/blob/master/docs/zh_CN/Deployment_Documents/Prophecis%20Appconn%E5%AE%89%E8%A3%85%E6%96%87%E6%A1%A3.md)
17 | * [DolphinScheduler AppConn 插件安装文档](安装部署/DolphinScheduler插件安装文档.md)
18 |
19 |
20 | * [使用文档](使用文档)
21 | * [DSS 用户手册](使用文档/DSS用户手册.md)
22 | * [DataApiService 使用文档](使用文档/DataApiService使用文档.md)
23 | * [DSS 接口汇总](使用文档/DSS接口汇总.md)
24 | * [DSS 用户登录认证体系](使用文档/用户登录认证体系.md)
25 | * [DSS 工作空间使用手册](使用文档/工作空间使用手册.md)
26 | * Scriptis
27 | * [如何使用 SparkSQL?](使用文档/Scriptis/Scriptis_User_Tests3_SparkSQL.md)
28 | * [如何使用 Hive QL?](使用文档/Scriptis/Scriptis_User_Tests2_Hive.md)
29 | * [如何使用 Spark Scala?](使用文档/Scriptis/Scriptis_User_Tests1_Scala.md)
30 | * [DSS 用户手册汇总版](用户手册/用户使用文档.md)
31 | * [DSS 如何新增用户](用户手册/DSS新增用户方式.md)
32 | * [超级管理员使用介绍](用户手册/超级管理员功能.md)
33 | * [Scriptis 数据开发探索使用介绍](用户手册/Scriptis使用文档.md)
34 | * [DSS 工作流使用介绍](用户手册/工作流使用文档.md)
35 | * [数据服务使用介绍](用户手册/数据服务使用手册.md)
36 | * [Linkis 管理台使用介绍](https://linkis.apache.org/zh-CN/docs/1.1.1/user-guide/console-manual/)
37 | * [调度中心使用介绍](用户手册/调度中心使用文档.md)
38 |
39 |
40 | * [开发文档](开发文档)
41 | * [DSS 后端编译文档](开发文档/DSS编译文档.md)
42 | * [DSS 前端编译文档](开发文档/前端编译文档.md)
43 | * [第三方系统接入 DSS 开发指南](开发文档/第三方系统接入DSS开发指南.md)
44 | * [AppConn 开发指南](开发文档/AppConn开发指南.md)
45 | * [DSS 新增工作流节点](开发文档/DSS工作流如何新增工作流节点.md)
46 |     * [Scriptis 新增脚本类型](开发文档/Scriptis如何新增脚本类型.md)
47 |
48 |
49 | * [设计文档](设计文档)
50 | * [AppConn 设计文档](设计文档/appconn/appconn.md)
51 | * [DSS Workspace 设计文档](设计文档/Workspace/README.md)
52 | * [DSS Project 设计文档](设计文档/project/DSS工程模块设计文档.md)
53 | * [DSS Orchestrator 设计文档](设计文档/Orchestrator/README.md)
54 | * [DSS 工作流微模块设计文档](设计文档/workflow/DSS工作流架构设计.md)
55 | * [DSS 工作流执行设计文档](设计文档/FlowExecution/README.md)
56 | * [DSS 工作流发布到调度系统设计文档](设计文档/publish/工作流发布设计文档.md)
57 |
58 | 更多文档持续更新中,敬请期待……
--------------------------------------------------------------------------------
/zh_CN/SUMMARY.md:
--------------------------------------------------------------------------------
1 | * [安装部署](安装部署)
2 | * [DSS & Linkis 一键部署文档单机版.md](安装部署/DSS&Linkis一键部署文档单机版.md)
3 | * [DolphinScheduler 插件安装文档](安装部署/DolphinScheduler插件安装文档.md)
4 | * [SchedulisAppConn 插件安装文档](安装部署/SchedulisAppConn插件安装文档.md)
5 | * [DSS1.0.1 到 1.1.0 升级文档](安装部署/DSS1.0.1到1.1.0升级文档.md)
6 |
7 |
8 | * [使用文档](使用文档)
9 | * [DSS 快速使用文档](使用文档/DSS用户手册.md)
10 | * [DSS 用户手册汇总版](用户手册/用户使用文档.md)
11 | * [DSS 如何新增用户](用户手册/DSS新增用户方式.md)
12 | * [超级管理员使用介绍](用户手册/超级管理员功能.md)
13 | * [Scriptis 数据开发探索使用介绍](用户手册/Scriptis使用文档.md)
14 | * [DSS 工作流使用介绍](用户手册/工作流使用文档.md)
15 | * [数据服务使用介绍](用户手册/数据服务使用手册.md)
16 | * [Linkis 管理台使用介绍](https://linkis.apache.org/zh-CN/docs/1.1.1/user-guide/console-manual/)
17 | * [调度中心使用介绍](用户手册/调度中心使用文档.md)
18 | * [DSS 接口汇总](使用文档/DSS接口汇总.md)
19 | * [DSS 用户登录认证体系](使用文档/用户登录认证体系.md)
20 | * [工作空间管理使用介绍](使用文档/工作空间使用手册.md)
21 | * DSS 插件使用文档
22 | * [如何使用 ExchangisAppConn ?](https://github.com/WeBankFinTech/Exchangis/blob/master/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md)
23 | * [如何使用 StreamisAppConn ?](https://github.com/WeBankFinTech/Streamis/blob/main/docs/zh_CN/0.2.0/development/StreamisAppConn%E5%AE%89%E8%A3%85%E6%96%87%E6%A1%A3.md)
24 | * [如何使用 QualitisAppConn ?](https://github.com/WeBankFinTech/Qualitis/blob/master/docs/zh_CN/ch1/%E6%8E%A5%E5%85%A5%E5%B7%A5%E4%BD%9C%E6%B5%81%E6%8C%87%E5%8D%97.md)
25 | * [如何使用 VisualisAppConn ?](https://github.com/WeBankFinTech/Visualis/blob/master/visualis_docs/zh_CN/Visualis_appconn_install_cn.md)
26 | * [如何使用 ProphecisAppConn ?](https://github.com/WeBankFinTech/Prophecis/blob/master/docs/zh_CN/Deployment_Documents/Prophecis%20Appconn%E5%AE%89%E8%A3%85%E6%96%87%E6%A1%A3.md)
27 | * Scriptis
28 | * [如何使用 SparkSQL?](使用文档/Scriptis/Scriptis_User_Tests3_SparkSQL.md)
29 | * [如何使用 Hive QL?](使用文档/Scriptis/Scriptis_User_Tests2_Hive.md)
30 | * [如何使用 Spark Scala?](使用文档/Scriptis/Scriptis_User_Tests1_Scala.md)
31 |
32 |
33 | * [开发文档](开发文档)
34 | * [DSS 后台编译文档](开发文档/DSS编译文档.md)
35 | * [DSS 前端编译文档](开发文档/前端编译文档.md)
36 | * [第三方系统接入 DSS 开发指南](开发文档/第三方系统接入DSS开发指南.md)
37 |
38 |
39 | * [设计文档](设计文档)
40 | * [AppConn 设计文档](设计文档/appconn/appconn.md)
41 | * [DSS Workspace 设计文档](设计文档/Workspace/README.md)
42 | * [DSS Project 设计文档](设计文档/project/DSS工程模块设计文档.md)
43 | * [DSS Orchestrator 设计文档](设计文档/Orchestrator/README.md)
44 | * [DSS 工作流微模块设计文档](设计文档/workflow/DSS工作流架构设计.md)
45 | * [DSS 工作流执行设计文档](设计文档/FlowExecution/README.md)
46 | * [DSS 工作流发布到调度系统设计文档](设计文档/publish/工作流发布设计文档.md)
--------------------------------------------------------------------------------
/zh_CN/使用文档/DataApiService使用文档.md:
--------------------------------------------------------------------------------
1 | 数据服务
2 | ----------
3 |
4 | DSS目前支持将SQL脚本发布成数据服务API,分享给其他用户使用。业务用户在不用写代码、也没有大数据平台账号的情况下,只需设置参数,就可以执行数据服务的脚本,并直接浏览或者下载平台数据。
5 |
6 | 
7 |
8 | **1、创建数据服务**
9 |
10 | 由于业务的需要,需要建立一个数据服务并授权给他人使用。发布用户进入Scriptis,新建并编辑脚本文件,编写SQL语句,同时可以在SQL语句中嵌入变量,方便后续的业务人员自行设置参数后获取数据。
11 |
12 | 
13 |
14 |
15 | 在保存该Spark SQL脚本后,可在脚本编辑栏上方点击“发布为数据API”(该“发布为数据API”的功能,只有部分用户具有该权限,不具备权限的用户,该功能按钮不可见),在点击后需要填入以下API创建信息(其中带红色
16 | \* 标记为必填项),新建API信息填写如下图所示。
17 |
18 | 
19 |
20 | 点击下一步,用于设置变量的信息。
21 | 
22 |
23 |
24 | 发布用户可以在工作空间的主页,通过“进入数据服务开发”和“更多应用工具”进入并使用数据服务,在数据服务的标签点击“更多”,可以进入数据服务管理界面
25 |
26 | 
27 |
28 |
29 |
30 | **2、使用数据服务**
31 |
32 | 在进入数据服务页面后,可以看到该用户可使用的数据服务列表页面,其中default表示默认所有的数据服务,用户可以点击对应的标签筛选出自己需要使用的数据服务,同时可以在搜索框中,分别用名称、状态、提交人进行筛选,详情页面如下图所示。
33 |
34 | 
35 |
36 | 使用用户点击“进入使用”,可以在筛选条件中,设置参数的值,由此可见,使用用户是发布用户数据集的一个子集。
37 |
38 | 
39 |
40 | **3、修改数据服务**
41 |
42 | 一个数据服务由于业务的需要可能发生修改,当发布用户对数据服务的脚本进行了修改,可以点击“更新数据API”。
43 | 更新数据服务,可以选择绑定的其他数据服务。
44 |
45 | 
46 |
47 | **4、使用postman访问数据服务**
48 |
49 | 数据服务发布后,支持使用api接口访问,可以给其它系统直接调用。提交查询如下图所示:
50 |
51 | 
52 |
53 | 获取到任务执行ID, 再根据ID可以获取任务的执行进度,日志,结果集等。
54 |
55 | 
56 |
57 | 说明:数据服务的token可以从/queryById?的接口返回(点击进入使用),字段为userToken。所有接口的访问都是要经过GateWay认证的。数据服务的token只能用于数据服务的管理流程。使用postman的认证需要使用页面的cookie或者走linkis-gateway的密钥认证方式:在header里面加入Token-Code: XXX(指定linkis-gateway的登录密钥)和Token-User: XXX(指定linkis-gateway的登录用户)。
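58 |
59 | 例如,第三方系统或 Postman 调用时,按照用户登录认证体系文档中的 Token 登录方式,在请求的 header 或 cookie 中写入如下两个参数即可:
60 |
61 | ```json
62 | {
63 |     "Token-Code": "${TOKEN_NAME}",
64 |     "Token-User": "${USER}"
65 | }
66 | ```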
58 |
59 |
60 |
--------------------------------------------------------------------------------
/zh_CN/使用文档/Scriptis/Scriptis_User_Tests1_Scala.md:
--------------------------------------------------------------------------------
1 | # DSS用户测试样例1:Scala
2 |
3 | DSS用户测试样例的目的是为平台新用户提供一组测试样例,用于熟悉DSS的常见操作,并验证DSS平台的正确性
4 |
5 | 
6 |
7 | ## 1.1 Spark Core(入口函数sc)
8 |
9 | 在Scriptis中,已经默认为您注册了SparkContext,所以直接使用sc即可:
10 |
11 | ### 1.1.1 单Value算子(Map算子为例)
12 |
13 | ```scala
14 | val rddMap = sc.makeRDD(Array((1,"a"),(1,"d"),(2,"b"),(3,"c")),4)
15 | val res = rddMap.mapValues(data=>{data+"||||"})
16 | res.collect().foreach(data=>println(data._1+","+data._2))
17 | ```
18 |
19 | ### 1.1.2 双Value算子(union算子为例)
20 |
21 | ```scala
22 | val rdd1 = sc.makeRDD(1 to 5)
23 | val rdd2 = sc.makeRDD(6 to 10)
24 | val rddCustom = rdd1.union(rdd2)
25 | rddCustom.collect().foreach(println)
26 | ```
27 |
28 | ### 1.1.3 K-V算子(reduceByKey算子为例)
29 |
30 | ```scala
31 | val rdd1 = sc.makeRDD(List(("female",1),("male",2),("female",3),("male",4)))
32 | val rdd2 = rdd1.reduceByKey((x,y)=>x+y)
33 | rdd2.collect().foreach(println)
34 | ```
35 |
36 | ### 1.1.4 执行算子(以上collect算子为例)
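37 |
38 | 除 collect 外,其它常见的行动算子触发执行的方式相同,以下为一个最简示例:
39 |
40 | ```scala
41 | val nums = sc.makeRDD(1 to 5)
42 | // count 和 reduce 都是行动算子:调用时才真正触发计算
43 | println(nums.count())       // 5
44 | println(nums.reduce(_ + _)) // 15
45 | ```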
37 |
38 | ### 1.1.5 从hdfs上读取文件并做简单执行
39 |
40 | ```scala
41 | import spark.implicits._
42 | case class Person(name: String, age: String)
43 | val file = sc.textFile("/test.txt")
44 | val person = file.map(line => {
45 |   val values = line.split(",")
46 |   Person(values(0), values(1))
47 | })
48 | val df = person.toDF()
49 | df.select($"name").show()
50 | ```
51 |
52 |
53 |
54 | ## 1.2 UDF函数测试
55 |
56 | ### 1.2.1 函数定义
57 |
58 |
59 |
60 | ```scala
61 | def ScalaUDF3(str: String): String = "hello, " + str + ", this is a third attempt"
62 | ```
63 |
64 | ### 1.2.2 注册函数
65 |
66 | 函数-》个人函数-》右击新增spark函数,注册方式同常规spark开发
67 |
68 | 
69 |
70 | ## 1.3 UDAF函数测试
71 |
72 | ### 1.3.1 Jar包上传
73 |
74 | 在 IDEA 上开发一个求平均值的 UDAF 函数,打成 jar(wordcount)包,上传到 DSS 的 jar 文件夹。下图之后给出了此类函数的一个示意写法。
75 |
76 | 
77 |
78 | ### 1.3.2 注册函数
79 |
80 | 函数-》个人函数-》右击新增普通函数,注册方式同常规 spark 开发
81 |
82 | 
--------------------------------------------------------------------------------
/zh_CN/使用文档/Scriptis/Scriptis_User_Tests2_Hive.md:
--------------------------------------------------------------------------------
1 | # DSS用户测试样例2:Hive
2 |
3 | DSS用户测试样例的目的是为平台新用户提供一组测试样例,用于熟悉DSS的常见操作,并验证DSS平台的正确性
4 |
5 | 
6 |
7 | ## 2.1 数仓建表
8 |
9 | 进入“数据库”页面,点击“+”,依次输入表信息、表结构和分区信息即可创建数据库表:
10 |
11 | 
12 |
13 | 
14 |
15 | 通过以上流程,分别创建部门表dept、员工表emp和分区员工表emp_partition,建表语句如下:
16 |
17 | ```sql
18 | create external table if not exists default.dept(
19 | deptno int,
20 | dname string,
21 | loc int
22 | )
23 | row format delimited fields terminated by '\t';
24 |
25 | create external table if not exists default.emp(
26 | empno int,
27 | ename string,
28 | job string,
29 | mgr int,
30 | hiredate string,
31 | sal double,
32 | comm double,
33 | deptno int
34 | )
35 | row format delimited fields terminated by '\t';
36 |
37 | create table if not exists default.emp_partition(
38 | empno int,
39 | ename string,
40 | job string,
41 | mgr int,
42 | hiredate string,
43 | sal double,
44 | comm double,
45 | deptno int
46 | )
47 | partitioned by (month string)
48 | row format delimited fields terminated by '\t';
49 | ```
50 |
51 | **导入数据**
52 |
53 | 目前需要通过后台手动批量导入数据,可以通过insert方法从页面插入数据
54 |
55 | ```sql
56 | load data local inpath 'dept.txt' into table default.dept;
57 | load data local inpath 'emp.txt' into table default.emp;
58 | load data local inpath 'emp1.txt' into table default.emp_partition partition (month='202001');
59 | load data local inpath 'emp2.txt' into table default.emp_partition partition (month='202002');
60 | load data local inpath 'emp2.txt' into table default.emp_partition partition (month='202003');
61 | ```
62 |
63 | 其它数据按照上述语句导入,样例数据文件路径在:`examples\ch3`
64 |
65 | ## 2.2 基本SQL语法测试
66 |
67 | ### 2.2.1 简单查询
68 |
69 | ```sql
70 | select * from dept;
71 | ```
72 |
73 | ### 2.2.2 Join连接
74 |
75 | ```sql
76 | select * from emp
77 | left join dept
78 | on emp.deptno = dept.deptno;
79 | ```
80 |
81 | ### 2.2.3 聚合函数
82 |
83 | ```sql
84 | select dept.dname, avg(sal) as avg_salary
85 | from emp left join dept
86 | on emp.deptno = dept.deptno
87 | group by dept.dname;
88 | ```
89 |
90 | ### 2.2.4 内置函数
91 |
92 | ```sql
93 | select ename, job,sal,
94 | rank() over(partition by job order by sal desc) sal_rank
95 | from emp;
96 | ```
97 |
98 | ### 2.2.5 分区表简单查询
99 |
100 | ```sql
101 | show partitions emp_partition;
102 | select * from emp_partition where month='202001';
103 | ```
104 |
105 | ### 2.2.6 分区表联合查询
106 |
107 | ```sql
108 | select * from emp_partition where month='202001'
109 | union
110 | select * from emp_partition where month='202002'
111 | union
112 | select * from emp_partition where month='202003';
113 | ```
114 |
115 | ## 2.3 UDF 函数测试
116 |
117 | ### 2.3.1 Jar 包上传
118 |
119 | 进入 Scriptis 页面后,右键目录路径上传jar包:
120 |
121 | 
122 |
123 | 测试样例jar包在 `examples\ch3\rename.jar`
124 |
125 | ### 2.3.2 自定义函数
126 |
127 | 进入“UDF函数”选项(如1),右击“个人函数”目录,选择“新增函数”:
128 |
129 | 
130 |
131 | 输入函数名称、选择jar包、并填写注册格式、输入输出格式即可创建函数:
132 |
133 | 
134 |
135 | 
136 |
137 | 获得的函数如下:
138 |
139 | 
140 |
141 | ### 2.3.3 利用自定义函数进行SQL查询
142 |
143 | 完成函数注册后,可进入工作空间页面创建.hql文件使用函数:
144 |
145 | ```sql
146 | select deptno,ename, rename(ename) as new_name
147 | from emp;
148 | ```
149 |
--------------------------------------------------------------------------------
/zh_CN/使用文档/Scriptis/Scriptis_User_Tests3_SparkSQL.md:
--------------------------------------------------------------------------------
1 | # DSS用户测试样例3:SparkSQL
2 |
3 | DSS用户测试样例的目的是为平台新用户提供一组测试样例,用于熟悉DSS的常见操作,并验证DSS平台的正确性
4 |
5 | 
6 |
7 | ## 3.1 RDD与DataFrame转换
8 |
9 | ### 3.1.1 RDD转为DataFrame
10 |
11 | ```scala
12 | case class MyList(id:Int)
13 |
14 | val lis = List(1,2,3,4)
15 |
16 | val listRdd = sc.makeRDD(lis)
17 | import spark.implicits._
18 | val df = listRdd.map(value => MyList(value)).toDF()
19 |
20 | df.show()
21 | ```
22 |
23 | ### 3.1.2 DataFrame转为RDD
24 |
25 | ```scala
26 | case class MyList(id:Int)
27 |
28 | val lis = List(1,2,3,4)
29 | val listRdd = sc.makeRDD(lis)
30 | import spark.implicits._
31 | val df = listRdd.map(value => MyList(value)).toDF()
32 | println("------------------")
33 |
34 | val dfToRdd = df.rdd
35 |
36 | dfToRdd.collect().foreach(print(_))
37 | ```
38 |
39 | ## 3.2 DSL语法风格实现
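40 |
41 | 3.2 与 3.3 两节假设 df1 和 df2 是两个 schema 相同、均包含 department 列的 DataFrame,例如(仅为示意):
42 |
43 | ```scala
44 | import spark.implicits._
45 | case class Employee(name: String, department: String)
46 | val df1 = sc.makeRDD(List(Employee("Alice", "sales"), Employee("Bob", "IT"))).toDF()
47 | val df2 = sc.makeRDD(List(Employee("Carol", "HR"))).toDF()
48 | ```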
40 |
41 | ```scala
42 | val df = df1.union(df2)
43 | val dfSelect = df.select($"department")
44 | dfSelect.show()
45 | ```
46 |
47 | ## 3.3 SQL语法风格实现(入口函数sqlContext)
48 |
49 | ```scala
50 | val df = df1.union(df2)
51 |
52 | df.createOrReplaceTempView("dfTable")
53 | val innerSql = """
54 | SELECT department
55 | FROM dfTable
56 | """
57 | val sqlDF = sqlContext.sql(innerSql)
58 | sqlDF.show()
59 | ```
60 |
61 |
--------------------------------------------------------------------------------
/zh_CN/使用文档/工作空间使用手册.md:
--------------------------------------------------------------------------------
1 | # 1.工作空间角色权限介绍
2 | 在项目导向的工作空间中,可以添加任何部门的成员;在部门导向的工作空间中,只可以添加本部门的成员。
3 | **管理台:**
4 | - 工作空间创建者的默认角色是拥有者,可以委托一名或多名管理员对工作空间进行管理。
5 | - 只有工作空间的管理员可以进入【管理台】模块,对该工作空间的基本信息和权限信息进行相应管理。
6 | - 工作空间的默认角色包括拥有者、管理员、开发用户、运维用户、分析用户、运营用户、数据服务用户、访客。
7 | - 拥有者:拥有最高权限,拥有该工作空间内所有功能的操作权限,可以删除该工作空间;
8 | - 管理员:拥有该工作空间内所有功能的操作权限;
9 | - 开发用户:拥有数据接入、数据质量、应用开发、测试环境任务发布、生产环境任务提交等权限;
10 | - 运维用户:拥有数据质量、资源配置、测试环境任务发布、生产任务审批发布等权限;
11 | - 分析用户:拥有数据接入、数据分析等权限;
12 | - 运营用户:拥有数据分析等权限;
13 | - 数据服务用户:拥有数据接入、数据服务等权限;
14 | - 访客:拥有工作空间内所有模块的只读权限,不可编辑或执行;
15 |
16 | # 2.工作空间页面介绍
17 | 工作空间页面包含:**项目列表**、**应用开发流程**、**菜单**、**常见问题**四部分。
18 | - 项目列表:在用户进入dss后,可以选择不同的工作空间来创建自己的项目,选择不同的工作空间会展示所创建的项目列表。
19 | 
20 | - 应用开发流程:其中包含了**需求**(不可用)、**设计**(不可用)、**开发**、**调试**、**生产**,点击对应功能下的按钮可跳转到对应的功能或查看对应的示例
21 | 
22 | - 菜单:点击UDF管理,可进入linkis管理台进行资源管理,可以新增函数或新增UDF
23 | 
24 |
25 | # 3.创建项目
26 | 在项目空间列表中,点击创建项目,可以根据自己的需要配置并新增项目
27 | 
28 | 通过点击项目的管理按钮可修改项目的基本信息,只有管理员才能够删除项目
29 | 
--------------------------------------------------------------------------------
/zh_CN/使用文档/用户登录认证体系.md:
--------------------------------------------------------------------------------
1 | # DataSphere Studio 用户登录认证体系
2 |
3 | > DSS 默认只提供了管理员账号,用户登录鉴权依托于 Linkis 的 LDAP 用户登录认证体系。
4 | > 本文将详细介绍 Linkis 目前已经支持的用户登录认证方式。
5 |
6 | ## 一、基本介绍
7 |
8 | 1. DSS 超级管理员的用户名为部署用户名,如部署用户为 hadoop,则管理员的用户名和密码为 hadoop/hadoop,具体用户可在 [DSS单机部署文档](../安装部署/DSS&Linkis一键部署文档单机版.md) 查看。
9 |
10 | 2. DSS的用户登录鉴权,依托于 Linkis 的用户登录认证体系。除管理员以外,Linkis 还支持以下用户登录认证方式:
11 |
12 | - Token 登录方式
13 | - 代理用户模式
14 | - 接入SSO单点登录
15 |
16 | ## 二、Token登录方式
17 |
18 | 该方式是给第三方系统访问 Linkis 和 DSS 使用的。
19 |
20 | 第三方系统调用 Linkis 和 DSS 后台接口时,只需通过 token 模式即可直接跳过登录。
21 |
22 | 在 `linkis/linkis-gateway/conf/linkis.properties` 指定如下参数:
23 |
24 | ```properties
25 | # 打开token模式
26 | wds.linkis.gateway.conf.enable.token.auth=true
27 | # 指定token配置文件
28 | wds.linkis.gateway.conf.token.auth.config=token.properties
29 | ```
30 |
31 | 在 `linkis/linkis-gateway/conf` 目录下,创建 `token.properties` 文件,内容如下:
32 |
33 | ```properties
34 | # 格式如下:
35 | ${TOKEN_NAME}=${USER1},${USER2}
36 | # 例如:
37 | AZKABAN=*
38 | ```
39 |
40 | TOKEN_NAME 指分配给第三方系统的 tokenId,后面的 value 为可跳过登录的用户,如果完全信任该系统的所有请求,可直接等于*,表示全部授权。
41 |
42 | 第三方系统在请求 DSS 和 Linkis,在 request 的 header 或 cookie 中,写入如下两个参数即可:
43 |
44 | ```json
45 | {
46 | "Token-Code": "${TOKEN_NAME}",
47 | "Token-User": "${USER}"
48 | }
49 | ```
50 |
51 | ## 三、代理用户模式
52 |
53 | 该方式允许登录用户和实际使用 DSS 的用户不同,主要作用:控制用户登录时必须为实名用户,但是实际使用大数据平台时,是非实名用户。
54 |
55 | 在 `linkis/linkis-gateway/conf/linkis.properties` 指定如下参数:
56 |
57 | ```properties
58 | # 打开代理模式
59 | wds.linkis.gateway.conf.enable.proxy.user=true
60 | # 指定代理配置文件
61 | wds.linkis.gateway.conf.proxy.user.config=proxy.properties
62 | ```
63 |
64 | 在 `linkis/linkis-gateway/conf` 目录下,创建 `proxy.properties` 文件,内容如下:
65 |
66 | ```properties
67 | # 格式如下:
68 | ${LOGIN_USER}=${PROXY_USER}
69 | # 例如:
70 | enjoyyin=hadoop
71 | ```
72 |
73 | 如果现有的代理模式不能满足您的需求,您也可以手动修改:`com.webank.wedatasphere.linkis.gateway.security.ProxyUserUtils`。
74 |
75 | ## 四、接入SSO单点登录
76 |
77 | 接入您公司的 SSO 单点登录体系要复杂一些。
78 |
79 | 首先,您需要打开 SSO 单点验证功能,请在 `linkis/linkis-gateway/conf/linkis.properties` 指定如下参数:
80 |
81 | ```properties
82 | wds.linkis.gateway.conf.enable.sso=true
83 | ```
84 |
85 | 然后,您需要实现SSOInterceptor接口:
86 |
87 | ```scala
88 | trait SSOInterceptor {
89 |
90 | /**
91 | * 如果打开SSO单点登录功能,当前端跳转SSO登录页面登录成功后,会重新跳回到DSS首页,这时DSS前端再次请求gateway,
92 | * gateway会通过调用该方法获取已SSO登录的用户,然后将用户写入cookie,保证后续请求可直接放行。
93 | * 您需实现该方法,通过Request返回用户名。
94 | * @param gatewayContext
95 | * @return
96 | */
97 | def getUser(gatewayContext: GatewayContext): String
98 |
99 | /**
100 | * 通过DSS首页Url,用于生成一个可重定向的SSO登录页面URL。
101 | * 要求:需带上requestUrl,以便SSO登录成功后能跳转回来
102 | * @param requestUrl DSS首页URL
103 | * @return 例如:https://${sso_host}:${sso_port}/cas/login?redirectUrl=${requestUrl}
104 | */
105 | def redirectTo(requestUrl: URI): String
106 |
107 | /**
108 | * 用户退出登录时,gateway会调用此接口,以保证gateway清除cookie后,SSO单点登录也会把登录信息清除掉
109 | * @param gatewayContext
110 | */
111 | def logout(gatewayContext: GatewayContext): Unit
112 |
113 | }
114 | ```
115 |
116 | 将您的 SSO 实现类,打成 jar 包,放入 `linkis/linkis-gateway/lib` 目录。
117 |
118 | Linkis 提供了两种加载您 SSO 实现类的方式:
119 |
120 | 方式一:将 SSO 实现类声明为 Spring bean,这种方式只需在类名上面加上 `@Component` 注解即可。
121 |
122 | 方式二:在 `linkis/linkis-gateway/conf/linkis.properties` 指定如下参数:
123 |
124 | ```properties
125 | #请指定为您的SSO实现类
126 | wds.linkis.gateway.conf.sso.interceptor=com.webank.wedatasphere.linkis.gateway.security.sso.SSOInterceptor
127 | ```
128 |
129 | 重启 linkis-gateway,SSO 单点登录即可生效。
--------------------------------------------------------------------------------
/zh_CN/安装部署/DSS1.0.1到1.1.0升级文档.md:
--------------------------------------------------------------------------------
1 | # DataSphere Studio 1.0.1 升级到 1.1.0 使用文档
2 |
3 | ### 升级步骤主要分为:
4 | - 服务停止
5 | - 执行数据库升级脚本
6 | - dss部署目录替换为新版本包
7 | - 配置文件添加、修改
8 | - 服务启动
9 |
10 | #### 1. 服务停止
11 | 进入到dss的部署目录,在目录下执行命令停止dss的所有服务:
12 | ```shell
13 | cd ${DSS_DEPLOY_PATH}
14 |
15 | sh sbin/dss-stop-all.sh
16 | ```
17 | #### 2. 执行数据库升级sql脚本
18 |
19 | 升级sql脚本获取方式:
20 |
21 | - dss1.1.0的安装包解压后的目录:db/version_update/from_v101_to_v110.sql
22 | - 从github页面下载,地址为:(待上传补充)
23 |
24 | 然后登录dss数据库执行source命令:
25 |
26 | ```shell
27 | source ${your_path}/from_v101_to_v110.sql
28 | ```
29 | 正常情况下可以执行成功。
30 |
31 | #### 3. dss部署目录替换为新版本包
32 |
33 | **(重要:最好先备份好dss旧版本的数据库)**
34 |
35 | - 通过mysqldump命令备份如下有结构变化的表:
36 |
37 | dss_appconn、dss_appconn_instance、dss_workflow_node、dss_onestop_user_favorites、dss_component_role、dss_onestop_menu_application
38 |
39 |
40 |
41 | 这些表只是修改了表名,也可以备份下:
42 |
43 | dss_dictionary
44 | dss_role
45 | dss_admin_dept
46 | dss_download_audit
47 | dss_flow_relation
48 | dss_flow_edit_lock
49 | dss_onestop_menu
50 | dss_menu_role
51 |
52 | - 备份dss旧版本的部署目录,以该目录为例:/appcom/Install/DSSInstall
53 | ```shell
54 | mv /appcom/Install/DSSInstall /appcom/Install/DSSInstall-bak
55 | ```
56 |
57 | 将dss1.1.0的安装包放到临时目录解压:
58 | ```shell
59 | mkdir /tmp/dss-pkg
60 | mv wedatasphere-dss-1.1.0-dist.tar.gz /tmp/dss-pkg/
61 | cd /tmp/dss-pkg
62 | tar zxvf wedatasphere-dss-1.1.0-dist.tar.gz
63 | ```
64 | 解压后目录结构如下:
65 | 
66 |
67 | 然后将dss-1.1.0目录下的文件全部拷贝到dss1.1.0的安装目录:
68 | ```shell
69 | cd dss-1.1.0
70 | cp -r lib dss-appconns sbin /appcom/Install/DSSInstall/
71 | ```
72 |
73 | 拷贝先前版本的配置文件:
74 | ```shell
75 | cp -r /appcom/Install/DSSInstall-bak/conf /appcom/Install/DSSInstall/
76 | ```
77 |
78 | #### 4. 添加、修改配置
79 |
80 | 新版本新增配置: dss-scriptis-server.properties、dss-guide-server.properties,
81 | 直接从dss1.1.0/conf目录拷贝:
82 |
83 | ```shell
84 | cp -r conf/dss-scriptis-server.properties /appcom/Install/DSSInstall/conf/
85 | cp -r conf/dss-guide-server.properties /appcom/Install/DSSInstall/conf/
86 | ```
87 |
88 | 配置修改:
89 |
90 | 1.在配置文件dss-framework-project-server.properties中加入:
91 | ```properties
92 | ###若appconn没有实现所有开发规范(节点更新、删除、拷贝、导入、导出操作),需要加入到该配置忽略检查
93 | wds.dss.appconn.checker.development.ignore.list=workflow,sendemail
94 | ###若appconn没有实现所有工程规范(增删改查),需要加入到该配置忽略检查
95 | wds.dss.appconn.checker.project.ignore.list=
96 | ```
97 |
98 | 并替换为新版本的restful、mybatis配置:
99 | ```properties
100 | ##restful
101 | wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.framework.workspace.restful,com.webank.wedatasphere.dss.framework.project.restful,com.webank.wedatasphere.dss.framework.release.restful,com.webank.wedatasphere.dss.framework.appconn.restful,com.webank.wedatasphere.dss.framework.admin.restful
102 | ##mybatis
103 | wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/framework/workspace/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/application/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/framework/project/dao/impl/*Mapper.xml,classpath*:com/webank/wedatasphere/dss/framework/appconn/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/framework/release/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/framework/admin/xml/impl/*.xml
104 | wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.application.entity,com.webank.wedatasphere.dss.common.entity,com.webank.wedatasphere.dss.framework.workspace.bean,com.webank.wedatasphere.dss.framework.project.entity,com.webank.wedatasphere.dss.framework.appconn.entity,com.webank.wedatasphere.dss.framework.release.entity,com.webank.wedatasphere.dss.framework.admin.pojo.entity
105 | wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.framework.workspace.dao,com.webank.wedatasphere.dss.application.dao,com.webank.wedatasphere.dss.framework.project.dao,com.webank.wedatasphere.dss.framework.appconn.dao,com.webank.wedatasphere.dss.framework.release.dao,com.webank.wedatasphere.dss.framework.admin.xml
106 | ```
107 |
108 | #### 5. 服务启动
109 | OK,到现在可以启动dss新版本的服务了,在**dss部署目录下**执行命令启动所有服务:
110 |
111 | ```shell
112 | sh sbin/dss-start-all.sh
113 | ```
114 |
115 |
116 |
117 |
118 |
--------------------------------------------------------------------------------
/zh_CN/安装部署/DSS1.1.0到1.1.1升级文档.md:
--------------------------------------------------------------------------------
1 | # DataSphere Studio 1.1.0 升级到 1.1.1 使用文档
2 |
3 | ### 升级步骤主要分为:
4 | - 服务停止
5 | - 执行数据库升级脚本
6 | - 替换相关jar包
7 | - 替换前端包
8 | - 服务启动
9 |
10 | ### 1. 服务停止
11 | 进入到dss的部署目录,在目录下执行命令停止dss的所有服务:
12 | ```shell
13 | cd ${DSS_DEPLOY_PATH}
14 |
15 | sh sbin/dss-stop-all.sh
16 | ```
17 | ### 2. 执行数据库升级sql脚本
18 |
19 | 升级sql脚本获取方式:
20 |
21 | - dss1.1.1的安装包解压后的目录:db/version_update/from_v110_to_v111.sql
22 |
23 | 然后登录dss数据库执行source命令:
24 |
25 | ```shell
26 | source ${your_path}/from_v110_to_v111.sql
27 | ```
28 | 正常情况下可以执行成功。
29 |
30 | ### 3. 替换相关jar包
31 |
32 | #### 3.1 替换DSS的jar包
33 | - 用户可以自行编译DSS1.1.1后端代码或参考文档 [DSS&Linkis一键部署文档单机版](DSS&Linkis一键部署文档单机版.md) 来获取DSS1.1.1的全量包
34 | - 使用DSS1.1.1的lib去全量替换掉原有1.1.0 xxx/dss/目录下的lib
35 |
36 | #### 3.2 DolphinScheduler涉及到的jar包替换
37 | - `DolphinSchedulerAppConn` 插件安装包替换,可从此处下载:[点我下载插件安装包](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/DolphinScheduler/DSS1.1.1_dolphinscheduler/dolphinscheduler-appconn.zip) 解压该zip包后,将其中的lib和db全量替换原有的`${DSS_HOME}/dss-appconns/dss-dolphinscheduler/`目录下的lib和db
38 | - 下载 dss-dolphinscheduler-token-1.1.1.jar [点我下载](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/DolphinScheduler/DSS1.1.1_dolphinscheduler/dss-dolphinscheduler-token-1.1.1.jar) 将该jar包放到`${DSS_HOME}/lib/dss-framework/dss-framework-project-server/`目录下
39 | - 下载`dss-dolphinscheduler-client` 插件安装包,可从此处下载:[点我下载插件安装包](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/DolphinScheduler/DSS1.1.1_dolphinscheduler/dss-dolphinscheduler-client.zip) 解压该zip包后用将其中的lib全量替换原有的dss-dolphinscheduler-client安装目录下的lib
40 | - 下载 dolphinscheduler-prod-metrics-with-dependencies.jar [点我下载](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/DolphinScheduler/DSS1.1.1_dolphinscheduler/dolphinscheduler-prod-metrics-1.1.1-jar-with-dependencies.jar) 将该jar包放到dolphinscheduler安装目录的lib目录下,删除之前的 dolphinscheduler-prod-metrics-1.1.0.jar
41 |
42 | ### 4. Replace the front-end package
43 | - Compile the front-end package yourself, or obtain the full DSS 1.1.1 package from the [DSS & Linkis one-click deployment guide (single machine)](DSS&Linkis一键部署文档单机版.md), take the front-end package out of it, and replace the dist under `${DSS_HOME}/web/`
44 | ### 5. Start the services
45 | - Restart the DSS services. In the **DSS deployment directory**, run the command to start all services:
46 |
47 | ```shell
48 | sh sbin/dss-start-all.sh
49 | ```
50 | - Restart the dolphinscheduler service
51 | - Restart nginx
52 |
53 |
54 |
55 |
--------------------------------------------------------------------------------
/zh_CN/安装部署/DSSUserGuide部署文档.md:
--------------------------------------------------------------------------------
1 | # User Guide Deployment Guide
2 |
3 | > Introduction: the help-documentation feature belongs to the dss-user-guide module and provides reference material for the DSS project.
4 |
5 | Before using the dss-user-guide module, the DSS project services must be deployed first (see the DSS deployment documents). The dss-user-guide document-synchronization feature runs as a scheduled task, synchronizing every two hours. For documents to be updated into the system, a SUMMARY.md file must be maintained; the user-guide module uses it to locate the files and to parse the document content.
6 |
7 |
8 |
9 | ## 1. Knowledge-base configuration in dss-guide-server.properties
10 |
11 | ### 1.1 Reference configuration
12 |
13 | ````properties
14 | #gitbook
15 | #IP address of the server where the documents are stored
16 | target.ip.address=127.0.0.1
17 | #path of the documents on that server
18 | host.gitbook.path=/appcom/Install/ApacheInstall/gitbook_books
19 | #path to synchronize to on the target server
20 | target.gitbook.path=/appcom/Install/ApacheInstall
21 | #used to skip parsing of the km directory
22 | summary.ignore.model=km
23 | #knowledge-base sync mode: gitbook or database
24 | guide.sync.model=gitbook
25 | ````
26 |
27 | ### 1.2 DSS 1.1.0 configuration
28 |
29 | ````properties
30 | #gitbook
31 | #IP address of the server where the documents are stored
32 | target.ip.address=127.0.0.1
33 | #path of the documents on that server (point it at the directory containing SUMMARY.md, e.g. for /xxx/test1/SUMMARY.md use /xxx/test1)
34 | host.gitbook.path=/xxx/test1
35 | #DSS 1.1.0 does not support gitbook-based sync, so database is used instead
36 | guide.sync.model=database
37 | ````
38 |
39 |
40 |
41 | ## 2. SUMMARY.md file structure
42 |
43 | SUMMARY.md records where each file lives and the hierarchy the files should form after parsing
44 |
45 | **Example:**
46 |
47 | ````SUMMARY.md
48 | guide
49 | - [学习引导]()
50 | /workspaceManagement
51 | - [工作空间管理]()
52 | - [question]()
53 | - [什么用户可以进入工作空间管理](/学习引导/工作空间管理/question/什么用户可以进入工作空间管理.md)
54 | - [step]()
55 | - [权限管理页面](/学习引导/工作空间管理/step/权限管理页面.md)
56 | - [用户权限管理](/学习引导/工作空间管理/step/用户权限管理.md)
57 | /workspaceHome
58 | - [工作空间页面]()
59 | - [question]()
60 | - [为什么看不到应用商店了](/学习引导/工作空间页面/question/为什么看不到应用商店了.md)
61 | - [step]()
62 | - [工作空间页面介绍](/学习引导/工作空间页面/step/工作空间页面介绍.md)
63 | /workflow
64 | - [工作流]()
65 | - [step]()
66 | - [页面介绍](/学习引导/工作流/step/页面介绍.md)
67 | knowledge
68 | - [知识库]()
69 | - [Demo案例]()
70 | - [开发](/知识库/Demo案例/开发.md)
71 | - [生产](/知识库/Demo案例/生产.md)
72 | - [调试](/知识库/Demo案例/调试.md)
73 | - [Dss常见问题]()
74 | - [用户权限问题]()
75 | - [新人权限申请](/知识库/DSS常见问题/用户权限问题/新人权限申请.md)
76 | - [SparkHive等组件报权限问题](/知识库/DSS常见问题/用户权限问题/SparkHive等组件报权限问题.md)
77 | - [没有队列权限不能提交到队列](/知识库/DSS常见问题/用户权限问题/没有队列权限不能提交到队列.md)
78 | km
79 | - [km]()
80 | - [BDAP-IDE2.1.2功能介绍](/km/BDAP-IDE2.1.2功能介绍.md)
81 | - [BDAP共享目录问题汇总](/km/BDAP共享目录问题汇总.md)
82 | ````
83 |
84 | **Notes:**
85 |
86 | 
87 |
88 | **Directory-level syntax:**
89 |
90 | - "-" + space + "[title]" + "()" marks a level-1 entry
91 | - 2 spaces + "-" + space + "[title]" + "()" marks a level-2 entry
92 | - 4 spaces + "-" + space + "[title]" + "()" marks a level-3 entry
93 | - 6 spaces + "-" + space + "[title]" + "()" marks a level-4 entry
94 |
95 | **Note: the parentheses hold the file's relative path, and file names must not contain ASCII parentheses**
96 |
97 |
98 |
99 | ## 3. Images in documents: notes and configuration
100 |
101 | Some documents contain illustrations. After a md file is parsed by the user-guide module, its content is stored in the database, but only the paths of its images are stored there, not the images themselves. Images must therefore be referenced in the md document by relative path, and nginx must then proxy that path to the server folder that actually holds the images.
102 |
103 | The directory layout looks like this (assuming the files live under /xxx/test1 on the server):
104 | ```text
105 | ├── 这是一个例子而已.md
106 | ├── images
107 | │   └── 1.png
108 | └── SUMMARY.md
109 | ```
110 |
111 | **Example:**
112 |
113 | ````md
114 | This is just an example!!!
115 | The image goes below
116 | 
117 | ````
118 |
119 | **nginx configuration:**
120 |
121 | ````nginx
122 | # server path where the images live
123 | location /images {
124 | root /xxx/test1/;
125 | autoindex on;
126 | }
127 | ````
128 |
129 | **Note: this block must be added to the nginx configuration that serves DSS; the images must be served from the same IP and port as the DSS service.**
130 |
131 | **Once configured, restart the service and the files will be synchronized into user-guide!**
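132 |
133 | To verify the proxy, an image referenced by a document should be reachable at the DSS address after nginx reloads; a quick hedged check (host and port are illustrative):
134 |
135 | ```shell
136 | nginx -s reload
137 | # should return HTTP 200 once the proxy is in place
138 | curl -I http://127.0.0.1:8088/images/1.png
139 | ```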
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
--------------------------------------------------------------------------------
/zh_CN/安装部署/DSS工作流内容从0.X版本迁移1.0版本说明文档.md:
--------------------------------------------------------------------------------
1 | # Migrating DSS Workflow Content from 0.X to 1.0
2 |
3 | > Because DSS 1.0 stores workflows in a structure that differs from DSS 0.X, the upgrade cannot be done by simply replacing jar packages and reusing the 0.X workflow data. The existing 0.X project content must first be exported; when it is later imported into 1.0, the migration service automatically parses the export package and recreates the workflows in 1.0.
4 |
5 | > The migration service mainly handles parsing the 0.X project export package, extracting the workflow information, adjusting the structure, and importing it as a 1.0 orchestration. Users only need to obtain the 0.X project export package and call the 1.0 import interface to migrate the workflow content.
6 |
7 | ## Main changes
8 |
9 | - New orchestration and orchestration-version tables: every orchestration version is associated with one workflow ID, and the orchestration's content is essentially the same as the former workflow's information
10 |
11 | - The workflow version table is removed; workflow versions move onto the orchestration version, and each workflow stores its bml version information
12 |
13 | - The project information table also changes slightly, dropping fields unused in 1.0
14 |
15 | ## Migration steps
16 |
17 | ### Step 1: Export the DSS 0.X project package
18 |
19 | > The 0.X export reuses the existing project export directly; the existing publish operation also exports the project.
20 |
21 | #### Export procedure
22 | 1. Enter the 0.X environment
23 | 2. Select a project's workflow and click Publish
24 | 3. Go to the 0.X deployment directory on the server
25 | 4. Then open the dss-server log directory and, from the logs, obtain the path where the package was unpacked; the path is the scheduling export path the user configured.
26 | 5. Download the zip package from that path
27 | 6. Then test /dss/framework/release/importOldDSSFlow with postman
28 |
29 | ### Step 2: Import into the DSS 1.0 environment
30 |
31 | Import interface: `http://ip:port/api/rest_j/v1/dss/framework/release/importOldDSSProject`
32 |
33 | HTTP method: POST
34 |
35 | Java import example:
36 | ```java
37 | // requires httpclient and httpmime from Apache HttpComponents
38 | CloseableHttpClient httpClient = HttpClients.custom().build();
39 | HttpPost httpPost = new HttpPost("http://ip:port/api/rest_j/v1/dss/framework/release/importOldDSSProject");
40 | MultipartEntityBuilder entityBuilder = MultipartEntityBuilder.create();
41 | entityBuilder.addBinaryBody("file", file); // File object of the exported project package
42 | entityBuilder.addTextBody("dssLabels", "{\"route\": \"dev\"}"); // label of the target environment; may be omitted
43 | httpPost.setEntity(entityBuilder.build());
44 | CloseableHttpResponse response = httpClient.execute(httpPost);
45 | ```
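46 |
47 | The same call can be made without Java; a hedged curl equivalent of the example above (ip, port and file path are placeholders, and gateway authentication, via cookie or token headers, must be added as your deployment requires):
48 |
49 | ```shell
50 | curl -X POST "http://ip:port/api/rest_j/v1/dss/framework/release/importOldDSSProject" \
51 |      -F "file=@/path/to/exported_project.zip" \
52 |      -F 'dssLabels={"route": "dev"}'
53 | ```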
44 |
45 |
46 | Import processing flow:
47 |
48 | - After the zip exported from DSS 0.X is uploaded to DSS 1.0, DSS 1.0 automatically creates the project and imports the orchestration information and workflow information; no user action is required
49 |
50 | - If you only care whether the import succeeded, you can jump straight to "Verify the imported content"
51 |
52 | The main steps the backend performs during import processing, which users need not pay attention to:
53 |
54 | 1. Download the imported 0.X project zip locally
55 |
56 | 2. Parse the zip content to obtain the project information, workflow information, workflow relations, etc.
57 |
58 | 3. Synchronize the project information: if the project does not exist, create it; if it already exists, repeated import is supported, overwriting the previously imported content. Because class definitions changed in 1.0, the content stored in the project metadata, such as class names, must be replaced; after replacement, the modified metadata is used to create the project
59 |
60 | ```java
61 | replaceFileStr(metaFilePath, "com.webank.wedatasphere.dss.common.entity.project.DWSProject",
62 | "com.webank.wedatasphere.dss.framework.project.entity.DSSProjectDO");
63 | replaceFileStr(metaFilePath, "com.webank.wedatasphere.dss.common.entity.flow.DWSFlow",
64 | "com.webank.wedatasphere.dss.workflow.common.entity.DSSFlow");
65 | replaceFileStr(metaFilePath, "com.webank.wedatasphere.dss.server.entity.DWSFlowRelation",
66 | "com.webank.wedatasphere.dss.workflow.common.entity.DSSFlowRelation");
67 | ```
68 |
69 | 4. Enrich the related content to fit the 1.0 architecture and fill in the orchestration information
70 |
71 | - Read the workflow metadata file meta.txt from the workflow import files, along with the workflow relation information dss_flow_relation, build the workflow dependency relations, create new orchestration information from all the root flows, and write it into a new meta.txt. Because the old export storage structure contained no orchestration content, the orchestrations must be created from the workflow information
72 |
73 | - Adjust the workflow packaging structure: the 1.0 convention imports at orchestration granularity, so all sub-workflows under one parent workflow in 0.x must be placed into a single orchestration import package. The original per-workflow export content is copied into the per-orchestration directory structure; the workflow export structure itself is unchanged from 1.0 and can be reused directly
74 |
75 | 5. Package the adapted directory into a new zip
76 |
77 | 6. Upload to bml: upload the newly packaged orchestration import package to the BML server and obtain its ResourceId and Version
78 |
79 | 7. Import into the dev orchestrator-server via the resourceId and version, reusing the existing 1.0 orchestration import flow
81 |
82 | ### Step 3: Verify the imported content
83 |
84 | * Check whether any workflows are missing
85 | * Check whether any workflow nodes are missing
86 | * Check whether the node content is correct
87 | * Check that real-time execution works
88 | * Check that all workflow resource files were synchronized
89 |
90 | ## Notes:
91 | - The migration covers only what the project export contains: mainly the workflow json, resource files, node scripts, and the definitions of third-party components associated with nodes.
92 |
93 |
94 | - It does not migrate the project's user permissions, UDF functions referenced in scripts, or third-party component content beyond what node export covers, such as data sources and rule definitions.
95 |
--------------------------------------------------------------------------------
/zh_CN/安装部署/DSS调试文档.md:
--------------------------------------------------------------------------------
1 |
2 | # Debugging
3 |
4 | > No programmer can write bug-free code in one sitting, so a fair share of every programmer's time goes into debugging; program debugging is work every programmer must face. The following shows how to remotely debug DSS (based on DSS 1.1.0).
5 |
6 | ## step 1 Prepare and compile the DSS source code
7 |
8 | ```plain
9 | git clone https://github.com/WeBankFinTech/DataSphereStudio.git
10 | cd DataSphereStudio
11 | # if needed, switch to the corresponding branch
12 | # git checkout dev-xxx
13 | mvn -N install
14 | mvn clean install
15 | ```
16 |
17 | ## step 2 Deploy the DSS services on the server
18 | If DSS is not deployed yet, see the deployment guide: [DSS stand-alone deployment guide](DSS&Linkis一键部署文档单机版.md)
19 |
20 | ## step 3 Open the debug port
21 |
22 | First be clear about which service the package you want to debug belongs to; determine the owning service from the location of the code to debug.
23 |
24 | Then go to the ${DSS_HOME}/sbin/ext directory under the DSS deployment directory and modify the startup script of the service to debug, opening the remote-debug port (using the workflow-server service as an example):
25 |
26 | ```shell
27 | cd ${DSS_HOME}/sbin/ext
28 |
29 | vim dss-workflow-server
30 | ```
31 | Find the DEBUG_PORT keyword in the script and set the port number to open **(it must be reachable from your local machine)**
32 |
33 | 
34 |
35 | Then restart the corresponding service for it to take effect:
36 |
37 | ```
38 | sh sbin/dss-daemon.sh restart workflow-server
39 | ```
40 | Note: if you are unsure of the service name, look it up in the ${DSS_HOME}/sbin/common.sh script;
41 | a keyword from the service name is enough to start the corresponding service:
42 |
43 | 
44 |
45 |
46 | ## step 4 Configure remote debugging in IDEA
47 | Open the Run/Debug configuration window as shown below and configure the remote-debug port, service and module:
48 |
49 | 
50 |
51 | ## step 5 Start debugging
52 |
53 | Click the debug button in the top-right corner of IDEA to start debugging:
54 |
55 | 
56 |
57 | ## step 6 Replace the jar package
58 |
59 | After changing code locally, build the jar of the corresponding module, replace the matching jar under the server's lib directory, and restart the service.
60 |
61 | ```shell
62 | cd ${DSS_HOME}
63 |
64 | ## upload the jar to the server
65 | rz -bye
66 |
67 | ## copy the uploaded jar over the original one
68 | cp ${your_jar_name} lib/${jar_path}
69 | ```
70 |
71 | Note: if you are unsure which services' lib directories contain the jar, you can search for all its locations with the following command:
72 | ```shell
73 | cd ${DSS_HOME}
74 |
75 | ## search lib/ for all dss-orchestrator-common-*.jar packages
76 | find lib/ -name "*dss-orchestrator-common*"
77 | ```
78 |
79 | 
--------------------------------------------------------------------------------
/zh_CN/安装部署/SchedulisAppConn插件安装文档.md:
--------------------------------------------------------------------------------
1 | # Schedulis AppConn Plugin Installation
2 |
3 | > By installing and configuring the AppConn plugin in DSS, workflows developed in DSS can be published to Schedulis for scheduled execution.
4 |
5 | ### 1. Preparation
6 |
7 | - DSS must be installed first, see: [DSS stand-alone deployment guide](DSS&Linkis一键部署文档单机版.md)
8 |
9 | - Schedulis must be installed and running first, see: [Schedulis deployment guide](https://github.com/WeBankFinTech/Schedulis/blob/master/docs/schedulis_deploy_cn.md)
10 |
11 | ### 2. Install the Schedulis AppConn
12 | - First check whether a schedulis directory exists under xx/dss_linkis/dss/dss-appconns; if it does not, download the schedulis plugin [download here](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/Schedulis/schedulis_appconn_DSS1.1.1/schedulis-appconn.zip)
13 | - Run the appconn-install.sh script to install the Schedulis AppConn; you only need to enter the IP and Port of the machine hosting the Schedulis WEB service to complete the plugin installation. While the script runs, it executes the init.sql under the corresponding AppConn, inserting the corresponding database records into the DSS tables
14 | ```shell
15 | ## switch to the DSS installation directory
16 | cd xx/dss
17 |
18 | ## run the appconn-install script: enter the appconn name, then the IP and PORT of the Schedulis WEB service as prompted
19 | ## note: when all services run on one machine, do not enter 127.0.0.1 or localhost as the IP; the real IP must be entered
20 | sh bin/appconn-install.sh
21 | >> schedulis
22 | >> xx.xx.xx.xx
23 | >> 8085
24 | ```
25 |
26 | #### Configure the url of the "Go to scheduling center" button
27 |
28 | - Edit the `${DSS_HOME}/conf/dss-workflow-server.properties` configuration:
29 |
30 | ```properties
31 | # this url points to the Schedulis operations-center page
32 | wds.dss.workflow.schedulerCenter.url="http://${schedulis_ip}:${schedulis_port}"
33 | ```
34 |
35 | - Then restart workflow-server for the configuration to take effect:
36 |
37 | ```shell script
38 | sh sbin/dss-daemon.sh restart workflow-server
39 | ```
40 |
41 |
42 | ### 3. Install the Schedulis JobType plugin
43 |
44 | - You also need to install a JobType plugin for Schedulis: linkis-jobtype; see the [Linkis JobType installation guide](Schedulis_Linkis_JobType安装文档.md).
45 |
46 | ### 4. Using Schedulis
47 |
48 | - Once the Schedulis service is deployed and the Schedulis AppConn is installed, Schedulis can be used in DSS. Users can click Schedulis in the application components to enter Schedulis, or, during workflow development, publish a DSS workflow to Schedulis for scheduled execution with one click.
49 |
50 | ### 5. How the Schedulis AppConn installation works
51 |
52 | - The Schedulis configuration information is inserted into the tables below; by configuring these tables, the Schedulis usage configuration is complete. When the Schedulis AppConn is installed, the script fills in each AppConn's init.sql and inserts the records into the tables.
53 |
54 | | Table | Purpose | Remarks |
55 | |-----------------|----------------|----------------------------------------|
56 | | dss_workspace_menu | menu table, stores externally displayed content such as icons and names | required |
57 | | dss_workspace_menu_appconn | relation table between menu and application, used for joint lookups | required |
58 | | dss_appconn | basic appconn information, used to load the appconn | required |
59 | | dss_appconn_instance | information about AppConn instances, including their own url information | required |
60 |
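61 | To confirm the installation took effect, the inserted rows can be checked directly; a hedged example assuming mysql access to the DSS database (credentials and database name are illustrative, and the column layout may differ between versions):
62 |
63 | ```shell
64 | mysql -u dss_user -p dss -e "SELECT * FROM dss_appconn WHERE appconn_name = 'schedulis';"
65 | ```
66 |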
61 | - As a scheduling framework, Schedulis implements the level-1 and level-2 specifications. The microservices that need to use the Schedulis AppConn are listed below.
62 |
63 | | Microservice | Functionality completed via the AppConn | Remarks |
64 | |-----------------|----------------|----------------------------------------|
65 | | dss-framework-project-server | uses schedulis-appconn to unify projects and the organizational structure | required |
66 | | dss-workflow-server | uses the scheduler AppConn to publish workflows, fetch status, etc. | required |
67 |
--------------------------------------------------------------------------------
/zh_CN/安装部署/Schedulis_Linkis_JobType安装文档.md:
--------------------------------------------------------------------------------
1 | # Schedulis Linkis JobType Installation Guide
2 |
3 | > This document mainly describes the automated installation of the Linkis JobType for Schedulis. For manual installation, see the Azkaban JobType [installation steps](https://azkaban.github.io/azkaban/docs/latest/#job-types)
4 |
5 | 1. Go to the Schedulis directory
6 |
7 | ```
8 | ## first go to the Schedulis installation directory:
9 | cd xx/schedulis_0.7.0_exec/plugins/jobtypes/linkis/bin
10 | ```
11 |
12 | 2. Edit the config.sh configuration
13 |
14 | ```
15 | ## Linkis gateway url
16 | LINKIS_GATEWAY_URL=http://127.0.0.1:9001 ## Linkis GateWay address
17 |
18 | ## Linkis gateway token, default WS-AUTH
19 | LINKIS_GATEWAY_TOKEN=WS-AUTH ## Linkis proxy Token; the default value can be used
20 |
21 | ## Azkaban executor host
22 | AZKABAN_EXECUTOR_HOST=127.0.0.1 ## for a stand-alone Schedulis this is the machine IP; for a distributed install, the IP of the Schedulis executor machine
23 |
24 | ## SSH Port
25 | SSH_PORT=22 ## SSH port
26 |
27 | ## Azkaban executor dir
28 | AZKABAN_EXECUTOR_DIR=xx/schedulis_0.7.0_exec ## for a stand-alone Schedulis this is the Schedulis installation directory; for a distributed install, the executor's installation directory; note: no trailing /
29 |
30 | ## Azkaban executor plugin reload url
31 | AZKABAN_EXECUTOR_URL=http://$AZKABAN_EXECUTOR_HOST:12321/executor?action=reloadJobTypePlugins ## only the IP and port need changing here
32 | ```
33 |
34 | 3. Run the installation script
35 |
36 | ```
37 | sh install.sh
38 | ```
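39 |
40 | After install.sh finishes, the executor can be asked to reload its JobType plugins through the reload url configured above; a hedged check using the default IP and port from config.sh:
41 |
42 | ```shell
43 | curl "http://127.0.0.1:12321/executor?action=reloadJobTypePlugins"
44 | ```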
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/zh_CN/开发文档/DSS编译文档.md:
--------------------------------------------------------------------------------
1 | ## Compiling the DSS Back End
2 |
3 | > After fetching the project code from github, use maven to build the back-end installation package. For front-end compilation see: [DSS front-end compilation guide](前端编译文档.md)
4 |
5 | 1. In the top-level pom.xml you can modify the versions of Linkis, Java, Scala, Maven and other software to match your company's big-data environment, for example:
6 |
7 | ```xml
8 | <properties>
9 |     <!-- property names follow the DSS top-level pom; adjust them if your pom differs -->
10 |     <dss.version>1.1.1</dss.version>
11 |     <linkis.version>1.1.1</linkis.version>
12 |     <scala.version>2.11.12</scala.version>
13 |     <jdk.compile.version>1.8</jdk.compile.version>
14 |     <maven.version>3.3.3</maven.version>
15 | </properties>
16 | ```
17 |
18 | 2. **If this is your first local build, you must first run the following command in the directory of the outermost pom.xml**:
19 |
20 | ```bash
21 | mvn -N install
22 | ```
23 |
24 | 3. Run the following command in the directory of the outermost pom.xml
25 |
26 | ```bash
27 | mvn clean install
28 | ```
29 |
30 |
31 |
32 | 4. The installation package is generated under the project's assembly->target directory:
33 |
34 | ```
35 | wedatasphere-dss-x.x.x-dist.tar.gz
36 | ```
37 |
38 |
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/zh_CN/开发文档/Scriptis如何新增脚本类型.md:
--------------------------------------------------------------------------------
1 | ## 1. Back-end changes
2 |
3 | In ```linkis-computation-governance\linkis-manager\label-common\src\main\java\com\webank\wedatasphere\linkis\manager\label\entity\engine\EngineType.scala```,
4 | add a brand-new engine type to the enumeration.
5 |
6 | For how to implement a brand-new Linkis engine, see: [how to implement a new engine](https://linkis.apache.org/zh-CN/docs/latest/development/new-engine-conn).
7 |
8 | ## 2. Front-end changes
9 |
10 | Open the file ```web/src/common/config/scriptis.js``` and add one record to its array.
11 |
12 | The record is an object with the following configurable properties (a hypothetical example follows the list):
13 |
14 | - rule: a regular expression matched against the file-name suffix;
15 | - executable: whether this script type can be executed;
16 | - lang: the language the monaco editor uses for it, driving highlighting, completion, etc.; see the languages supported by monaco editor, or define your own;
17 | - application: maps to a Linkis engine type; passed as the executeApplicationName parameter in websocket or http polling;
18 | - runType: maps to the script type of a Linkis engine; passed as the runType parameter in websocket or http polling;
19 | - ext: the file suffix of this script type, e.g. ```.hql```;
20 | - scriptType: the script type known to the system, used when creating new scripts; because application and runType may both repeat, this is what distinguishes script types on the front end;
21 | - abbr: the suffix name of this script type (deprecated; will be removed later);
22 | - label: the script-type name shown on the new-script UI, with the first letter capitalized;
23 | - logo: the script's icon, stored at ```web/src/apps/scriptis/assets/styles/home.scss```; append the new LOGO at the end of that file.
24 | - isCanBeNew: whether it can be created, shown in places such as the workspace where scripts can be created;
25 | - isCanBeOpen: whether it can be opened, e.g. by double-click or right-click in workspace-like modules (enforced on the front end; the back end also validates whether a script may be opened);
26 | - flowType: the type used within workflows.
27 |
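28 | For illustration, a hypothetical entry for a new `.demo` SQL-like script type might look as follows. Every value below is an assumption; in particular, align application and runType with the engine type you actually added in section 1:
29 |
30 | ```javascript
31 | {
32 |   rule: /\.demo$/i,          // matches file names ending in .demo
33 |   executable: true,          // the script can be run from Scriptis
34 |   lang: 'sql',               // monaco editor language used for highlighting
35 |   application: 'demoEngine', // hypothetical Linkis engine type
36 |   runType: 'demo',           // hypothetical engine script type
37 |   ext: '.demo',              // file suffix
38 |   scriptType: 'demo',        // front-end script-type key
39 |   abbr: 'demo',              // deprecated suffix name
40 |   label: 'Demo',             // name shown on the new-script UI
41 |   isCanBeNew: true,          // can be created from the workspace
42 |   isCanBeOpen: true,         // can be opened by double-click / right-click
43 |   flowType: 'demo'           // type used within workflows
44 | }
45 | ```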
--------------------------------------------------------------------------------
/zh_CN/开发文档/前端编译文档.md:
--------------------------------------------------------------------------------
1 | # DSS Front-end Compilation Guide
2 |
3 | ## Getting started
4 |
5 | ### 1. Install Node.js
6 |
7 | Download Node.js to your machine and install it.
8 |
9 | Download link: [http://nodejs.cn/download/](http://nodejs.cn/download/) (the latest stable version may have compatibility problems; the tested versions v10.16.2 and v14.15.0 can be used). Back-end developers are advised to build on Linux.
10 |
11 | **This step is only needed the first time.**
12 |
13 | ### 2. Install the project
14 |
15 | Run the following commands in a terminal:
16 |
17 | ```shell script
18 | cd DataSphereStudio/web
19 | lerna bootstrap
20 | ```
21 |
22 | What the commands do:
23 | 1. Enter the project root directory: `cd DataSphereStudio/web`
24 | 2. Install the dependencies the project needs: `lerna bootstrap`
25 | 3. If the lerna command is not installed, install it with `npm install lerna -g`
26 |
27 | ### 3. Build the project
28 |
29 | Run the following command in a terminal to build the project and generate minified code:
30 |
31 | ```shell script
32 | npm run build
33 | ```
34 |
35 | After the command succeeds, a folder named "dist" appears in the project root directory; that folder is the packaged code. You can place it directly onto your static server, for example with nginx as sketched below.
36 |
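37 | If you serve the dist with nginx, a minimal hedged example (the root path is illustrative):
38 |
39 | ```nginx
40 | location / {
41 |     root   /appcom/Install/dss/web/dist;   # path to the built front-end package
42 |     index  index.html;
43 | }
44 | ```
45 |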
37 | ### 4. Run the project
38 |
39 | To run the project in a local browser and see the effect of code changes, run the following in a terminal:
40 |
41 | ```shell script
42 | npm run serve
43 | ```
44 |
45 | Then open the app in a browser (Chrome is recommended): ```http://localhost:8080/```
46 |
47 |
48 | When you run the project this way, the effects of your code changes are reflected dynamically in the browser.
49 |
50 | **Note: because the project uses front/back-end separation, when running in a local browser the browser must be configured for cross-origin access in order to reach the back-end interfaces:**
51 |
52 | Option 1: here is how to configure cross-origin access in Chrome (for a proxy-based alternative, see Option 2 below):
53 |
54 | - On Windows:
55 | 1. Close all Chrome windows.
56 | 2. Create a new Chrome shortcut, right-click "Properties", and in the "Shortcut" tab's "Target" field append ```--args --disable-web-security --user-data-dir=C:\MyChromeDevUserData```
57 | 3. Open Chrome through that shortcut
58 |
59 | - On macOS:
60 |
61 | Run the following command in a terminal (replace yourname in the path; if it still does not work, check where the MyChromeDevUserData folder is on your machine and copy that path after ```--user-data-dir=```)
62 |
63 | ```shell script
64 | open -n /Applications/Google\ Chrome.app/ --args --disable-web-security --user-data-dir=/Users/yourname/MyChromeDevUserData/
65 | ```
65 | ```
66 | #### Option 2
67 | Use vue's proxy configuration: edit vue.config.js under the dss package in web/packages:
68 | ```
69 | devServer: {
70 | proxy: {
71 | '/api': {
target: 'http://127.0.0.1:9001', // set this to your back-end address
73 | changeOrigin: true,
74 | pathRewrite: {
75 | '^/api': '/api'
76 | }
77 | }
78 | }
79 | }
80 | ```
--------------------------------------------------------------------------------
/zh_CN/用户手册/DSS新增用户方式.md:
--------------------------------------------------------------------------------
1 | # How to Add a New DataSphere Studio User
2 |
3 | > DSS only provides an administrator account by default; user login authentication relies on Linkis's LDAP user login-authentication system. This document explains in detail how to add a brand-new DSS user.
4 |
5 | ## 1. Basic introduction
6 |
7 | The DSS super administrator's username is the deployment username: e.g. if the deployment user is hadoop, the administrator's username and password are hadoop/hadoop. Details can be found in the [DSS stand-alone deployment guide](../安装部署/DSS&Linkis一键部署文档单机版.md).
8 |
9 | Adding a new DSS user mainly consists of the following steps:
10 |
11 | - Connect DSS to the LDAP authentication system
12 | - Add the LDAP user
13 | - Prepare the environment for the new user
14 |
15 | ## 2. Connect DSS to the LDAP authentication system
16 |
17 | 1. First set up the LDAP server and make sure the LDAP service is available and can create users normally, as shown below (using the ldapadmin client as an example):
18 | 
19 |
20 | 2. Add the following ldap settings to the dss.properties configuration file (replace the ip and password fields)
21 |
22 | wds.dss.ldap.admin.name=cn=root,dc=zdww,dc=com
23 |
24 | wds.dss.ldap.admin.password=XXXX
25 |
26 | wds.dss.ldap.url=ldap://10.18.XX.XX:389/
27 |
28 | wds.dss.ldap.base.dn=ou=user,dc=zdww,dc=com
29 |
30 | ## 3. Add the LDAP user
31 |
32 | The DSS super administrator can create departments and users on the home page, building the company's hierarchy and personnel-management system. As shown below:
33 |
34 | 
35 |
36 | #### Creating departments and users:
37 |
38 | On the super-administrator home page, click "管理台" (Admin Console) to enter the management page.
39 |
40 | The super administrator can create, modify and delete departments (note: at most four department levels may be created, including the root company); the root company cannot be deleted, only modified.
41 |
42 | The super administrator can also create and modify users and reset user passwords.
43 |
44 | As shown below:
45 |
46 | 
47 |
48 | When you create a user on this page, the DSS back end automatically calls LDAP and creates a user of the same name in LDAP.
49 |
50 | ## 4. Prepare the environment for the new user
51 |
52 | 1. Because DSS & Linkis implement top-down multi-tenant isolation, for a login user to use DSS normally the corresponding Linux user must also be created on the Linux servers. The concrete steps are:
53 |
54 | - Create the corresponding Linux user on all Linkis & DSS servers.
55 | - If Hadoop is used, also create the corresponding Linux user on the Hadoop NameNode.
56 | - Make sure the Linux user on the Linkis & DSS servers can run commands such as `hdfs dfs -ls /`, and can also run shell commands such as `spark-sql -e` and `hive -e`.
57 | - Because each user's workspace is strictly isolated, you must also create a workspace directory and an HDFS directory for the user, as follows:
58 |
59 | ```shell script
60 | ## create and authorize the user's workspace directory
61 | mkdir $WORKSPACE_USER_ROOT_PATH/${NEW_USER}
62 | chmod 750 $WORKSPACE_USER_ROOT_PATH/${NEW_USER}
63 |
64 | ## create and authorize the user's HDFS directory
65 | hdfs dfs -mkdir $HDFS_USER_ROOT_PATH/${NEW_USER}
66 | hdfs dfs -chown ${NEW_USER}:${NEW_USER} $HDFS_USER_ROOT_PATH/${NEW_USER}
67 | hdfs dfs -chmod 750 $HDFS_USER_ROOT_PATH/${NEW_USER}
68 | ```
69 |
70 | `WORKSPACE_USER_ROOT_PATH` and `HDFS_USER_ROOT_PATH` are the workspace and HDFS root paths you set during the one-click DSS installation.
71 |
72 | If you did not set them, the defaults are:
73 |
74 | ```shell script
75 | WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis
76 | HDFS_USER_ROOT_PATH=hdfs:///tmp/linkis
77 | ```
78 |
--------------------------------------------------------------------------------
/zh_CN/用户手册/Scriptis使用文档.md:
--------------------------------------------------------------------------------
1 | Scriptis User Guide
2 | ------
3 | ## Overview
4 | Scriptis uses the big-data platform's compute engines, such as Spark, Hive and HBase, for interactive query and analysis, supporting the daily work of data-mining and analysis users. It provides a graphical, multi-style interface that makes data analysis, script editing, testing and querying simpler and more convenient.
5 | 
6 |
7 | ## Workspace
8 | The workspace is a file directory over which the user has full permissions for file-management operations. It generally corresponds to a directory on the Linkis server; each login user has one, storing the user's scripts, result sets and other files.
9 | 
10 | Right-clicking a workspace folder offers: copy path, new directory, new script, and refresh.
11 | 
12 | Right-clicking a file under a workspace folder offers: open to the side, copy path, rename, delete, import to hive (csv, txt, excel files), import to hdfs, and more.
13 | 
14 | ## Database
15 | The database module lists the hive databases the login user has permission on. Right-clicking a database offers refresh database, refresh tables, and refresh field information. Right-clicking a table offers: query table (quickly generates a temporary hive script for viewing the data), copy table name, delete table, and copy table fields. The view-table-structure action shows detailed field information, table details, and table partition information.
16 | 
17 | ## UDFs and functions
18 | The UDF feature categorizes UDFs for display and lets users manage their personal functions. UDF configuration management has moved to the Linkis console; see the Linkis UDF documentation for the relevant configuration.
19 |
20 | ## HDFS
21 | After installation, Linkis provides every user with a default HDFS path for the user's resource files. Scriptis displays the user's HDFS folder; its contents can be created, deleted and modified via right-click, and the files under the path can likewise be managed via the right-click menu.
22 | 
23 |
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/apiservicepage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/apiservicepage.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/createapiservice.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/createapiservice.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/createapiservice_param.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/createapiservice_param.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/loginpage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/loginpage.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/modifyapiservice.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/modifyapiservice.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/postman1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/postman1.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/postman2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/postman2.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/project.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/project.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/runworkflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/runworkflow.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/scriptis.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/scriptis.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/scriptis_database.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/scriptis_database.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/scriptis_hdfs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/scriptis_hdfs.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/scriptis_summary.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/scriptis_summary.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/scriptis_workspace.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/scriptis_workspace.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/scriptis_workspace_dir.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/scriptis_workspace_dir.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/scriptis_workspace_file.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/scriptis_workspace_file.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/useapiservice.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/useapiservice.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/workflow.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/workspace.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/workspace.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/创建工作空间.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/创建工作空间.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/创建部门和用户.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/创建部门和用户.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/新增用户功能-LDAP界面.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/新增用户功能-LDAP界面.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/任务实例.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/任务实例.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/周期实例完成情况.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/周期实例完成情况.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/定时运行工作流.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/定时运行工作流.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/定时页面.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/定时页面.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/工作流定义.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/工作流定义.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/工作流实例.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/工作流实例.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/工作流实例与成功率统计.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/工作流实例与成功率统计.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/按日期进行流程状态统计.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/按日期进行流程状态统计.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/查看日志.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/查看日志.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/流程状态统计.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/流程状态统计.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/流程耗时排名.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/流程耗时排名.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/编辑定时任务.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/编辑定时任务.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/运行工作流.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/运行工作流.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/调度中心/页面概述.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/调度中心/页面概述.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/images/超级管理员功能.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/用户手册/images/超级管理员功能.png
--------------------------------------------------------------------------------
/zh_CN/用户手册/数据服务使用手册.md:
--------------------------------------------------------------------------------
1 | Data API Service
2 | ----------
3 |
4 | DSS supports publishing SQL scripts as data-service APIs and sharing them with other users. Business users can run a data-service script just by setting parameters, without writing code or having a big-data platform account, and can browse or download platform data directly.
5 | A data service is something users with database/table permissions provide to users without them. This document describes it from two perspectives: the user who publishes a data service (the publisher) and the user who consumes one (the consumer).
6 | For business reasons, data services carry the following usage restrictions:
7 | * only Spark SQL query statements are supported
8 | * multiple result sets are not supported
9 | * only SELECT query statements are supported
10 | * only one SQL statement is allowed
11 | * no trailing semicolon is allowed in the SQL statement
12 |
13 | Apart from the above restrictions, the following is allowed.
14 | ```sql
15 | USE DATABASE default;
16 | SELECT * FROM default.book
17 | ```
18 |
19 | **1. Create a data service**
20 |
21 | When business needs call for a data service authorized to others, the publisher enters Scriptis, creates a new script file, writes the Spark SQL statement, and embeds variables in the SQL so that business users can later fetch data just by setting the parameters.
22 | After saving the Spark SQL script, click "发布为数据API" (publish as data API) above the script editor (only some users have this permission; for users without it, the button is not visible) and fill in the new API information as shown below.
23 | 
24 |
25 | Click Next to configure the variables.
26 | 
27 |
28 |
29 | On the workspace home page, the publisher can open the "数据服务" (data service) application via the application tools; clicking "更多" (more) on the data-service tab opens the data-service management page
30 |
31 | 
32 |
33 |
34 |
35 | **2. Use a data service**
36 |
37 | On the data-service page you can see the list of data services available to you; default means all data services. Click the matching tag to filter the services you need, or filter by name, status and submitter in the search box,
38 |
39 | The consumer clicks "进入使用" (start using) and can set parameter values in the filter conditions; what the consumer can access is a subset of the publisher's data set.
40 |
41 | 
42 |
43 | **3. Modify a data service**
44 |
45 | A data service may need changes as the business evolves; after the publisher modifies the data service's script, they can click "更新数据API" (update data API).
46 | When updating a data service, other data services to bind can be chosen.
47 | 
48 |
49 | **4. Access a data service with postman**
50 |
51 | Once published, a data service can be accessed through its api interface and called directly by other systems. Submitting a query is shown below:
52 |
53 | 
54 |
55 | After obtaining the task execution ID, you can use it to fetch the task's progress, logs, result sets and so on.
56 |
57 | 
58 |
59 | Note: the data-service token is returned by the /queryById? interface (click "进入使用"), in the field userToken. All interface access must pass GateWay authentication. The data-service token can only be used within the data-service management flow. With postman, authenticate either with the page cookie or with linkis-gateway's token scheme: add Token-Code: XXX (the linkis-gateway login token) and Token-User: XXX (the linkis-gateway login user) to the headers.
60 |
61 |
62 |
--------------------------------------------------------------------------------
/zh_CN/用户手册/数据服务简介.md:
--------------------------------------------------------------------------------
1 | DSS Plugin - Data Api Services
2 | ----------
3 | ## Motivation
4 | In real business, business staff and data developers usually need to work together. Business staff are generally unfamiliar with data development, so when they need data they usually have to communicate with the data team. Data developers typically implement the data work according to the business side's requirements and then deliver it by sharing scripts or data.
5 | Data extraction occurs in many scenarios in real production, but the overall structure is mostly similar. The following describes the data-provisioning scenarios from several angles.
6 | ## Traditional data extraction
7 | The business side provides the requirement and the data developer finishes the extraction script; the hand-off to the business side happens in one of two ways.
8 | - First: share the extraction script directly with the business staff (usually via shared storage or some other channel), who run the script themselves to fetch the data. Pros and cons of this approach:
9 | Pros: suits ad-hoc requirements where the script is used only once or a few times; convenient.
10 | Cons: because the business staff must also run the script, they need permissions on the databases and tables the script touches, and there is a risk of the script being tampered with.
11 | - Second: the data developer runs the extraction script themselves and shares the resulting data with the business staff. Pros and cons:
12 | Pros: suits ad-hoc requirements where the script is used only once or a few times; convenient.
13 | Cons: there is a security and compliance risk; data the business has no permission to view risks being leaked.
14 | ## Data View
15 | A data view is similar to a data service, with some differences in process. When a business user needs data, they first raise the requirement, then file a ticket to request data-access permission; after ticketing, the data developer implements the requirement and publishes the finished script to the business user.
16 | Because of internal security requirements, accessing data one has no permission for requires ticketed authorization, so the pros and cons of data views are similar to those of data services.
17 | The difference is that a data view's ticket request is initiated by the business user.
18 | ## Data Service (Api Service)
19 | A data service is something users with database/table permissions provide to users without them, while also preventing tampering with the published script. Business-wise, with the data-service approach the business side first raises the requirement, then the data developer writes the script and publishes it as a data service.
20 | When publishing a data service, granting the business user access must, per company security requirements, go through authorization; this ticket is initiated by the data developer, and once authorized the business user can use the data service. The data-service approach has the following characteristics.
21 | Pros: suits scripts with fixed requirements, and is convenient where the script is used many times. It also prevents script tampering, and lets business users fetch data without database/table-level grants. It is friendly: business users can fetch data with just a few simple operations.
22 | ## Data services vs. data views
23 | Data views generally fit scenarios where the business side raises requirements to data developers top-down; before the requirement is provided, the data developer does not know the needed database/table permissions or the concrete content to develop.
24 | Data services are different: in fairly fixed business scenarios, the data service is initiated bottom-up, with the data developer defining the set of accessible data; the business side uses the script and can only view a subset of the data set the data developer provides.
25 | ## How to use it
26 | ### Compile
27 | ```shell
28 | # compiled and packaged together with DSS
29 | mvn -N install
30 | mvn -DskipTests=true clean package
31 | ```
32 | After unpacking the built tar package, the corresponding lib can be found under dss-apps. If you want to split this service out, it can be packaged separately, but the related startup scripts and configuration must be extracted and adjusted; for now, compiling and packaging together with DSS is recommended.
33 | ### Start and stop
34 | ```shell
35 | # 1. use the batch start/stop scripts
36 | sh sbin/start-all.sh
37 | sh sbin/stop-all.sh
38 |
39 | # 2. start/stop this service alone
40 | sh sbin/dss-daemon.sh start dss-datapipe-server
41 | sh sbin/dss-daemon.sh stop dss-datapipe-server
42 | sh sbin/dss-daemon.sh restart dss-datapipe-server
43 | ```
44 | ### Restrictions
45 | Currently the DSS data service can only be used inside DSS's Scriptis; note the following before use:
46 | 1. Only Spark SQL scripts can be published as data services.
47 | 2. The data-service script's parameters must be configured as ${params} parameters in Scriptis.
48 | 3. After publishing, the data service can be seen in the data-service module.
49 | 4. Only users granted data-service permission in the workspace configuration can use the data-service feature.
50 |
51 | ### Data-service architecture
52 | The core of the data service revolves around Linkis Data View: instead of authorizing whole databases and tables, the publisher creates the DataView SQL script for the required row/column authorization, filtering rows with Where and columns with Select. Only the publisher or the proxy user holds permissions on the whole table. The data-service architecture diagram follows:
53 |
54 |
55 | ### Using the data service
56 | The Data Api Service is a plugin within the DSS ecosystem; its usage instructions can be found in the document [data service usage guide]().
57 |
58 |
59 |
60 |
--------------------------------------------------------------------------------
/zh_CN/用户手册/用户使用文档.md:
--------------------------------------------------------------------------------
1 | ## DSS User Document
2 |
3 | ## Motivation
4 | DSS 1.x is a milestone release, with extensive optimization and refactoring on top of DSS 0.x. Limited space means this can only cover the basic DSS usage flow; we hope to polish the finer operational details together with our community partners. If you have any questions or suggestions while using it, feel free to contact the relevant open-source members of WeBank's big-data platform community; we are committed to building a better one-stop big-data suite and contributing to the big-data open-source ecosystem.
5 |
6 | ## Preface
7 | DSS 1.x reworks and optimizes the front-end page interactions. This document is the DSS user manual and covers the basic usage flow of DSS 1.0; for more operational details, see the documentation of each module.
8 | The user documentation is split into the following documents:
9 | 1. [Scriptis user guide]()
10 | 2. [Workflow user guide]()
11 | 3. [Data-service user guide]()
12 | 4. [Linkis console user manual]()
13 |
14 | ## Management modules
15 | ## Login page
16 | For ease of use, the system logs in by default with the Linux user that deployed Linkis: e.g. for Linkis and DSS deployed by hadoop, log in directly with user hadoop, password hadoop (the password is the username). Enter the DSS front-end address, e.g. 127.0.0.1:8088, then the username and password hadoop/hadoop to log in. The login page provides DSS access-permission verification.
17 | 
18 | *Note: to support multi-user login, DSS user login relies on Linkis and must be configured in the linkis-GateWay configuration; Linkis-GateWay supports LDAP by default.*
19 |
20 | ## Workspaces
21 | The workspace page lets you create and manage workspaces. The workspace is DSS's top-level concept: intuitively, a workspace can be a department, a business line or an organization, used to manage data applications, including people, projects and components. Each workspace has a workspace-administrator role that manages the workspace and controls its component and member permissions.
22 | 
23 |
24 | ## Project management
25 | After entering a workspace you land on the project home page, where projects can be created. In real development and production, a project is typically used to manage the development of one class of data application, including workflows, single tasks and so on; the projects under a workspace are isolated from one another. In practice, assigning each data application its own project is the ideal approach.
26 | 
--------------------------------------------------------------------------------
/zh_CN/用户手册/调度中心使用文档.md:
--------------------------------------------------------------------------------
1 | # Scheduling Center User Guide
2 |
3 | The scheduling center is a visual workflow-scheduling platform. Its operations dashboard visualizes and aggregates the run status of workflow tasks in real time, and its scheduling features support running workflows,
4 |
5 | managing schedules, bringing workflows online and offline, and managing workflows with operations such as rerun, recover, kill and pause.
6 |
7 | The log feature supports viewing and downloading workflow log information.
8 |
9 | **Note: to use the scheduling center, install DolphinSchedulerAppConn first.** [How to install DolphinSchedulerAppConn](../安装部署/DolphinScheduler插件安装文档.md)
10 |
11 | ## Page overview
12 |
13 | The scheduling-center page is shown below.
14 |
15 | 
16 |
17 | The areas of the operations-center home page work as follows:
18 |
19 | - ① project and workflow list: the list on the left shows all projects and workflows in the workspace; you can switch projects here;
20 | - ② feature list: this area shows, per project, the operations dashboard, workflow definitions, workflow instances and task instances;
21 | - ③ operations area: perform the operations of the selected feature here;
22 |
23 | ## Operations dashboard
24 |
25 | The operations dashboard shows chart-based monitoring statistics for all workflows in the project over different time ranges: process-status statistics, periodic-instance completion, workflow-instance and success-rate statistics, and process-definition statistics.
26 |
27 | Steps:
28 |
29 | 1. Log in to the platform, enter the operations center, and choose "运维大屏" (operations dashboard)
30 | 2. View the workflow statistics of the currently selected project; the different panels mean the following:
31 |
32 | #### 2.1 Process status statistics
33 |
34 | Shows the run status of all workflow instances in the selected project as a pie chart.
35 |
36 | 
37 |
38 | Click the **date icon** to choose the statistics period
39 |
40 | 
41 |
42 | #### 2.2 Periodic instance completion
43 |
44 | Shows the task execution of periodic instances in the selected project; the horizontal axis is time and the vertical axis is the number of periodic instances. Click 【运行成功】 (succeeded) or 【运行失败】 (failed) in the top-right corner to switch which periodic instances are shown.
45 |
46 | 
47 |
48 | #### 2.3 Workflow instances and success rate
49 |
50 | Shows the number of executed workflow instances and the success rate over different time ranges in the selected project; the horizontal axis is time and the vertical axis is instance count and success rate. Click 【今日】 (today) or 【昨日】 (yesterday) in the top-right corner to switch the date.
51 |
52 | 
53 |
54 | #### 2.4 Process duration ranking
55 |
56 | Shows, for the chosen period, a descending ranking of the durations of task instances that already have an end time.
57 |
58 | 
59 |
60 | ## Workflow definitions
61 |
62 | The workflow-definition page shows the information of all workflows defined in the currently selected project; the action column supports running, scheduling, online/offline and schedule management for a single workflow.
63 |
64 | 
65 |
66 | - 1. How do I run the currently selected workflow?
67 |
68 | In the row of the workflow to run, click the **运行** (run) button in the action column.
69 |
70 | 
71 |
72 | - 2. How do I run a workflow on a schedule?
73 |
74 | In the row of the workflow to run, click the **定时** (schedule) button in the action column.
75 |
76 | In the dialog, set the parameters and configure the notification recipients, then click the 【创建】 (create) button to set up the scheduled task.
77 |
78 | 
79 |
80 | - 3. Toggling a workflow online/offline
81 |
82 | For a workflow that is not yet online, click the **上线** (online) and **下线** (offline) buttons in its action column to toggle its state.
83 |
84 | - 4. Schedule management
85 |
86 | Workflow schedule management supports online/offline toggling, deleting and editing of a workflow's past and currently running scheduled services.
87 |
88 | Select the workflow to manage and click the **定时** (schedule) button in its action column to enter its schedule-management page. As below:
89 |
90 | 
91 |
92 | Click the **编辑** (edit) button to edit the workflow's scheduled service.
93 |
94 | 
95 |
96 |
97 | ## Workflow instances
98 |
99 | The workflow-instance page lists the workflow's status, run type, start time, run duration and other information, and lets you operate on the workflow's run state.
100 |
101 | 
102 |
103 | - 1. Rerun a workflow
104 |
105 | Select the workflow to rerun and click the **重跑** (rerun) button in its action column.
106 |
107 | - 2. Recover a workflow
108 |
109 | Select the workflow to recover and click the **恢复** (recover) button in its action column.
110 |
111 | - 3. Kill a workflow
112 |
113 | Select the workflow to kill and click the **终止** (kill) button in its action column.
114 |
115 | - 4. Pause a workflow
116 |
117 | Select the workflow to pause and click the **暂停** (pause) button in its action column.
118 |
119 | - 5. Delete a workflow
120 |
121 | Select the workflow to delete and click the **删除** (delete) button in its action column.
122 |
123 | - 6. View the Gantt chart
124 |
125 | View the workflow's Gantt chart, which consists of time and the run times of the nodes within the workflow.
126 |
127 | Select the workflow whose Gantt chart you want to see and click the **查看甘特图** (view Gantt chart) button in its action column.
128 |
129 | ## Task instances
130 |
131 | The task-instance page lists the basic information and run status of each node task in a workflow, and lets you view a task instance's run logs.
132 |
133 | 
134 |
135 | - 1. View logs
136 |
137 | View a task instance's run logs, with support for downloading, refreshing and full-screen viewing of the log.
138 |
139 | 
140 |
141 | Click the **下载** (download) button in the log window to download the log locally.
142 |
143 | For a running task instance, click the **刷新** (refresh) button in the log window to refresh the log in real time.
144 |
145 | Click the **全屏** (full screen) button in the log window to view the log in full screen.
--------------------------------------------------------------------------------
/zh_CN/用户手册/超级管理员功能.md:
--------------------------------------------------------------------------------
1 | ## The workspace concept
2 |
3 | In data development and management, the concept of a workspace is similar to a team: a company (i.e. the main account) can create multiple workspaces. A workspace is the basic unit for managing tasks and members and for assigning roles and permissions; its members can collaborate on development, operations, analysis and so on within one workspace.
4 |
5 | The organization's main account is by default the workspace administrator of that organization, responsible for managing the organization's workspace management console, planning the workspace architecture, and creating and deleting workspaces.
6 |
7 | The super administrator can click 【创建工作空间】 (create workspace) on the administrator home page to create a workspace.
8 |
9 | 
10 |
11 | #### Workspace types:
12 |
13 | Workspaces are divided into project-oriented and department-oriented ones:
14 |
15 | - the members of a project-oriented workspace form a vertical structure and may come from different departments;
16 | - the members of a department-oriented workspace form a horizontal structure and come from the same department.
17 |
18 | In a project-oriented workspace, members from any department can be added;
19 |
20 | in a department-oriented workspace, only members of that department can be added.
21 |
22 | #### Workspace administration:
23 |
24 | The workspace creator's default role is owner, and one or more administrators can be delegated to manage the workspace.
25 |
26 | Only the workspace's administrators can enter the 【工作空间管理】 (workspace management) module to manage the workspace's basic information and permission information.
--------------------------------------------------------------------------------
/zh_CN/设计文档/FlowExecution/images/flowexecution.drawio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/FlowExecution/images/flowexecution.drawio.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/FlowExecution/images/工作流执行uml.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/FlowExecution/images/工作流执行uml.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/Orchestrator/images/orchestrator_arch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/Orchestrator/images/orchestrator_arch.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/Orchestrator/images/orchestrator_uml.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/Orchestrator/images/orchestrator_uml.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/Orchestrator/images/创建编排时序图.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/Orchestrator/images/创建编排时序图.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/Orchestrator/images/导入编排时序图.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/Orchestrator/images/导入编排时序图.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/UserGuide/DSS-UserGuide模块设计.md:
--------------------------------------------------------------------------------
1 | # DSS-UserGuide Module Design
2 |
3 | ### Introduction
4 |
5 | The DSS user-guide module is a feature module newly added in DSS 1.0, providing usage guidance for DSS users. It records many problems encountered while using DSS together with their solutions, as well as usage notes for certain features, so users can search for solutions to the problems they meet on their own. Later it can also be linked to error codes, so that when an error code pops up, it navigates directly to the solution already recorded in the knowledge base. The guide module stores files as html in a table field, so md files must be parsed and converted to html. Because some files contain links that must navigate, a separate gitbook is set up to display and manage these documents. To synchronize the dss-guide-user module efficiently, the files on gitLab are packaged, then uploaded and unpacked into a designated directory on the server hosting gitbook; guide-user scans that directory on a schedule to achieve synchronization.
6 |
7 | ## Main dss_guide modules
8 |
9 | The DSS_Guide module mainly contains the Restful, Service, Dao and Entity definitions.
10 |
11 | ### GuideGroupService
12 |
13 | Provides the ability to create, query, modify, save and delete GuideGroups, as well as the ability to synchronize SUMMARY.md. The guide module parses this file and, using the configured path of each directory level within it, locates the files to read and writes them to the database on a schedule, completing synchronization. When the service runs on a different server, to avoid installing gitbook again, the guide module is configured with the IP of the server holding the files and automatically synchronizes them to the server hosting the guide module for display.
14 |
15 | ### GuideContentService
16 |
17 | Handles the save, query, update and delete operations of GuideContent.
18 |
19 | ### GuideChapterService
20 |
21 | Dedicated to the concrete content of manual chapters, including chapter search, query by ID, delete, save, etc.
22 |
23 | ### GuideCatalogService
24 |
25 | Implements knowledge-base synchronization, supports batch insertion of catalog content, and implements the save, delete and query operations of the catalog-structure classification
26 |
27 |
28 | ### Core flow chart
29 |
30 | 
31 |
32 |
33 | ### Data structures
34 |
35 | 
36 |
37 | ### dss_guide_group
38 |
39 | The groups of dss_guide, including group_id, path (access path), title, etc.
40 |
41 | ### dss_guide_chapter
42 |
43 | Stores the detailed content of dss_guide chapters, including catalog_id, title, content, content_html. Associated with the content of dss_guide_catalog.
44 |
45 | ### dss_guide_content
46 |
47 | Stores the grouped description content, assigned under the corresponding group. Includes title, type, content, content_html, etc.
48 |
49 | ### dss_guide_catalog
50 |
51 | Classifies the content of dss_guide, acting as the knowledge base's directory structure with hierarchical relations.
--------------------------------------------------------------------------------
/zh_CN/设计文档/UserGuide/images/1653309535303.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/UserGuide/images/1653309535303.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/UserGuide/images/数据结构.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/UserGuide/images/数据结构.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/UserGuide/images/核心流程图.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/UserGuide/images/核心流程图.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/Workspace/README.md:
--------------------------------------------------------------------------------
1 | Introduction
2 | -------------------------
3 | Workspace: the workspace module belongs to the project service and provides the interface implementations for all workspace features. It mainly contains these interfaces: fetching dictionary-table information, CRUD interfaces for workspace menus and component permissions,
4 | CRUD interfaces for workspace roles and users, and the component-favorites interface for the menu bar at the top-left of the workspace.
5 |
6 | User-facing features:
7 |
8 | | Level-1 module | Level-2 module | User feature |
9 | |-------------|----------- |---------------- |
10 | | Workspace | Workspace management (workspace admins only) | query, add, edit and delete workspace users |
11 | | | | menu-component permission settings for workspace roles |
12 | | | | home-page settings for workspace roles |
13 | | | Top-left menu bar on the home page | favorite components into the left list |
14 | | | | subscribe to favorited components, shown in the top navigation bar |
15 | | | | click a component to jump to its page |
16 |
17 | ### Restful/Service class features:
18 |
19 | | Core interface/class | Core functionality |
20 | |---------------------------|------------------------------|
21 | | DSSDictionaryRestful, DSSDictionaryServiceImpl | dictionary-information interface; queries the dictionary table by the key or parentKey parameter |
22 | | DSSWorkspacePrivRestful, DSSWorkspacePrivServiceImpl | viewing and editing the menu-component permission information of workspace roles |
23 | | DSSWorkspaceRestful, DSSWorkspaceServiceImpl | basic workspace interfaces, e.g. creating workspaces, listing workspaces, fetching menu-component permission information |
24 | | DSSWorkspaceRoleRestful, DSSWorkspaceRoleServiceImpl | querying and creating workspace roles |
25 | | DSSWorkspaceUserRestful, DSSWorkspaceUserServiceImpl | CRUD interfaces for workspace users |
26 |
27 | ### User-feature UML class diagram:
28 | 
29 |
30 | ## Database table design
31 |
32 | Workspace basic-information table:
33 | ```roomsql
34 | CREATE TABLE `dss_workspace` (
35 | `id` bigint(20) NOT NULL AUTO_INCREMENT,
36 | `name` varchar(255) DEFAULT NULL COMMENT 'workspace name',
37 | `label` varchar(255) DEFAULT NULL COMMENT 'label',
38 | `description` varchar(255) DEFAULT NULL COMMENT 'description',
39 | `create_by` varchar(255) DEFAULT NULL COMMENT 'creator',
40 | `create_time` datetime DEFAULT NULL COMMENT 'creation time',
41 | `department` varchar(255) DEFAULT NULL COMMENT 'department id',
42 | `product` varchar(255) DEFAULT NULL COMMENT 'product, reserved field',
43 | `source` varchar(255) DEFAULT NULL COMMENT 'reserved field',
44 | `last_update_time` datetime DEFAULT NULL,
45 | `last_update_user` varchar(30) DEFAULT NULL COMMENT 'last modifying user',
46 | `workspace_type` varchar(20) DEFAULT NULL COMMENT 'workspace type',
47 | PRIMARY KEY (`id`),
48 | UNIQUE KEY `name` (`name`)
49 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
50 | ```
51 | Workspace dictionary table:
52 | ```roomsql
53 | CREATE TABLE `dss_workspace_dictionary` (
54 | `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'primary key ID',
55 | `workspace_id` int(11) DEFAULT '0' COMMENT 'workspace ID; defaults to 0, meaning all workspaces have it',
56 | `parent_key` varchar(200) DEFAULT '0' COMMENT 'parent key',
57 | `dic_name` varchar(200) NOT NULL COMMENT 'name',
58 | `dic_name_en` varchar(300) DEFAULT NULL COMMENT 'name (English)',
59 | `dic_key` varchar(200) NOT NULL COMMENT 'key, i.e. a code; workspace keys start with w_, project keys with p_',
60 | `dic_value` varchar(500) DEFAULT NULL COMMENT 'value for the key',
61 | `dic_value_en` varchar(1000) DEFAULT NULL COMMENT 'value for the key (English)',
62 | `title` varchar(200) DEFAULT NULL COMMENT 'title',
63 | `title_en` varchar(400) DEFAULT NULL COMMENT 'title (English)',
64 | `url` varchar(200) DEFAULT NULL COMMENT 'url',
65 | `url_type` int(1) DEFAULT '0' COMMENT 'url type: 0 - internal system, 1 - external system; internal by default',
66 | `icon` varchar(200) DEFAULT NULL COMMENT 'icon',
67 | `order_num` int(2) DEFAULT '1' COMMENT 'sequence number',
68 | `remark` varchar(1000) DEFAULT NULL COMMENT 'remarks',
69 | `create_user` varchar(100) DEFAULT NULL COMMENT 'creator',
70 | `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'creation time',
71 | `update_user` varchar(100) DEFAULT NULL COMMENT 'updater',
72 | `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update time',
73 | PRIMARY KEY (`id`),
74 | UNIQUE KEY `idx_unique_workspace_id` (`workspace_id`,`dic_key`),
75 | KEY `idx_parent_key` (`parent_key`),
76 | KEY `idx_dic_key` (`dic_key`)
77 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='data dictionary table';
78 | ```
79 | Component menu information table
80 | ```roomsql
81 | CREATE TABLE `dss_workspace_menu` (
82 | `id` int(11) NOT NULL AUTO_INCREMENT,
83 | `name` varchar(64) DEFAULT NULL COMMENT 'menu name',
84 | `title_en` varchar(64) DEFAULT NULL COMMENT 'menu English title',
85 | `title_cn` varchar(64) DEFAULT NULL COMMENT 'menu Chinese title',
86 | `description` varchar(255) DEFAULT NULL COMMENT 'description',
87 | `is_active` tinyint(1) DEFAULT '1' COMMENT 'whether enabled',
88 | `icon` varchar(255) DEFAULT NULL COMMENT 'icon',
89 | `order` int(2) DEFAULT NULL COMMENT 'order',
90 | `create_by` varchar(255) DEFAULT NULL COMMENT 'creator',
91 | `create_time` datetime DEFAULT NULL COMMENT 'creation time',
92 | `last_update_time` datetime DEFAULT NULL,
93 | `last_update_user` varchar(30) DEFAULT NULL,
94 | PRIMARY KEY (`id`)
95 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
96 | ```
--------------------------------------------------------------------------------
/zh_CN/设计文档/Workspace/images/workspace_uml.drawio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/Workspace/images/workspace_uml.drawio.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/appconn/DSS调度系统接入.md:
--------------------------------------------------------------------------------
1 | # Integrating Scheduling Systems with DSS
2 |
3 | ## Background
4 |
5 | Many batch scheduling systems are applied in the big-data field today, such as Azkaban, Dophinscheduler and Airflow. DataSphereStudio (DSS) supports publishing workflows designed by users to different scheduling systems; publishing to Azkaban is currently supported by default. After the user completes the workflow DAG design in DSS, covering loading workflow resource files, setting workflow parameters, data import, data-quality checks, writing node code, designing visual reports, email output, uploading node resource files, setting run parameters and so on, the workflow can be debugged and executed in DSS; once all nodes are verified to execute correctly, it is published to the scheduling system, which schedules and runs it periodically according to the scheduled-task configuration.
6 |
7 | ### Publishing modes
8 |
9 | Integrating a new scheduling system into DSS uses the AppConn mechanism: for each scheduling system, the user defines a corresponding XXXSchedulerAppConn. The SchedulerAppConn defines the conversion integration specification and the structural integration specification; the conversion integration specification covers converting DSS project-level content and DSS workflow-level content into the third-party scheduling system. DSS-to-scheduler integration falls into the following two kinds:
10 |
11 | 1. Project-level publishing
12 |
13 | Converts all workflows within a project and uploads the converted content to the scheduling system as one package. The main interface is ProjectPreConversionRel, which defines the workflows within the project that need conversion.
14 |
15 | 2. Workflow-level publishing
16 |
17 | Converts at workflow granularity, packaging and uploading only the workflow's content to the scheduling system. Currently DSS workflow definitions are all stored as Json in BML files, while the workflow metadata is stored in the database.
18 |
19 |
20 | ## Main steps
21 |
22 | ### Parser
23 |
24 | JsonToFlowParser converts the workflow Json into a Workflow, the standard format in which DSS operates on workflows. It contains the workflow's node information, edge information, parent workflow, sub-workflows, resource files, property files, creation time, update time, workflow user, workflow proxy user, and workflow metadata such as name, ID, description, type and whether it is a root workflow. All of these are parsed from the Json content and turned into a Workflow object DSS can operate on, such as AzkabanWorkflow or DolphinSchedulerWorkflow.
25 |
26 | ### Converter
27 |
28 | Converts the DSS Workflow into a workflow the target scheduling system can recognize; every scheduling system has its own workflow definition, e.g. converting DSS workflow nodes into Azkaban job-format files, or into DolphinScheduler tasks. The conversion can also run in reverse, turning a scheduler's workflow into one DSS can load and display, converting workflow dependencies and node edges into the corresponding scheduler's dependencies. The Converter can also check whether the project's workflows contain nodes with duplicate names; the Azkaban scheduling system, for example, does not allow duplicate node names.
29 |
30 | WorkflowConVerter defines the workflow conversion output directory structure, including the workflow storage directory, workflow resource-file storage, and creation of sub-workflow storage directories. For Azkaban, the project-level conversion also includes creating the project conversion directory and, based on the workflows in the project, the workflow conversion directories. convertToRel implements converting a Workflow into a dolphinSchedulerWorkflow or SchedulisWorkFlow
31 |
32 | NodeConverter defines the node conversion output: e.g. Azkaban's ConvertNode turns a workflow node's content into the corresponding Job file content, converting the node's name, type, dependencies, execution command (parsed via linkis-jobtype), configuration parameters, labels and so on, stored in the format the Job file defines. DolphinScheduler's Converter turns a DSS node into a DolphinScheduler task and builds the Shell-type task's execution script, converting the DSS node content into the parameters required by the custom dss-dolphinscheduler-client.sh script.
33 |
34 | ```java
35 | addLine.accept("LINKIS_TYPE", dssNode.getNodeType()); // workflow node type
36 | addLine.accept("PROXY_USER", dssNode.getUserProxy()); // proxy user
37 | addObjectLine.accept("JOB_COMMAND", dssNode.getJobContent()); // execution command
38 | addObjectLine.accept("JOB_PARAMS", dssNode.getParams()); // node execution parameters
39 | addObjectLine.accept("JOB_RESOURCES", dssNode.getResources()); // node execution resource files
40 | addObjectLine.accept("JOB_SOURCE", sourceMap); // node source information
41 | addLine.accept("CONTEXT_ID", workflow.getContextID()); // context ID
42 | addLine.accept("LINKIS_GATEWAY_URL", Configuration.getGateWayURL()); // linkis gateway address
43 | addLine.accept("RUN_DATE", "${system.biz.date}"); // run-date variable
44 | ```
45 |
46 | ### Tuning
47 |
48 | Completes the overall adjustment operations before a project is published. The Azkaban implementation mainly sets the project path and the workflows' storage paths; at this point the project => workflow => sub-workflow chain can be operated on, which suits outside-in settings, e.g. a workflow's storage depends on the project's storage location, and a sub-workflow's storage on its parent workflow's location. FlowTuning computes the child nodes and automatically appends the end node.
49 |
50 | ## Scheduler AppConn implementation
51 |
52 | ### AbstractSchedulerAppConn
53 |
54 | The abstract class for scheduler AppConns: a newly integrated scheduling-system AppConn can extend this abstract class directly. It implements the SchedulerAppConn interface and extends AbstractOnlySSOAppConn, wiring up SSO login between DSS and the scheduling system. The already-integrated DolphinSchedulerAppConn and SchedulisAppConn, for example, both extend this abstract class.
55 |
56 | The abstract class contains two kinds of Standard
57 |
58 | The first is ConversionIntegrationStandard, which supports converting DSS orchestrations into the scheduling system's workflows
59 |
60 | The second is SchedulerStructureIntegrationStandard, the structural integration specification between DSS and scheduling systems
61 |
62 | ### ConversionIntegrationStandard
63 |
64 | The conversion integration specification for scheduling systems, containing the DSSToRelConversionService used to convert DSS orchestrations into scheduler workflows. Interfaces are also reserved to support converting a scheduling system's workflows into DSS orchestrations
65 |
66 | ### AbstractSchedulerStructureIntegrationStandard
67 |
68 | The scheduling-system organizational-structure integration specification, dedicated to the organizational-structure management of scheduling systems, mainly containing the project service and the orchestration service.
69 |
70 | ### ProjectService
71 |
72 | * Implements unified creation, update, deletion and duplicate-checking of projects.
73 | * Used to bridge DSS projects with the project system of the integrated third-party application tool, enabling collaborative project management.
74 | * If a scheduling system wants a shared project system with DSS, it must implement all interfaces of the project service in the structural integration specification.
75 |
76 | ### OrchestrationService
77 |
78 | The orchestration service is the unified orchestration specification for scheduling systems, and serves to:
79 |
80 | * unify the orchestration specification, dedicated to bridging the orchestration systems of DSS and the SchedulerAppConn (scheduling system).
81 | * For example: bridging DSS workflows and Schedulis workflows.
82 | * Note: if the integrated SchedulerAppConn system does not itself support managing workflows, this interface need not be implemented.
83 |
--------------------------------------------------------------------------------
/zh_CN/设计文档/appconn/appconn.md:
--------------------------------------------------------------------------------
DSS-AppConn Design Document
------
## Introduction
The original AppJoint mechanism defines a top-level AppJoint interface. A third-party system implements this interface and stores its connection information in a DSS table, and DSS implements a "proxy service" for communicating with the third-party system. During initialization, an instance of this service is created via reflection; using the connection information from the table, DSS can then use the "proxy service" to establish HTTP communication with, and thereby invoke, the third-party system. The AppJoint design has shortcomings, however: each integrated application instance requires its own AppJoint instance, and different instances of the same application are not logically related. AppConn is the top-level interface of DSS 1.0. In DSS 1.0, DSS's own orchestration modes, workflows, and single-task nodes are all AppConn instances; in addition, a third-party system integrating with DSS implements the AppConn interface so that DSS can invoke it. Logically, AppConn is a higher-level abstraction than AppJoint: an AppConn resembles a class of instances, whereas an AppJoint resembles a single instance.

### Module Overview
| Module | Sub-module | Description |
|-------------|-----------|----------------|
|dss-appconn|appconns|Implementations of the AppConn specifications for applications integrated with DSS|
| |dss-appconn-core|AppConn interfaces and base class definitions|
| |dss-appconn-loader|Instantiation, loading, and assembly of the compiled AppConn packages of integrated applications|
| |dss-appconn-manager|Interacts with the framework module to manage AppConn instance information|
| |dss-scheduler-appconn|Abstract AppConn definition for scheduling systems|
| |linkis-appconn-engineplugin|Implements the Linkis AppConn specifications, bridging the interaction between DSS AppConn and Linkis|

| Core interface/class | Core functionality |
|---------------------------|------------------------------|
| DSSDictionaryRestful, DSSDictionaryServiceImpl | Provide dictionary lookup interfaces, querying records from the dictionary table by key or parentKey |
| DSSWorkspacePrivRestful, DSSWorkspacePrivServiceImpl | Provide viewing and editing of menu-component permissions for workspace roles |
| DSSWorkspaceRestful, DSSWorkspaceServiceImpl | Provide basic workspace interfaces, such as creating a workspace, listing workspaces, and fetching menu-component permission information |
| DSSWorkspaceRoleRestful, DSSWorkspaceRoleServiceImpl | Provide query and creation interfaces for workspace roles |
| DSSWorkspaceUserRestful, DSSWorkspaceUserServiceImpl | Provide CRUD interfaces for workspace users |

### AppConn Architecture Diagrams




--------------------------------------------------------------------------------
/zh_CN/设计文档/appconn/images/appconn_class_uml.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/appconn/images/appconn_class_uml.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/appconn/images/appconn_load_process.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/appconn/images/appconn_load_process.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/appconn/images/appconn_structure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/appconn/images/appconn_structure.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/labelRoute/DSS标签路由转发.md:
--------------------------------------------------------------------------------
# DSS Label-Based Routing and Forwarding

## Introduction

Labels are a new concept introduced in DSS 1.0. They are used in many places in the DSS framework as the basis for routing and forwarding. For example, DSS's multi-environment support attaches an environment label, such as DEV or PROD, to every request; these labels determine which service a request is sent to, or which AppConn instance should be used.

## Label Format

A route-type label:

```
labels={"route":"dev"}
```

A GET-type request:

```
dssurl?labels=dev
```

These are the two most common label formats used in DSS.

## Label-Based Routing and Forwarding

1. Service forwarding

Service forwarding matches an HTTP request against labels and forwards it to the corresponding service instance. It is currently implemented mainly by DSS-GateWay: a DSS-GateWay plugin is defined and deployed into the lib directory of Linkis-GateWay, which then forwards DSS requests to DSS service instances.

* Labels are taken directly from the request. Current DSS requests all carry an environment label such as DEV or PROD: GET requests carry it in the URL parameters, while POST requests carry it in the request body as JSON. After intercepting a request destined for DSS, DSS-GateWay parses the label and service name from the request, then looks up matching service instances in a registry such as Eureka. By the matching rules, an exact service-name match is preferred; if several instances match at the same level, one is chosen at random. If no exact match exists, the request is sent to the instance with the highest similarity; if nothing matches at all, it is sent to the default DSS project service instance.

```
object DSSGatewayParser {
  // Regex rules for extracting DSS URLs; see the DSS front-end request URLs for reference
  val DSS_HEADER = normalPath(API_URL_PREFIX) + "rest_[a-zA-Z][a-zA-Z_0-9]*/(v\\d+)/dss/"
  val DSS_URL_REGEX = (DSS_HEADER + "([^/]+)/" + "([^/]+)/.+").r
  val DSS_URL_DEFAULT_REGEX = (DSS_HEADER + "([^/]+).+").r

  // Mainly used for AppConn applications, e.g. extracting Visualis URLs
  val APPCONN_HEADER = normalPath(API_URL_PREFIX) + "rest_[a-zA-Z][a-zA-Z_0-9]*/(v\\d+)/([^/]+)/"
  val APPCONN_URL_DEFAULT_REGEX = (APPCONN_HEADER + "([^/]+).+").r

}
```
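
The matching rules described above can be illustrated with a small, hypothetical selector. The scoring below (longest shared name prefix as "similarity") is only a stand-in for the gateway's real matching logic, and the `Candidate` shape is an assumption.

```java
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.Random;

public class LabelRouteSelector {

    public record Candidate(String serviceName, String label) {}

    public static Optional<Candidate> select(String wantedService, String wantedLabel,
                                             List<Candidate> candidates) {
        // 1. Prefer exact service-name + label matches; pick randomly among ties.
        List<Candidate> exact = candidates.stream()
                .filter(c -> c.serviceName().equals(wantedService)
                        && c.label().equalsIgnoreCase(wantedLabel))
                .toList();
        if (!exact.isEmpty()) {
            return Optional.of(exact.get(new Random().nextInt(exact.size())));
        }
        // 2. Otherwise fall back to the closest service-name match
        //    (longest shared prefix here, standing in for "highest similarity").
        return candidates.stream()
                .max(Comparator.comparingInt(c -> sharedPrefix(c.serviceName(), wantedService)));
    }

    private static int sharedPrefix(String a, String b) {
        int i = 0;
        while (i < a.length() && i < b.length() && a.charAt(i) == b.charAt(i)) i++;
        return i;
    }
}
```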

* RouteLabelParser. Introduce the Linkis label dependencies into the application and define the service's label name in the application's yaml. After the service starts, it registers with the registry carrying the label metadata, so every service in the registry has one or more labels. Then implement RouteLabelParser in the Linkis GateWay: after the GateWay intercepts a request, the parse step extracts the list of route-type labels, and Linkis-GateWay forwards the request according to its label-based forwarding rules. This is another kind of label-based forwarding, and it requires the Linkis label module:

```
<dependency>
    <groupId>org.apache.linkis</groupId>
    <artifactId>linkis-instance-label-client</artifactId>
    <version>${linkis.version}</version>
</dependency>
```

2. AppConn instance forwarding

DSS 1.0 introduces the AppConn concept, replacing the former AppJoint, so third-party systems are integrated as AppConns. The biggest feature of AppConn is its support for multiple application instances, each with its own label. A new table, dss_appconn_instance, is added to the database:

```
CREATE TABLE `dss_appconn_instance` (
  `id` int(20) NOT NULL AUTO_INCREMENT COMMENT 'primary key',
  `appconn_id` int(20) NOT NULL COMMENT 'primary key of the appconn',
  `label` varchar(128) NOT NULL COMMENT 'label of the instance',
  `url` varchar(128) DEFAULT NULL COMMENT 'url for accessing the third-party system',
  `enhance_json` varchar(1024) DEFAULT NULL COMMENT 'configuration in json format',
  `homepage_uri` varchar(255) DEFAULT NULL COMMENT 'homepage uri',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=66 DEFAULT CHARSET=utf8mb4 COMMENT='dss appconn instance table';
```

When an AppConn is deployed, its AppConn instance information is registered in DSS, and the table above records the instance's label, access URL, homepage address, configuration, and so on.

Once every AppConn instance has a label, each time the DSS framework interacts with an AppConn interface it looks up the matching instance object according to the labels of the current request, and sends the concrete request to that object:

```
// Get the label content from the execution parameters
val labels = engineExecutorContext.getProperties.get("labels").toString
// Find the matching AppConn instance from the label content and the loaded AppConn
getAppInstanceByLabels(labels, appConn) match {
  case Some(appInstance) =>
    val developmentIntegrationStandard = appConn.asInstanceOf[OnlyDevelopmentAppConn].getOrCreateDevelopmentStandard
    // Get the corresponding execution service from the development standard by instance
    val refExecutionService = developmentIntegrationStandard.getRefExecutionService(appInstance)
    // ... execute the request against refExecutionService ...
  case None => // no AppConn instance matches these labels
}
```

Labels in DSS should use EnvDSSLabel, which contains the label key and value and extends GenericLabel from the Linkis label system.

```
// Build a DSS label object; the default key is DSSEnv
DSSLabel envDSSLabel = new EnvDSSLabel(rollbackOrchestratorRequest.getLabels().getRoute());
```

--------------------------------------------------------------------------------
/zh_CN/设计文档/project/DSS工程模块设计文档.md:
--------------------------------------------------------------------------------
# DSS Project Design

In real development and production, a project is typically used to manage the development of one class of data application; a project can represent an actual data application, including workflows, single tasks, and so on. Projects under each workspace are isolated from one another.

## 1. Architecture Design

- DSS itself can create and manage projects, including create, view, modify, and delete operations. DSS also provides a project integration standard for integrating with external components.
- Through the project integration standard, a DSS project is created in sync with, and bound to, the corresponding project (or equivalent entity) in an external system.
- Through the project integration standard, an external system obtains the user's corresponding DSS project and achieves unified management of the underlying entities.
- Through the project integration standard, an external system obtains the project permissions the user holds in DSS and further restricts the operation permissions of its native projects.



### 2.1.1 Project Creation

Brief flow: create the DSS project => create the third-party application project => save the project-user permission relations

Flow diagram:



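A minimal sketch of this creation chain, using hypothetical collaborator types; error handling, rollback, and transactions are omitted for brevity.

```java
public class ProjectCreateFlow {

    interface DssProjectDao { long insert(String name, String createUser); }
    interface ThirdPartyProjectClient { long create(long dssProjectId, String name); }
    interface ProjectUserDao { void savePriv(long dssProjectId, String user, int priv); }

    private final DssProjectDao projectDao;
    private final ThirdPartyProjectClient appConnClient;
    private final ProjectUserDao projectUserDao;

    public ProjectCreateFlow(DssProjectDao p, ThirdPartyProjectClient c, ProjectUserDao u) {
        this.projectDao = p;
        this.appConnClient = c;
        this.projectUserDao = u;
    }

    public long create(String name, String user) {
        long dssProjectId = projectDao.insert(name, user);        // 1. create the DSS project
        appConnClient.create(dssProjectId, name);                 // 2. create the third-party project
        projectUserDao.savePriv(dssProjectId, user, /*owner*/ 1); // 3. save user permission relations
        return dssProjectId;
    }
}
```
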
### 2.1.2 Project Editing

Brief flow: edit the user permission relations => edit the third-party project information => edit the DSS project's basic information

Flow diagram:



### 2.1.3 Project Deletion

Brief flow: check deletion permission => delete the third-party application project => delete the DSS project

## 3. Database Table Design

```
-- dss project basic information table
dss_project:
id
name
source
description
user_id
username
workspace_id
create_time
create_by
update_time
update_by
org_id
visibility
is_transfer
initial_org_id
isArchive
pic
star_num
product
application_area
business
is_personal
create_by_str
update_by_str
dev_process
orchestrator_mod
visible

-- dss project and user permission relation table
dss_project_user:
id
project_id
username
workspace_id
priv
last_update_time

-- third-party application project and dss project relation table
dss_appconn_project_relation:
id
project_id
appconn_instance_id
appconn_instance_project_id

```

--------------------------------------------------------------------------------
/zh_CN/设计文档/project/images/project-create.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/project/images/project-create.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/project/images/project-edit.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/project/images/project-edit.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/project/images/project.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/project/images/project.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/publish/images/workflow-publish.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/publish/images/workflow-publish.png
--------------------------------------------------------------------------------
/zh_CN/设计文档/publish/工作流发布设计文档.md:
--------------------------------------------------------------------------------
## Workflow Publishing Design

## 1. Overview

In real production use, the development center is responsible for debugging business workflows; once debugging is complete, the workflow is published to the scheduling system for scheduled batch execution, automating the business.

### 2. Workflow Publishing Architecture

Publishing a workflow is a relatively complex process involving workflow export, import, and publishing.

##### 1. Publishing call-chain diagram:



##### 2. Key Steps

- Export: export from the development center (dev), covering the workflow and third-party nodes. The generated workflow JSON files are compressed into a zip package and uploaded to the BML file service via the BML service (see the sketch after this list).
- Import: import into the production center (prod). The zip file stored in the BML file service is downloaded, the JSON files are parsed to obtain the workflow orchestration information, and the result is saved to the database.
- Publish: the imported DSS workflow orchestration information is converted into orchestration information usable by the scheduling system, compressed into a zip package, and published to the WTSS scheduling system.
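
A minimal sketch of the packaging part of the export step, assuming the workflow JSON has already been generated; the BML upload itself is left out, since the real client API is richer than shown here.

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class WorkflowExporter {

    // Pack the workflow json into a zip, ready to be uploaded to BML.
    public static byte[] zipWorkflowJson(String flowName, String workflowJson) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ZipOutputStream zip = new ZipOutputStream(bytes)) {
            zip.putNextEntry(new ZipEntry(flowName + ".json"));
            zip.write(workflowJson.getBytes(StandardCharsets.UTF_8));
            zip.closeEntry();
        }
        return bytes.toByteArray();
    }
}
```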
--------------------------------------------------------------------------------
/zh_CN/设计文档/workflow/DSS工作流架构设计.md:
--------------------------------------------------------------------------------
## DSS Workflow Architecture Design

A workflow is "the automation of a business process, in whole or in part, in a computer application environment": an abstract, generalized description of a work process and the business rules between its steps. Workflows bring great convenience to data development work.

### 1. Workflow Architecture




### 2. Module Overview

##### dss-workflow-server

The core workflow module, covering workflow CRUD, workflow publishing, CRUD for workflow AppConn nodes, the workflow CS (context service) functionality, and RPC services exposed to other modules.

| Core service | Core functionality |
| ------------------- | -------- |
| DSSFlowService | Methods for CRUD of workflows and sub-workflows, plus workflow version management |
| WorkflowNodeService | Methods for workflow node CRUD, copy, import, export, etc. |
| PublishService | Workflow publishing functionality |
| DSSWorkflowReceiver | Receiver for RPC task calls |
| DSSWorkflowChooser | Chooser for RPC task calls |

##### dss-flow-execution-server

The workflow execution module, containing workflow execution features such as real-time execution, selected-node execution, rerun-on-failure, and workflow kill.

##### dss-workflow-sdk

The workflow toolkit module, providing workflow resource file parsing to other modules.

| Core class | Core functionality |
| ---------------- | -------------------------------------------- |
| FlowToJsonParser | Parses a flow into a DSSJsonFlow that DSS can use |
| json2flow | Parses workflow JSON into the required workflow object |

##### dss-workflow-common

The basic common workflow module, holding the shared entity classes abstracted out of the other workflow modules.

##### dss-linkis-node-execution

The module through which DSS calls Linkis to execute nodes, implementing the task execution interfaces provided by Linkis.

| Core class | Core functionality |
| ----------------------- | ------------------------------------------------------------ |
| LinkisNodeExecution | Core methods for running tasks and handling task status, results, cancellation, and logs |
| LinkisExecutionListener | Listens to task execution |
| DefaultRetryHandler | Provides a retry mechanism |

##### dss-workflow-conversion-standard

The workflow conversion standard module, defining the standard for converting DSS workflows into workflows usable by other external applications.

| Core class | Core functionality |
| ------------------------------------- | -------------- |
| ProjectConversionIntegrationStandard | Project conversion standard |
| WorkflowConversionIntegrationStandard | Workflow conversion standard |

--------------------------------------------------------------------------------
/zh_CN/设计文档/workflow/images/workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WeBankFinTech/DataSphereStudio-Doc/0b47d2b92d3a9a4294993cf4cc30a5f226ad8751/zh_CN/设计文档/workflow/images/workflow.png
--------------------------------------------------------------------------------