├── .gitignore
├── LICENSE
├── README.md
├── assembly
├── pom.xml
└── src
│ └── main
│ └── assembly
│ └── assembly.xml
├── bin
├── datalinked.sh
├── find-ides-home
├── ides-build.sh
├── ides-shell
├── ides-submit
├── load-ides-env
├── options-tool
├── package.sh
└── start-local-ides.sh
├── common
├── pom.xml
└── src
│ ├── main
│ ├── java
│ │ └── tech
│ │ │ └── ides
│ │ │ ├── conf
│ │ │ ├── ConfigBuilder.scala
│ │ │ ├── ConfigEntry.scala
│ │ │ ├── ConfigProvider.scala
│ │ │ ├── ConfigReader.scala
│ │ │ └── IdesConf.scala
│ │ │ ├── constants
│ │ │ ├── IdesEnvConstants.scala
│ │ │ └── ScriptConstants.scala
│ │ │ ├── core
│ │ │ ├── ApplicationSetting.scala
│ │ │ └── platform
│ │ │ │ └── Lifecycle.scala
│ │ │ ├── datasource
│ │ │ ├── BaseDataSource.scala
│ │ │ ├── DataSource.java
│ │ │ ├── DataSourceKey.scala
│ │ │ ├── DataTable.scala
│ │ │ ├── reader
│ │ │ │ └── DataReader.scala
│ │ │ └── writer
│ │ │ │ ├── DataWriter.scala
│ │ │ │ └── SaveMode.java
│ │ │ ├── doc
│ │ │ └── Document.scala
│ │ │ ├── exception
│ │ │ └── IdesException.scala
│ │ │ ├── extension
│ │ │ ├── ETPlugin.scala
│ │ │ ├── Extension.java
│ │ │ └── PluginType.scala
│ │ │ ├── package.scala
│ │ │ ├── rest
│ │ │ └── RestServer.scala
│ │ │ ├── runtime
│ │ │ └── SQLRuntime.scala
│ │ │ ├── script
│ │ │ └── ScriptTemplate.scala
│ │ │ ├── strategy
│ │ │ ├── PlatformFrameEnum.java
│ │ │ └── PlatformFrameImpl.java
│ │ │ └── utils
│ │ │ ├── PlatformUtils.scala
│ │ │ └── ScriptUtils.scala
│ └── resources
│ │ └── ides-version-info.properties
│ └── test
│ └── java
│ └── org
│ └── apache
│ └── spark
│ ├── IdesConfSuite.scala
│ └── ScriptUtilTest.scala
├── conf
├── application.yml
├── log4j.properties
└── log4j.properties.template
├── core
├── pom.xml
└── src
│ ├── main
│ └── java
│ │ ├── org
│ │ └── apache
│ │ │ ├── hadoop
│ │ │ └── hdfs
│ │ │ │ └── HdfsOperator.scala
│ │ │ └── spark
│ │ │ └── sql
│ │ │ ├── DataFrameUtils.scala
│ │ │ └── jdbc
│ │ │ ├── JdbcUpsert.scala
│ │ │ ├── JdbcUtil.scala
│ │ │ └── mysql
│ │ │ └── MySQLUpsert.scala
│ │ └── tech
│ │ └── ides
│ │ ├── cli
│ │ └── OptionsProcessor.java
│ │ ├── core
│ │ ├── IdesApp.scala
│ │ ├── ScriptQueryExecute.scala
│ │ ├── ScriptStage.scala
│ │ └── platform
│ │ │ └── PlatformManager.scala
│ │ ├── datasource
│ │ ├── BaseFileDataSource.scala
│ │ └── DataSourceFactory.scala
│ │ ├── dsl
│ │ ├── CaseChangeCharStream.java
│ │ ├── SyntaxErrorListener.java
│ │ ├── adaptor
│ │ │ ├── CommandAdaptor.scala
│ │ │ ├── ConnectAdaptor.scala
│ │ │ ├── DropAdaptor.scala
│ │ │ ├── LoadAdaptor.scala
│ │ │ ├── RunAdaptor.scala
│ │ │ ├── SaveAdaptor.scala
│ │ │ ├── ScriptDslAdaptor.scala
│ │ │ ├── SelectAdaptor.scala
│ │ │ └── SetAdaptor.scala
│ │ ├── listener
│ │ │ ├── PreProcessListener.scala
│ │ │ └── ScriptQueryExecListener.scala
│ │ ├── statement
│ │ │ └── SqlStatement.scala
│ │ └── utils
│ │ │ └── DslUtil.scala
│ │ ├── extension
│ │ └── ETPluginRegister.scala
│ │ ├── job
│ │ ├── ScriptJobInfo.scala
│ │ └── ScriptJobManager.scala
│ │ ├── metastore
│ │ └── ConnectMetaStore.scala
│ │ ├── runtime
│ │ └── SparkRuntime.scala
│ │ ├── storage
│ │ ├── ExternalStorage.scala
│ │ └── impl
│ │ │ ├── ParquetExternalStorage.scala
│ │ │ └── YamlFileExternalStorage.scala
│ │ ├── strategy
│ │ └── PlatformStrategyCenter.scala
│ │ └── tool
│ │ └── QueryTool.scala
│ └── test
│ └── java
│ └── tech
│ └── ides
│ └── core
│ └── test
│ └── ListenerTest.java
├── docs
└── imgs
│ ├── ides.png
│ ├── introduce.jpg
│ └── slack_icon.png
├── dsl
├── pom.xml
└── src
│ ├── main
│ ├── java
│ │ └── ides
│ │ │ └── dsl
│ │ │ └── parser
│ │ │ ├── IdesLexer.interp
│ │ │ ├── IdesLexer.java
│ │ │ ├── IdesLexer.tokens
│ │ │ ├── IdesParser.interp
│ │ │ ├── IdesParser.java
│ │ │ ├── IdesParser.tokens
│ │ │ ├── IdesParserBaseListener.java
│ │ │ ├── IdesParserBaseVisitor.java
│ │ │ ├── IdesParserListener.java
│ │ │ └── IdesParserVisitor.java
│ └── resources
│ │ ├── IdesLexer.g4
│ │ └── IdesParser.g4
│ └── test
│ └── java
│ └── ides
│ └── dsl
│ └── parser
│ ├── ListenerTest.java
│ └── VisitorTest.java
├── engine
├── pom.xml
└── src
│ ├── main
│ ├── java
│ │ ├── org
│ │ │ └── apache
│ │ │ │ └── spark
│ │ │ │ └── sql
│ │ │ │ └── jdbc
│ │ │ │ └── DataFrameWriterExtensions.scala
│ │ └── tech
│ │ │ └── ides
│ │ │ ├── cli
│ │ │ └── DatalinkedCli.scala
│ │ │ ├── datasource
│ │ │ └── impl
│ │ │ │ ├── CSVDataSource.scala
│ │ │ │ ├── HiveDataSource.scala
│ │ │ │ ├── JDBCDataSource.scala
│ │ │ │ └── TextDataSource.scala
│ │ │ ├── ets
│ │ │ ├── GetPartitionNum.scala
│ │ │ └── TableRepartition.scala
│ │ │ └── local
│ │ │ └── LocalIdesServiceApp.scala
│ ├── resources-local
│ │ ├── application.yml
│ │ └── webapp
│ │ │ └── WEB-INF
│ │ │ ├── static
│ │ │ ├── css
│ │ │ │ ├── bootstrap-table.min.css
│ │ │ │ ├── bootstrap.min.css
│ │ │ │ ├── fontawesome-all.min.css
│ │ │ │ └── index.css
│ │ │ ├── image
│ │ │ │ ├── favicon-16x16.png
│ │ │ │ └── favicon-32x32.png
│ │ │ ├── js
│ │ │ │ ├── FileSaver.min.js
│ │ │ │ ├── ace.min.js
│ │ │ │ ├── bootstrap-table-export.js
│ │ │ │ ├── bootstrap-table-export.min.js
│ │ │ │ ├── bootstrap-table-zh-CN.min.js
│ │ │ │ ├── bootstrap-table.min.js
│ │ │ │ ├── bootstrap.min.js
│ │ │ │ ├── bootstrapQ.min.js
│ │ │ │ ├── ext-beautify.js
│ │ │ │ ├── ext-beautify.min.js
│ │ │ │ ├── ext-language_tools.min.js
│ │ │ │ ├── ext-searchbox.min.js
│ │ │ │ ├── fontawesome-all.min.js
│ │ │ │ ├── jquery-3.5.1.min.js
│ │ │ │ ├── mode-sql.js
│ │ │ │ ├── popper.min.js
│ │ │ │ ├── sql-app.js
│ │ │ │ ├── sql.min.js
│ │ │ │ ├── tableExport.min.js
│ │ │ │ ├── theme-twilight.js
│ │ │ │ ├── theme-twilight.min.js
│ │ │ │ └── xlsx.core.min.js
│ │ │ └── webfonts
│ │ │ │ ├── fa-brands-400.eot
│ │ │ │ ├── fa-brands-400.svg
│ │ │ │ ├── fa-brands-400.ttf
│ │ │ │ ├── fa-brands-400.woff
│ │ │ │ ├── fa-brands-400.woff2
│ │ │ │ ├── fa-regular-400.eot
│ │ │ │ ├── fa-regular-400.svg
│ │ │ │ ├── fa-regular-400.ttf
│ │ │ │ ├── fa-regular-400.woff
│ │ │ │ ├── fa-regular-400.woff2
│ │ │ │ ├── fa-solid-900.eot
│ │ │ │ ├── fa-solid-900.svg
│ │ │ │ ├── fa-solid-900.ttf
│ │ │ │ ├── fa-solid-900.woff
│ │ │ │ └── fa-solid-900.woff2
│ │ │ └── templates
│ │ │ └── views
│ │ │ └── index.ssp
│ └── resources-online
│ │ ├── application.yml
│ │ └── webapp
│ │ └── WEB-INF
│ │ ├── static
│ │ ├── css
│ │ │ ├── bootstrap-table.min.css
│ │ │ ├── bootstrap.min.css
│ │ │ ├── fontawesome-all.min.css
│ │ │ └── index.css
│ │ ├── image
│ │ │ ├── favicon-16x16.png
│ │ │ └── favicon-32x32.png
│ │ ├── js
│ │ │ ├── FileSaver.min.js
│ │ │ ├── ace.min.js
│ │ │ ├── bootstrap-table-export.js
│ │ │ ├── bootstrap-table-export.min.js
│ │ │ ├── bootstrap-table-zh-CN.min.js
│ │ │ ├── bootstrap-table.min.js
│ │ │ ├── bootstrap.min.js
│ │ │ ├── bootstrapQ.min.js
│ │ │ ├── ext-beautify.js
│ │ │ ├── ext-beautify.min.js
│ │ │ ├── ext-language_tools.min.js
│ │ │ ├── ext-searchbox.min.js
│ │ │ ├── fontawesome-all.min.js
│ │ │ ├── jquery-3.5.1.min.js
│ │ │ ├── mode-sql.js
│ │ │ ├── popper.min.js
│ │ │ ├── sql-app.js
│ │ │ ├── sql.min.js
│ │ │ ├── tableExport.min.js
│ │ │ ├── theme-twilight.js
│ │ │ ├── theme-twilight.min.js
│ │ │ └── xlsx.core.min.js
│ │ └── webfonts
│ │ │ ├── fa-brands-400.eot
│ │ │ ├── fa-brands-400.svg
│ │ │ ├── fa-brands-400.ttf
│ │ │ ├── fa-brands-400.woff
│ │ │ ├── fa-brands-400.woff2
│ │ │ ├── fa-regular-400.eot
│ │ │ ├── fa-regular-400.svg
│ │ │ ├── fa-regular-400.ttf
│ │ │ ├── fa-regular-400.woff
│ │ │ ├── fa-regular-400.woff2
│ │ │ ├── fa-solid-900.eot
│ │ │ ├── fa-solid-900.svg
│ │ │ ├── fa-solid-900.ttf
│ │ │ ├── fa-solid-900.woff
│ │ │ └── fa-solid-900.woff2
│ │ └── templates
│ │ └── views
│ │ └── index.ssp
│ └── test
│ └── java
│ └── tech
│ └── ides
│ └── engine
│ └── test
│ ├── AnyDataTableTest.scala
│ ├── DataTableTest.scala
│ └── Person.java
├── external
├── ds-spark-excel
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ └── java
│ │ └── tech
│ │ └── ides
│ │ └── external
│ │ └── datasource
│ │ └── excel
│ │ └── ExcelDataSource.scala
└── hive-exec
│ ├── pom.xml
│ └── src
│ └── main
│ └── java
│ └── org
│ └── apache
│ └── hadoop
│ ├── hive
│ └── custom
│ │ ├── inputformat
│ │ └── MultiLineCSVInputFormat.java
│ │ └── serde
│ │ └── OpenCSVSerde.java
│ └── mapred
│ └── MultiLineCSVRecordReader.java
├── kubernetes
└── dockerfiles
│ ├── ides-shell
│ ├── Dockerfile
│ └── daemon
│ └── settings.xml
├── libs
├── antlr-4.7.1-complete.jar
└── mysql-connector-java-5.1.46.jar
├── pom.xml
├── python
└── __init__.py
├── repl
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── tech
│ │ └── ides
│ │ └── repl
│ │ ├── IdesILoop.scala
│ │ ├── Main.scala
│ │ ├── SignalUtils.scala
│ │ ├── Signaling.scala
│ │ └── deploy
│ │ └── IdesSubmit.scala
│ └── scala-2.11
│ └── org
│ └── apache
│ └── spark
│ └── repl
│ ├── ExecutorClassLoader.scala
│ ├── SparkExprTyper.scala
│ ├── SparkILoopInterpreter.scala
│ └── Utils.scala
└── restfulserver
├── pom.xml
└── src
├── main
└── java
│ └── tech
│ └── ides
│ └── rest
│ ├── IdesAppController.scala
│ ├── IdesServer.scala
│ ├── RestController.scala
│ ├── ScalatraBootstrap.scala
│ └── WebServer.scala
└── test
└── java
└── tech
└── ides
└── rest
└── IdesServerSuite.scala
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | .idea
3 | *.iml
4 | jars
5 | target
6 | logs
7 | *.log
8 | *.tgz
9 | spark-warehouse
10 | metastore_db
11 | ides-*bin-spark*
12 | .DS_Store
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | IDES (Intelligent Data Exploration Service)
2 | ---
3 | 
4 |
5 | **IDES** (*from DataLinked*) is an open-source project that unifies the `bigdata + AI` development process.
6 | It aims to simplify `data processing`, `data analysis`, `data mining`, `machine learning`, and related workflows.
7 |
8 | 
9 | ## Features
10 | * Load and save various data sources for data exploration (such as ETL)
11 | * Support standard SQL for data processing
12 | * Support native Scala/Spark code execution
13 | * ...
14 |
15 | ## Building Ides from Source
16 | Prerequisites for building Ides Server:
17 | - Unix-like environment (we use Linux, Mac OS X, Cygwin, WSL)
18 | - Git
19 | - Maven (we recommend version 3.5.4)
20 | - Java 8 or 11 (we recommend version 1.8)
21 |
22 | ```shell
23 | git clone https://github.com/bebee4java/ides.git
24 | cd ides
25 | ./bin/ides-build.sh
26 | ```
27 | This produces an executable distribution package such as `ides-1.0.0-bin-spark2.4.tgz`.
28 |
29 | *NOTE: The build is tested with Maven 3.5.4 and Java 1.8.0_271 on Mac OS X.*
30 |
31 | ## Deploy Ides Server
32 | - After unpacking the distribution (tgz) package, you can start an IDES server with bin/ides-shell.
33 | - For a Spark cluster, we recommend packaging a jar (e.g. with bin/package.sh) and running it in yarn-client or local mode.
34 |
35 | ## Documentation
36 | The documentation of IDES is available on the website https://www.yuque.com/ides/docs and in the docs/ directory of the source code.
37 | We recommend visiting https://www.yuque.com/ides for help; all documents are hosted on the `yuque` platform.
38 |
39 | ## Fork and Contribute
40 | This is an active open-source project. We are always open to people who want to use the system or contribute to it.
41 | Contact us if you are looking for implementation tasks that fit your skills.
42 |
43 | If you plan to contribute to this repository, we recommend creating an issue at our [Issue page](https://github.com/bebee4java/ides/issues) first,
44 | then submitting your source code or other contribution (e.g., documentation, a new idea, or a proposal).
45 |
46 |
47 |
48 | ## Slack
49 |
50 | You can also join us on Slack and become part of our community, to redefine the way we build data + AI pipelines today.
51 | [Join discussion group >>](https://join.slack.com/t/data-linked/shared_invite/zt-1dmksuk35-uiYrpUrWhQHPomqBHLd72w)
52 |
--------------------------------------------------------------------------------
/assembly/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <artifactId>DataLinked</artifactId>
7 |         <groupId>tech.ides</groupId>
8 |         <version>${env.project.version}</version>
9 |     </parent>
10 |     <modelVersion>4.0.0</modelVersion>
11 |     <packaging>pom</packaging>
12 |
13 |     <artifactId>ides-assembly-${spark.big.version}_${scala.binary.version}</artifactId>
14 |
15 |     <properties>
16 |         <assembly.package.name>ides-${project.version}-bin-spark${spark.big.version}</assembly.package.name>
17 |     </properties>
18 |
19 |     <build>
20 |         <plugins>
21 |             <plugin>
22 |                 <groupId>org.apache.maven.plugins</groupId>
23 |                 <artifactId>maven-assembly-plugin</artifactId>
24 |                 <version>3.1.0</version>
25 |                 <executions>
26 |                     <execution>
27 |                         <id>dist</id>
28 |
29 |                         <phase>package</phase>
30 |
31 |                         <goals>
32 |                             <goal>single</goal>
33 |                         </goals>
34 |
35 |                         <configuration>
36 |                             <descriptors>
37 |                                 <descriptor>src/main/assembly/assembly.xml</descriptor>
38 |                             </descriptors>
39 |
40 |                             <appendAssemblyId>false</appendAssemblyId>
41 |
42 |                             <outputDirectory>${session.root.dir}</outputDirectory>
43 |
44 |                             <finalName>${assembly.package.name}</finalName>
45 |                         </configuration>
46 |                     </execution>
47 |                 </executions>
48 |             </plugin>
49 |         </plugins>
50 |     </build>
51 |
52 | </project>
--------------------------------------------------------------------------------
/assembly/src/main/assembly/assembly.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
17 | <assembly>
18 |     <id>dist</id>
19 |     <formats>
20 |         <format>tgz</format>
21 |     </formats>
22 |     <includeBaseDirectory>false</includeBaseDirectory>
23 |
24 |     <files>
25 |         <file>
26 |             <source>${session.root.dir}/README.md</source>
27 |             <outputDirectory>${assembly.package.name}</outputDirectory>
28 |             <destName>README.md</destName>
29 |             <filtered>true</filtered>
30 |         </file>
31 |     </files>
32 |
33 |
34 |     <fileSets>
35 |         <fileSet>
36 |             <directory>${session.root.dir}/bin/</directory>
37 |
38 |             <outputDirectory>${assembly.package.name}/bin</outputDirectory>
39 |             <includes>
40 |                 <include>**/*</include>
41 |             </includes>
42 |             <fileMode>0755</fileMode>
43 |         </fileSet>
44 |
45 |         <fileSet>
46 |             <directory>${session.root.dir}/conf/</directory>
47 |
48 |             <outputDirectory>${assembly.package.name}/conf</outputDirectory>
49 |             <includes>
50 |                 <include>**/*</include>
51 |             </includes>
52 |         </fileSet>
53 |
54 |         <fileSet>
55 |             <directory>${session.root.dir}/jars/</directory>
56 |
57 |             <outputDirectory>${assembly.package.name}/jars</outputDirectory>
58 |             <includes>
59 |                 <include>**/*</include>
60 |             </includes>
61 |         </fileSet>
62 |
63 |         <fileSet>
64 |             <directory>${session.root.dir}/python/</directory>
65 |
66 |             <outputDirectory>${assembly.package.name}/python</outputDirectory>
67 |             <includes>
68 |                 <include>**/*</include>
69 |             </includes>
70 |         </fileSet>
71 |     </fileSets>
72 |
73 | </assembly>
74 |
--------------------------------------------------------------------------------
/bin/datalinked.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ####################################################
4 | # Submits a script job in one shot                 #
5 | # Example 1: sh datalinked.sh -e "select 1 as t;"  #
6 | # Example 2: sh datalinked.sh -f /home/ides/etl/etl.sql #
7 | ####################################################
8 |
9 |
10 | cygwin=false
11 | case "$(uname)" in
12 | CYGWIN*) cygwin=true;;
13 | esac
14 |
15 | # Enter posix mode for bash
16 | set -o posix
17 |
18 | if [ -z "${IDES_HOME}" ]; then
19 | source "$(dirname "$0")"/find-ides-home
20 | fi
21 |
22 | IDES_SUBMIT_OPTS="$IDES_SUBMIT_OPTS -Dscala.usejavacp=true"
23 | appName="ides-query-"$(date +"%Y%m%d%H")
24 |
25 | function main() {
26 | if $cygwin; then
27 | # Workaround for issue involving JLine and Cygwin
28 | # (see http://sourceforge.net/p/jline/bugs/40/).
29 | # If you're using the Mintty terminal emulator in Cygwin, may need to set the
30 | # "Backspace sends ^H" setting in "Keys" section of the Mintty options
31 | # (see https://github.com/sbt/sbt/issues/562).
32 | stty -icanon min 1 -echo > /dev/null 2>&1
33 | export IDES_SUBMIT_OPTS="$IDES_SUBMIT_OPTS -Djline.terminal=unix"
34 | "${IDES_HOME}"/bin/ides-submit --class tech.ides.cli.DatalinkedCli --name ${appName} "$@"
35 | stty icanon echo > /dev/null 2>&1
36 | else
37 | export IDES_SUBMIT_OPTS
38 | "${IDES_HOME}"/bin/ides-submit --class tech.ides.cli.DatalinkedCli --name ${appName} "$@"
39 | fi
40 | }
41 |
42 | # Copy restore-TTY-on-exit functions from Scala script so ides-shell exits properly even in
43 | # binary distribution of IDES where Scala is not installed
44 | exit_status=127
45 | saved_stty=""
46 |
47 | # restore stty settings (echo in particular)
48 | function restoreSttySettings() {
49 | stty $saved_stty
50 | saved_stty=""
51 | }
52 |
53 | function onExit() {
54 | if [[ "$saved_stty" != "" ]]; then
55 | restoreSttySettings
56 | fi
57 | exit $exit_status
58 | }
59 |
60 | # to reenable echo if we are interrupted before completing.
61 | trap onExit INT
62 |
63 | # save terminal settings
64 | saved_stty=$(stty -g 2>/dev/null)
65 | # clear on error so we don't later try to restore them
66 | if [[ ! $? ]]; then
67 | saved_stty=""
68 | fi
69 |
70 | main "$@" $IDES_HOME/bin/datalinked.sh
71 |
72 | # record the exit status lest it be overwritten:
73 | # then reenable echo and propagate the code.
74 | exit_status=$?
75 | onExit
--------------------------------------------------------------------------------
/bin/find-ides-home:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Short circuit if the user already has this set.
4 | if [ ! -z "${IDES_HOME}" ]; then
5 | exit 0
6 | else
7 | export IDES_HOME="$(cd "$(dirname "$0")"/..; pwd)"
8 | fi
--------------------------------------------------------------------------------
/bin/ides-build.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
4 | PROJECT_ROOT_DIR=$(cd $(dirname "$SHELL_FOLDER"); pwd)
5 |
6 | export MAVEN_OPTS="-Xmx512m"
7 |
8 | cd $PROJECT_ROOT_DIR
9 |
10 | rm -rf jars
11 |
12 | mode=$1
13 | version=$2
14 | echo "build start! mode: $mode, version: $version"
15 |
16 | usage() {
17 | echo "please input build mode:[shell/rest]"
18 | exit -1
19 | }
20 |
21 | if [ -z "${mode}" ]; then
22 | usage
23 | fi
24 |
25 | build_version="publish-version"
26 | if [ -n "${version}" -a "dev" == "${version}" ]; then
27 | build_version="dev-version"
28 | fi
29 |
30 | if [ "shell" == "${mode}" ]; then
31 | mvn -DskipTests clean package \
32 | -pl repl,assembly -am \
33 | -Pscala-2.11 \
34 | -Pspark-2.4.x \
35 | -Ponline \
36 | -P${build_version} \
37 | -Pbuild
38 | elif [ "rest" == "${mode}" ]; then
39 | mvn -DskipTests clean package \
40 | -pl repl,assembly -am \
41 | -Pscala-2.11 \
42 | -Pspark-2.4.x \
43 | -Ponline \
44 | -Prestful-server \
45 | -P${build_version} \
46 | -Pbuild
47 | else
48 | usage
49 | fi
50 |
--------------------------------------------------------------------------------
/bin/ides-shell:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Shell script for starting the IDES Shell REPL
4 |
5 | cygwin=false
6 | case "$(uname)" in
7 | CYGWIN*) cygwin=true;;
8 | esac
9 |
10 | # Enter posix mode for bash
11 | set -o posix
12 |
13 | if [ -z "${IDES_HOME}" ]; then
14 | source "$(dirname "$0")"/find-ides-home
15 | fi
16 |
17 | IDES_SUBMIT_OPTS="$IDES_SUBMIT_OPTS -Dscala.usejavacp=true"
18 |
19 | function main() {
20 | if $cygwin; then
21 | # Workaround for issue involving JLine and Cygwin
22 | # (see http://sourceforge.net/p/jline/bugs/40/).
23 | # If you're using the Mintty terminal emulator in Cygwin, may need to set the
24 | # "Backspace sends ^H" setting in "Keys" section of the Mintty options
25 | # (see https://github.com/sbt/sbt/issues/562).
26 | stty -icanon min 1 -echo > /dev/null 2>&1
27 | export IDES_SUBMIT_OPTS="$IDES_SUBMIT_OPTS -Djline.terminal=unix"
28 | "${IDES_HOME}"/bin/ides-submit --class tech.ides.repl.Main --name "Ides-shell" "$@"
29 | stty icanon echo > /dev/null 2>&1
30 | else
31 | export IDES_SUBMIT_OPTS
32 | "${IDES_HOME}"/bin/ides-submit --class tech.ides.repl.Main --name "Ides-shell" "$@"
33 | fi
34 | }
35 |
36 | # Copy restore-TTY-on-exit functions from Scala script so ides-shell exits properly even in
37 | # binary distribution of IDES where Scala is not installed
38 | exit_status=127
39 | saved_stty=""
40 |
41 | # restore stty settings (echo in particular)
42 | function restoreSttySettings() {
43 | stty $saved_stty
44 | saved_stty=""
45 | }
46 |
47 | function onExit() {
48 | if [[ "$saved_stty" != "" ]]; then
49 | restoreSttySettings
50 | fi
51 | exit $exit_status
52 | }
53 |
54 | # to reenable echo if we are interrupted before completing.
55 | trap onExit INT
56 |
57 | # save terminal settings
58 | saved_stty=$(stty -g 2>/dev/null)
59 | # clear on error so we don't later try to restore them
60 | if [[ ! $? ]]; then
61 | saved_stty=""
62 | fi
63 |
64 | main "$@" $IDES_HOME/bin/ides-shell
65 |
66 |
67 | # record the exit status lest it be overwritten:
68 | # then reenable echo and propagate the code.
69 | exit_status=$?
70 | onExit
71 |
--------------------------------------------------------------------------------
/bin/ides-submit:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | #set -x
4 |
5 | if [ -z "${IDES_HOME}" ]; then
6 | source "$(dirname "$0")"/find-ides-home
7 | fi
8 |
9 | . "${IDES_HOME}"/bin/load-ides-env
10 |
11 | # Find the java binary
12 | if [ -n "${JAVA_HOME}" ]; then
13 | RUNNER="${JAVA_HOME}/bin/java"
14 | else
15 | if [ "$(command -v java)" ]; then
16 | RUNNER="java"
17 | else
18 | echo "JAVA_HOME is not set" >&2
19 | exit 1
20 | fi
21 | fi
22 |
23 | # Find Spark jars.
24 | if [ -d "${IDES_HOME}/jars" ]; then
25 | IDES_JARS_DIR="${IDES_HOME}/jars"
26 | fi
27 |
28 | if [ ! -d "$IDES_JARS_DIR" ]; then
29 | echo "Failed to find IDES jars directory ($IDES_JARS_DIR)." 1>&2
30 | exit 1
31 | else
32 | LAUNCH_CLASSPATH="$IDES_JARS_DIR/*"
33 | fi
34 |
35 | . "${IDES_HOME}"/bin/options-tool "$@"
36 |
37 | JAVA_OPTS="-Xmx${DRIVER_MEM:-1g} -Dscala.usejavacp=true $DEBUG_OPTS"
38 |
39 | build_envs() {
40 | export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-""}"
41 | export YARN_CONF_DIR="${YARN_CONF_DIR:-""}"
42 | export SPARK_DIST_CLASSPATH="${SPARK_DIST_CLASSPATH:-""}"
43 | export IDES_CONF_DIR="${IDES_CONF_DIR:-"${IDES_HOME}"/conf}"
44 |
45 | printf "%s:" $IDES_CONF_DIR $HADOOP_CONF_DIR $YARN_CONF_DIR $SPARK_DIST_CLASSPATH
46 | }
47 |
48 | build_command() {
49 | # echo "$RUNNER" $JAVA_OPTS -cp "$LAUNCH_CLASSPATH" tech.ides.repl.Main "$@"
50 | printf "%s\0" $RUNNER $JAVA_OPTS -cp $(build_envs)"$LAUNCH_CLASSPATH" org.apache.spark.deploy.IdesSubmit "$@"
51 | printf "%d\0" $?
52 | }
53 |
54 | # Turn off posix mode since it does not allow process substitution
55 | set +o posix
56 | CMD=()
57 | while IFS= read -d '' -r ARG; do
58 | CMD+=("$ARG")
59 | done < <(build_command "$@")
60 |
61 | COUNT=${#CMD[@]}
62 | LAST=$((COUNT - 1))
63 | LAUNCHER_EXIT_CODE=${CMD[$LAST]}
64 |
65 | # Certain JVM failures result in errors being printed to stdout (instead of stderr), which causes
66 | # the code that parses the output of the launcher to get confused. In those cases, check if the
67 | # exit code is an integer, and if it's not, handle it as a special error case.
68 | if ! [[ $LAUNCHER_EXIT_CODE =~ ^[0-9]+$ ]]; then
69 | echo "${CMD[@]}" | head -n-1 1>&2
70 | exit 1
71 | fi
72 |
73 | if [ $LAUNCHER_EXIT_CODE != 0 ]; then
74 | exit $LAUNCHER_EXIT_CODE
75 | fi
76 |
77 | CMD=("${CMD[@]:0:$LAST}")
78 | exec "${CMD[@]}"
79 |
--------------------------------------------------------------------------------
/bin/load-ides-env:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script loads ides-env.sh if it exists, and ensures it is only loaded once.
4 | # ides-env.sh is loaded from IDES_CONF_DIR if set, or within the current directory's
5 | # conf/ subdirectory.
6 |
7 | # Figure out where IDES is installed
8 | if [ -z "${IDES_HOME}" ]; then
9 | source "$(dirname "$0")"/find-ides-home
10 | fi
11 |
12 | if [ -z "$IDES_ENV_LOADED" ]; then
13 | export IDES_ENV_LOADED=1
14 |
15 | export IDES_CONF_DIR="${IDES_CONF_DIR:-"${IDES_HOME}"/conf}"
16 |
17 | if [ -d "${IDES_CONF_DIR}" ]; then
18 | # Promote all variable declarations to environment (exported) variables
19 | shells=$(find $IDES_CONF_DIR -name "*.sh" -type f)
20 | for shell in ${shells};do
21 | set -a
22 | . "${shell}"
23 | set +a
24 | done
25 | fi
26 |
27 | fi
28 |
--------------------------------------------------------------------------------
/bin/options-tool:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | DEBUG=$(echo "$@" | grep "\-debug")
4 | if [ -n "${DEBUG}" ]; then
5 | PORT=$(echo "$@" | sed 's/.*-debug\([=\n \t\s]*\)\([0-9]*\).*/\2/g')
6 | if [ -z "${PORT}" ]; then
7 | echo "debug port error. e.g. -debug=8888 or -debug 8888"
8 | exit -1
9 | fi
10 | DEBUG_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=$PORT"
11 | export DEBUG_OPTS
12 | fi
13 |
14 | # Set the maximum heap size of the driver JVM
15 | if [[ "$@" =~ "--driver-memory" ]]; then
16 | DRIVER_MEM=$(echo "$@" | sed 's/.*--driver-memory\([=\n \t\s]*\)\([0-9KkBbMmGgTt]*\).*/\2/g' )
17 | if [ -z "${DRIVER_MEM}" ]; then
18 | echo "driver memory set error. e.g. --driver-memory 2g. default: 1g."
19 | exit -1
20 | fi
21 | export DRIVER_MEM
22 | fi
23 |
24 | if [[ "$@" =~ "spark.driver.memory" ]]; then
25 | DRIVER_MEM=$(echo "$@" | sed 's/.*spark.driver.memory=\([0-9KkBbMmGgTt]*\).*/\1/g' )
26 | if [ -z "${DRIVER_MEM}" ]; then
27 | echo "driver memory set error. e.g. --conf spark.driver.memory=2g. default: 1g."
28 | exit -1
29 | fi
30 | export DRIVER_MEM
31 | fi
--------------------------------------------------------------------------------
/bin/package.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
4 | PROJECT_ROOT_DIR=$(cd $(dirname "$SHELL_FOLDER"); pwd)
5 |
6 | export MAVEN_OPTS="-Xmx512m"
7 |
8 | cd $PROJECT_ROOT_DIR
9 |
10 | function package_2.4 {
11 | mvn -DskipTests clean package \
12 | -pl engine -am \
13 | -Ponline \
14 | -Pscala-2.11 \
15 | -Pspark-2.4.x \
16 | -Prestful-server \
17 | -Ppublish-version \
18 | -Pshade
19 | }
20 |
21 | function package_2.3 {
22 | mvn -DskipTests clean package \
23 | -pl engine -am \
24 | -Ponline \
25 | -Pscala-2.11 \
26 | -Pspark-2.3.x \
27 | -Prestful-server \
28 | -Ppublish-version \
29 | -Pshade
30 | }
31 |
32 | function usage() {
33 | echo "Usage: ./package.sh <2.3|2.4>"
34 | }
35 |
36 | case "$1" in
37 | 2.3)
38 | package_2.3
39 | ;;
40 | 2.4)
41 | package_2.4
42 | ;;
43 | *)
44 | usage
45 | exit 0
46 | esac
--------------------------------------------------------------------------------
/bin/start-local-ides.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | if [ -z "${IDES_HOME}" ]; then
4 | source "$(dirname "$0")"/find-ides-home
5 | fi
6 |
7 | . "${IDES_HOME}"/bin/load-ides-env
8 |
9 | # Find the java binary
10 | if [ -n "${JAVA_HOME}" ]; then
11 | RUNNER="${JAVA_HOME}/bin/java"
12 | else
13 | if [ "$(command -v java)" ]; then
14 | RUNNER="java"
15 | else
16 | echo "JAVA_HOME is not set" >&2
17 | exit 1
18 | fi
19 | fi
20 |
21 | # Find Spark jars.
22 | if [ -d "${IDES_HOME}/jars" ]; then
23 | IDES_JARS_DIR="${IDES_HOME}/jars"
24 | fi
25 |
26 | if [ ! -d "$IDES_JARS_DIR" ]; then
27 | echo "Failed to find IDES jars directory ($IDES_JARS_DIR)." 1>&2
28 | exit 1
29 | else
30 | LAUNCH_CLASSPATH="$IDES_JARS_DIR/*"
31 | fi
32 |
33 | build_envs() {
34 | export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-""}"
35 | export YARN_CONF_DIR="${YARN_CONF_DIR:-""}"
36 | export SPARK_DIST_CLASSPATH="${SPARK_DIST_CLASSPATH:-""}"
37 | export IDES_CONF_DIR="${IDES_CONF_DIR:-"${IDES_HOME}"/conf}"
38 |
39 | printf "%s:" $IDES_CONF_DIR $HADOOP_CONF_DIR $YARN_CONF_DIR $SPARK_DIST_CLASSPATH
40 | }
41 |
42 | default_args() {
43 | printf "%s " \
44 | "-spark.app.name" "ides" \
45 | "-spark.master" "local[*]" \
46 | "-spark.driver.memory" "512m" \
47 | "-ides.spark.service" "true" \
48 | "-ides.server.port" "9009" \
49 | "-ides.server.request-log.enable" "true"
50 | }
51 |
52 | #DEBUG_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8888"
53 | DRIVER_MEM=4g
54 |
55 | JAVA_OPTS="-Xmx${DRIVER_MEM:-1g} $DEBUG_OPTS"
56 |
57 | $RUNNER $JAVA_OPTS -cp $(build_envs)"$LAUNCH_CLASSPATH" tech.ides.core.IdesApp $(default_args) "$@"
--------------------------------------------------------------------------------
/common/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <artifactId>DataLinked</artifactId>
7 |         <groupId>tech.ides</groupId>
8 |         <version>${env.project.version}</version>
9 |     </parent>
10 |     <modelVersion>4.0.0</modelVersion>
11 |
12 |     <artifactId>ides-common-${spark.big.version}_${scala.binary.version}</artifactId>
13 |
14 |     <dependencies>
15 |         <dependency>
16 |             <groupId>org.apache.spark</groupId>
17 |             <artifactId>spark-core_${scala.binary.version}</artifactId>
18 |             <scope>provided</scope>
19 |         </dependency>
20 |         <dependency>
21 |             <groupId>org.apache.spark</groupId>
22 |             <artifactId>spark-sql_${scala.binary.version}</artifactId>
23 |             <scope>provided</scope>
24 |         </dependency>
25 |     </dependencies>
26 |
27 |     <build>
28 |         <sourceDirectory>src/main/java/</sourceDirectory>
29 |         <resources>
30 |             <resource>
31 |                 <directory>src/main/resources</directory>
32 |                 <includes>
33 |                     <include>ides-version-info.properties</include>
34 |                 </includes>
35 |
36 |                 <filtering>true</filtering>
37 |             </resource>
38 |         </resources>
39 |     </build>
40 |
41 | </project>
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/conf/ConfigProvider.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package tech.ides.conf
19 |
20 | import java.util.{Map => JMap}
21 |
22 | /**
23 | * A source of configuration values.
24 | * Modeled after org.apache.spark.internal.config.ConfigProvider
25 | */
26 | private[conf] trait ConfigProvider {
27 |
28 | def get(key: String): Option[String]
29 |
30 | }
31 |
32 | private[conf] class EnvProvider extends ConfigProvider {
33 |
34 | override def get(key: String): Option[String] = sys.env.get(key)
35 |
36 | }
37 |
38 | private[conf] class SystemProvider extends ConfigProvider {
39 |
40 | override def get(key: String): Option[String] = sys.props.get(key)
41 |
42 | }
43 |
44 | private[conf] class MapProvider(conf: JMap[String, String]) extends ConfigProvider {
45 |
46 | override def get(key: String): Option[String] = Option(conf.get(key))
47 |
48 | }
49 |
--------------------------------------------------------------------------------
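As a side note, here is a minimal sketch of how these providers can be chained to resolve a key; the chaining itself is an assumption for illustration (`IdesConf` owns the real lookup logic), and the object name is made up:

```scala
package tech.ides.conf

import java.util.{HashMap => JHashMap}

// Illustrative sketch only: resolve a key through a chain of providers,
// falling back from an explicit map to system properties to the environment.
object ConfigProviderSketch extends App {
  val settings = new JHashMap[String, String]()
  settings.put("ides.server.port", "9009")

  val providers: Seq[ConfigProvider] = Seq(new MapProvider(settings), new SystemProvider, new EnvProvider)

  // the first provider that knows the key wins
  val port: Option[String] = providers.view.flatMap(_.get("ides.server.port")).headOption
  println(port) // Some(9009)
}
```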
/common/src/main/java/tech/ides/constants/IdesEnvConstants.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.constants
2 |
3 | object IdesEnvConstants {
4 |
5 | val IDES_LOG_DIR = "IDES_LOG_DIR"
6 | val IDES_HOME = "IDES_HOME"
7 | val SPARK_HOME = "SPARK_HOME"
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/constants/ScriptConstants.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.constants
2 |
3 | import java.io.File
4 |
5 | /**
6 | * Common constants used by scripts
7 | * Created by songgr on 2020/11/05.
8 | */
9 | object ScriptConstants {
10 |
11 | val DIRECT_QUERY = "directQuery"
12 |
13 | val IMPL_CLASS = "implClass"
14 |
15 | val FILE_FORMAT = "fileFormat"
16 |
17 | val PARTITION_BY_COL = "partitionByCol"
18 |
19 | val EXTERNAL_STORAGE = "externalStorage"
20 |
21 | val PRIMARY_KEYS = "primaryKeys"
22 |
23 | val PATH_SEPARATOR = File.pathSeparator
24 |
25 | val PATH_SEPARATOR_SIZE = PATH_SEPARATOR.length
26 |
27 | val SHELL_USER = "idesShell"
28 |
29 | val TEST_USER = "idesTest"
30 |
31 | val DEFAULT_PATH_PREFIX = "/tmp/ides"
32 |
33 | val BATCH_JOB = "batch"
34 | val STREAM_JOB = "stream"
35 |
36 | // keywords of transform statements
37 | val TRANSFORM_SET = Set("select", "load", "set", "include", "register", "connect")
38 |
39 | // keywords of action statements
40 | val ACTION_SET = Set("save", "insert", "train", "run", "predict", "!")
41 |
42 | val KEY_WORDS = TRANSFORM_SET ++ ACTION_SET
43 |
44 | val __SESSION__ = "__session__"
45 |
46 | val RESOURCE = "resource"
47 |
48 |
49 | }
50 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/core/ApplicationSetting.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.core
2 |
3 | import tech.sqlclub.common.context.YamlContext
4 | import java.util
5 | import scala.collection.JavaConverters._
6 |
7 | /**
8 | * Reads the application.yml configuration and exposes each setting
9 | * Created by songgr on 2021/03/11.
10 | */
11 | object ApplicationSetting {
12 |
13 | private lazy val setting = YamlContext.getAnyRefMap("application")
14 |
15 | def getAllSettings: Map[String, AnyRef] = {
16 | setting.toMap
17 | }
18 |
19 | def getSetting(key: String): Option[AnyRef] = {
20 | setting.get(key)
21 | }
22 |
23 | def getList(key: String): List[Map[String, AnyRef]] = {
24 | getSetting(key).map(x => {
25 | x.asInstanceOf[util.List[util.Map[String, AnyRef]]].asScala.toList.map(y => y.asScala.toMap)
26 | }).getOrElse(List.empty)
27 | }
28 |
29 | def getString(key: String): String = {
30 | getSetting(key).map(x => x.asInstanceOf[String]).orNull
31 | }
32 |
33 | def getInt(key: String): Int = {
34 | getSetting(key).map(x => x.asInstanceOf[Int]).getOrElse(0)
35 | }
36 |
37 | def getDouble(key: String): Double = {
38 | getSetting(key).map(x => x.asInstanceOf[Double]).getOrElse(0D)
39 | }
40 |
41 | def getOptionString(key: String): Option[String] = {
42 | getSetting(key).map(x => x.asInstanceOf[String])
43 | }
44 |
45 | val SERVICE_IMPL = setting.get("service").map(_.asInstanceOf[String])
46 |
47 | val PLATFORM_LIFECYCLES = setting.get("platformLifecycles").map(_.asInstanceOf[util.List[String]].asScala)
48 |
49 | val SERVICE_LIFECYCLES = setting.get("serviceLifecycles").map(_.asInstanceOf[util.List[String]].asScala)
50 |
51 | }
52 |
--------------------------------------------------------------------------------
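A small usage sketch against the keys that conf/application.yml in this repo actually defines (`service`, `platformLifecycles`, `connectMetaData`); the object name is made up:

```scala
import tech.ides.core.ApplicationSetting

// Illustrative sketch only: read settings parsed from application.yml.
object ApplicationSettingSketch extends App {
  val serviceImpl = ApplicationSetting.getString("service")        // "tech.ides.rest.IdesServer"
  val connects    = ApplicationSetting.getList("connectMetaData")  // one registered jdbc connection named "test"
  val lifecycles  = ApplicationSetting.PLATFORM_LIFECYCLES.getOrElse(Seq.empty)

  println(s"service=$serviceImpl, connections=${connects.size}, platformLifecycles=${lifecycles.size}")
}
```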
/common/src/main/java/tech/ides/core/platform/Lifecycle.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.core.platform
2 |
3 | import tech.ides.conf.IdesConf
4 |
5 | trait Lifecycle
6 |
7 | /**
8 | * Lifecycle of the whole platform; rules can be hooked into the specified stages
9 | * Created by songgr on 2021/03/11.
10 | */
11 | trait PlatformLifecycle extends Lifecycle {
12 |
13 | def beforeSQLRuntime(idesConf: IdesConf)
14 |
15 | def afterSQLRuntime(idesConf: IdesConf)
16 |
17 | }
18 |
19 | /**
20 | * Lifecycle of the whole service; rules can be hooked into the specified stages
21 | * Created by songgr on 2021/03/11.
22 | */
23 | trait ServiceLifecycle extends Lifecycle {
24 | def beforeService(idesConf: IdesConf)
25 |
26 | def afterService(idesConf: IdesConf)
27 | }
--------------------------------------------------------------------------------
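A minimal sketch of a hook like those registered under `platformLifecycles` in conf/application.yml; this class is hypothetical (the real hooks include tech.ides.datasource.DataSourceFactory and tech.ides.extension.ETPluginRegister):

```scala
import tech.ides.conf.IdesConf
import tech.ides.core.platform.PlatformLifecycle

// Hypothetical lifecycle hook invoked around SQL runtime creation.
class LoggingPlatformLifecycle extends PlatformLifecycle {

  override def beforeSQLRuntime(idesConf: IdesConf): Unit =
    println("SQL runtime is about to be created")

  override def afterSQLRuntime(idesConf: IdesConf): Unit =
    println("SQL runtime is up")
}
```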
/common/src/main/java/tech/ides/datasource/BaseDataSource.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.datasource
2 |
3 | import java.io.File
4 |
5 | /**
6 | *
7 | * Created by songgr on 2020/10/26.
8 | */
9 | trait BaseDataSource {
10 |
11 | def pathSeparator = {
12 | File.pathSeparator
13 | }
14 |
15 | def fullFormat: String
16 |
17 | @Deprecated
18 | def shortFormat: String
19 |
20 | def aliasFormat: String = {
21 | shortFormat
22 | }
23 |
24 | }
25 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/datasource/DataSource.java:
--------------------------------------------------------------------------------
1 | package tech.ides.datasource;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * Data source annotation
10 | * @author songgr
11 | * @since 1.0.0
12 | * Created by songgr on 2020/10/25.
13 | */
14 | @Target(ElementType.TYPE)
15 | @Retention(RetentionPolicy.RUNTIME)
16 | public @interface DataSource {
17 | enum Method {
18 | /**
19 | * Data input
20 | */
21 | SOURCE,
22 | /**
23 | * Data output
24 | */
25 | SINK
26 | }
27 |
28 | /**
29 | * Supported data source methods: SOURCE and/or SINK
30 | * @return Method[]
31 | */
32 | Method[] types();
33 |
34 | /**
35 | * Data source name
36 | * @return String
37 | */
38 | String name() default "";
39 |
40 | /**
41 | * Data source description
42 | * @return String
43 | */
44 | String description() default "";
45 |
46 | /**
47 | * Whether this is a direct data source operation.
48 | * For example, direct SQL queries over JDBC require true; defaults to false.
49 | * @return boolean
50 | */
51 | boolean directDataSource() default false;
52 |
53 | /**
54 | * The version since which this data source is supported
55 | * @return String
56 | */
57 | String sinceVersion();
58 |
59 | }
60 |
--------------------------------------------------------------------------------
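A hedged sketch of how this annotation is used on a data source implementation; the class, format name and `???` bodies are placeholders (the real implementations live under engine/src/main/java/tech/ides/datasource/impl):

```scala
import tech.ides.datasource.DataSource.Method
import tech.ides.datasource.{DataSinkConfig, DataSource, DataSourceConfig, DataTable}
import tech.ides.datasource.reader.{DataReader, Reader}
import tech.ides.datasource.writer.{DataWriter, Writer}

// Hypothetical data source registered for both reading (SOURCE) and writing (SINK).
@DataSource(
  types = Array(Method.SOURCE, Method.SINK),
  name = "demo",
  description = "illustrative data source",
  sinceVersion = "1.0.0"
)
class DemoDataSource extends DataReader with DataWriter {

  override def fullFormat: String = "demo"

  override def shortFormat: String = fullFormat

  override def load(reader: Reader, config: DataSourceConfig): DataTable = ???

  override def save(writer: Writer, config: DataSinkConfig): Unit = ???
}
```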
/common/src/main/java/tech/ides/datasource/DataSourceKey.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.datasource
2 |
3 | import tech.ides.datasource.DataSource.Method
4 | import tech.ides.datasource.writer.SaveMode
5 |
6 | /**
7 | *
8 | * Created by songgr on 2020/11/05.
9 | */
10 |
11 | case class DataSourceKey(name:String, method:Method, direct:Boolean)
12 |
13 | case class DataSourceConfig(path: String, config: Map[String, String], dt: DataTable)
14 |
15 | case class DataSinkConfig(path: String, config: Map[String, String], mode: SaveMode, dt: DataTable)
16 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/datasource/reader/DataReader.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.datasource.reader
2 |
3 | import tech.ides.datasource.{BaseDataSource, DataSourceConfig, DataTable}
4 |
5 | /**
6 | * Data reading interface
7 | * Created by songgr on 2020/11/05.
8 | */
9 | trait DataReader extends BaseDataSource {
10 | def load(reader: Reader, config: DataSourceConfig): DataTable
11 | }
12 | abstract class Reader {
13 | def getReader[T:Manifest]: T
14 | }
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/datasource/writer/DataWriter.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.datasource.writer
2 |
3 | import tech.ides.datasource.{BaseDataSource, DataSinkConfig}
4 |
5 | /**
6 | * Data writing interface
7 | * Created by songgr on 2020/11/05.
8 | */
9 | trait DataWriter extends BaseDataSource {
10 | def save(writer: Writer, config: DataSinkConfig)
11 | }
12 |
13 | abstract class Writer {
14 | def getWriter[T:Manifest]: T
15 | }
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/datasource/writer/SaveMode.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package tech.ides.datasource.writer;
18 |
19 |
20 | /**
21 | * SaveMode is used to specify the expected behavior of saving a DataFrame to a data source.
22 | * Modeled after org.apache.spark.sql.SaveMode
23 | * @since 1.0.0
24 | */
25 | public enum SaveMode {
26 | /**
27 | * Append mode means that when saving a DataFrame to a data source, if data/table already exists,
28 | * contents of the DataFrame are expected to be appended to existing data.
29 | *
30 | * @since 1.3.0
31 | */
32 | Append,
33 | /**
34 | * Overwrite mode means that when saving a DataFrame to a data source,
35 | * if data/table already exists, existing data is expected to be overwritten by the contents of
36 | * the DataFrame.
37 | *
38 | * @since 1.3.0
39 | */
40 | Overwrite,
41 | /**
42 | * ErrorIfExists mode means that when saving a DataFrame to a data source, if data already exists,
43 | * an exception is expected to be thrown.
44 | *
45 | * @since 1.3.0
46 | */
47 | ErrorIfExists,
48 | /**
49 | * Ignore mode means that when saving a DataFrame to a data source, if data already exists,
50 | * the save operation is expected to not save the contents of the DataFrame and to not
51 | * change the existing data.
52 | *
53 | * @since 1.3.0
54 | */
55 | Ignore
56 | }
57 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/doc/Document.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.doc
2 |
3 | /**
4 | * Documentation classes for ET plugins
5 | * Created by songgr on 2022/02/11.
6 | */
7 |
8 | case class Document(docType: DocType, doc: String)
9 |
10 | sealed abstract class DocType
11 | (
12 | val docType: String
13 | )
14 |
15 | case object HtmlDoc extends DocType("html")
16 |
17 | case object MarkDownDoc extends DocType("md")
18 |
19 | case object TextDoc extends DocType("text")
20 |
21 |
22 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/exception/IdesException.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.exception
2 |
3 | import scala.collection.mutable.ArrayBuffer
4 |
5 | /**
6 | *
7 | * Created by songgr on 2020/06/08.
8 | */
9 | class IdesException(message: String, cause: Throwable)
10 | extends Exception(message, cause) {
11 |
12 | def this(message: String) = this(message, null)
13 | }
14 |
15 | object ExceptionUtil {
16 | def format_exception(e: Exception) = {
17 | (e.toString.split("\n") ++ e.getStackTrace.map(f => "\tat " + f.toString)).mkString("\n")
18 | }
19 |
20 | def format_throwable(e: Throwable) = {
21 | (e.toString.split("\n") ++ e.getStackTrace.map(f => "\tat " + f.toString)).mkString("\n")
22 | }
23 |
24 | def format_cause(e: Exception) = {
25 | var cause = e.asInstanceOf[Throwable]
26 | while (cause.getCause != null) {
27 | cause = cause.getCause
28 | }
29 | format_throwable(cause)
30 | }
31 |
32 | def format_full_exception(e: Exception) = {
33 | val buffer = new ArrayBuffer[String]
34 | var cause = e.asInstanceOf[Throwable]
35 | buffer += format_throwable(cause)
36 | while (cause.getCause != null) {
37 | cause = cause.getCause
38 | buffer += "caused by: " + format_throwable(cause)
39 | }
40 |
41 | buffer.mkString("\n")
42 | }
43 | }
--------------------------------------------------------------------------------
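A tiny sketch of the formatting helpers above (object name made up):

```scala
import tech.ides.exception.{ExceptionUtil, IdesException}

// Illustrative sketch only: format a nested exception chain.
object ExceptionFormatSketch extends App {
  val error = new IdesException("load failed", new IllegalStateException("connection refused"))

  println(ExceptionUtil.format_cause(error))          // innermost cause plus its stack trace
  println(ExceptionUtil.format_full_exception(error)) // whole chain, separated by "caused by:"
}
```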
/common/src/main/java/tech/ides/extension/ETPlugin.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.extension
2 |
3 | import tech.ides.datasource.DataTable
4 | import tech.ides.doc.{Document, TextDoc}
5 | import tech.ides.exception.IdesException
6 | import tech.sqlclub.common.reflect.Reflection
7 | import tech.sqlclub.common.utils.VersionUtils
8 |
9 | /**
10 | * ET plugin interface
11 | * Created by songgr on 2022/02/11.
12 | */
13 | trait ETPlugin {
14 |
15 | def exec(dataTable: DataTable, path: String, params: Map[String, String]): DataTable
16 |
17 | /**
18 | * Plugin type
19 | */
20 | def pluginType:PluginType
21 |
22 | /**
23 | * Plugin documentation
24 | */
25 | def doc: Document = Document(TextDoc, "")
26 |
27 | /**
28 | * Checks whether the plugin version is compatible with the current IDES version
29 | */
30 | def versionCompatible = {
31 | import tech.ides.IDES_VERSION
32 | val currentVersion = IDES_VERSION
33 | val annotation = Reflection.getAnnotation(getClass, classOf[Extension])
34 | val sinceVersion = annotation.sinceVersion()
35 |
36 | val compare = VersionUtils.compareVersion(currentVersion, sinceVersion)
37 |
38 | if (compare < 0 ) {
39 | throw new IdesException(s"This plugin is not supported in the current version! currentVersion:$currentVersion, plugin sinceVersion:$sinceVersion")
40 | }
41 |
42 | }
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/extension/Extension.java:
--------------------------------------------------------------------------------
1 | package tech.ides.extension;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * ET plugin annotation
10 | * @author songgr
11 | * @since 1.0.0
12 | * Created by songgr on 2022/02/11.
13 | */
14 | @Target(ElementType.TYPE)
15 | @Retention(RetentionPolicy.RUNTIME)
16 | public @interface Extension {
17 | /**
18 | * Plugin name
19 | * @return String
20 | */
21 | String name() default "";
22 |
23 | /**
24 | * Plugin description
25 | * @return String
26 | */
27 | String description() default "";
28 |
29 | /**
30 | * The version since which this plugin is supported
31 | * @return String
32 | */
33 | String sinceVersion();
34 | }
35 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/extension/PluginType.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.extension
2 |
3 | /**
4 | * Plugin type
5 | * Created by songgr on 2022/02/11.
6 | */
7 | case class PluginType(operatorType: OperatorType, processType: ProcessType)
8 |
9 | /**
10 | * Operator type
11 | */
12 | sealed abstract class OperatorType
13 |
14 | case object Action extends OperatorType
15 |
16 | case object Transform extends OperatorType
17 |
18 | /**
19 | * Data processing type
20 | */
21 | sealed abstract class ProcessType
22 |
23 | case object ETL extends ProcessType
24 |
25 | case object Algo extends ProcessType
26 |
27 | case object Analysis extends ProcessType
--------------------------------------------------------------------------------
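Putting `ETPlugin`, `@Extension` and `PluginType` together, a hypothetical plugin could look like the sketch below (the real plugins include tech.ides.ets.TableRepartition and tech.ides.ets.GetPartitionNum in the engine module):

```scala
import tech.ides.datasource.DataTable
import tech.ides.doc.{Document, MarkDownDoc}
import tech.ides.extension.{ETL, ETPlugin, Extension, PluginType, Transform}

// Hypothetical ET plugin that simply returns its input table.
@Extension(name = "Echo", description = "returns the input table unchanged", sinceVersion = "1.0.0")
class EchoPlugin extends ETPlugin {

  override def exec(dataTable: DataTable, path: String, params: Map[String, String]): DataTable = dataTable

  override def pluginType: PluginType = PluginType(Transform, ETL)

  override def doc: Document = Document(MarkDownDoc, "# Echo\nReturns the input table unchanged.")
}
```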
/common/src/main/java/tech/ides/package.scala:
--------------------------------------------------------------------------------
1 | package tech
2 |
3 | import java.util.Properties
4 |
5 | import tech.ides.exception.IdesException
6 |
7 | package object ides {
8 | private object IdesBuildInfo {
9 |
10 | val (
11 | ides_version: String,
12 | ides_branch: String,
13 | ides_revision: String,
14 | ides_build_user: String,
15 | ides_repo_url: String,
16 | ides_build_date: String,
17 | ides_srcChecksum:String,
18 | ides_protocVersion:String
19 | ) = {
20 |
21 | val resourceStream = Thread.currentThread().getContextClassLoader.
22 | getResourceAsStream("ides-version-info.properties")
23 | if (resourceStream == null) {
24 | throw new IdesException("Could not find ides-version-info.properties")
25 | }
26 |
27 | try {
28 | val unknownProp = ""
29 | val props = new Properties()
30 | props.load(resourceStream)
31 | (
32 | props.getProperty("version", unknownProp),
33 | props.getProperty("branch", unknownProp),
34 | props.getProperty("revision", unknownProp),
35 | props.getProperty("user", unknownProp),
36 | props.getProperty("url", unknownProp),
37 | props.getProperty("date", unknownProp),
38 | props.getProperty("srcChecksum", unknownProp),
39 | props.getProperty("protocVersion", unknownProp)
40 | )
41 | } catch {
42 | case e: Exception =>
43 | throw new IdesException("Error loading properties from ides-version-info.properties", e)
44 | } finally {
45 | if (resourceStream != null) {
46 | try {
47 | resourceStream.close()
48 | } catch {
49 | case e: Exception =>
50 | throw new IdesException("Error closing ides build info resource stream", e)
51 | }
52 | }
53 | }
54 | }
55 | }
56 |
57 | val IDES_VERSION = IdesBuildInfo.ides_version
58 | val IDES_BRANCH = IdesBuildInfo.ides_branch
59 | val IDES_REVISION = IdesBuildInfo.ides_revision
60 | val IDES_BUILD_USER = IdesBuildInfo.ides_build_user
61 | val IDES_REPO_URL = IdesBuildInfo.ides_repo_url
62 | val IDES_BUILD_DATE = IdesBuildInfo.ides_build_date
63 |
64 | val BANNER =
65 | """Welcome to
66 | ___ ________ _______ ________
67 | |\ \|\ ___ \|\ ___ \ |\ ____\
68 | \ \ \ \ \_|\ \ \ __/|\ \ \___|_
69 | \ \ \ \ \ \\ \ \ \_|/_\ \_____ \
70 | \ \ \ \ \_\\ \ \ \_|\ \|____|\ \
71 | \ \__\ \_______\ \_______\____\_\ \
72 | \|__|\|_______|\|_______|\_________\ version %s
73 | \|_________|
74 | """.format(IDES_VERSION)
75 |
76 |
77 | }
78 |
--------------------------------------------------------------------------------
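The build information loaded above is exposed as package-level constants, so a quick check is simply the following (object name made up):

```scala
import tech.ides.{BANNER, IDES_BUILD_DATE, IDES_VERSION}

// Illustrative sketch only: print the banner and the metadata filled in from ides-version-info.properties.
object VersionSketch extends App {
  println(BANNER)
  println(s"version=$IDES_VERSION, builtAt=$IDES_BUILD_DATE")
}
```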
/common/src/main/java/tech/ides/rest/RestServer.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.rest
2 |
3 | import tech.ides.conf.IdesConf
4 |
5 | /**
6 | *
7 | * Created by songgr on 2021/03/11.
8 | */
9 | trait RestServer {
10 |
11 | def start(conf: IdesConf):Unit
12 |
13 | def stop
14 |
15 | }
16 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/runtime/SQLRuntime.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.runtime
2 |
3 | import java.util
4 |
5 | /**
6 | *
7 | * Created by songgr on 2020/06/24.
8 | */
9 | trait SQLRuntime {
10 |
11 | def awaitTermination
12 |
13 | private[runtime] def createRuntime:SQLRuntime
14 |
15 | def params:util.Map[Any, Any]
16 |
17 | }
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/script/ScriptTemplate.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.script
2 |
3 | import org.apache.commons.lang3.StringUtils
4 | import org.joda.time.DateTime
5 | import tech.ides.conf.IdesConf
6 | import tech.ides.conf.IdesConf.SCRIPT_DEFAULT_DATEFORMAT
7 | import tech.sqlclub.common.utils.StringTemplate
8 |
9 | /**
10 | * Script template processing
11 | * Created by songgr on 2022/03/09.
12 | */
13 | object ScriptTemplate {
14 |
15 | val DATE_FORMAT = IdesConf.getOrCreate.get(SCRIPT_DEFAULT_DATEFORMAT)
16 |
17 | val PRESET_VARIABLES = Map[String, Any](
18 | "yesterday" -> DateTime.now().minusDays(1).toString(DATE_FORMAT),
19 | "today" -> DateTime.now().toString(DATE_FORMAT),
20 | "tomorrow" -> DateTime.now().plusDays(1).toString(DATE_FORMAT),
21 | "theDayBeforeYesterday" -> DateTime.now().minusDays(2).toString(DATE_FORMAT),
22 | "date" -> new DateTime(),
23 | "currentTimestamp" -> System.currentTimeMillis()
24 | )
25 |
26 | val NOESCAPE_LIST = List("##")
27 |
28 | def merge(sql: String, variables: Map[String, Any]): String = {
29 | // return the sql directly if it contains no '$'
30 | if (StringUtils.containsNone(sql, "$")) {
31 | return sql
32 | }
33 |
34 | StringTemplate.namedEvaluate(sql, PRESET_VARIABLES ++ variables, NOESCAPE_LIST)
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
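A hedged sketch of merging variables into a script. The `${name}` placeholder syntax is an assumption about `StringTemplate.namedEvaluate`, and `jobName` is a made-up user variable:

```scala
import tech.ides.script.ScriptTemplate

// Illustrative sketch only: preset variables (today, yesterday, ...) are merged with user-supplied ones.
object ScriptTemplateSketch extends App {
  val sql      = "select * from logs where dt = '${today}' and job = '${jobName}';"
  val rendered = ScriptTemplate.merge(sql, Map("jobName" -> "daily-etl"))
  println(rendered)
}
```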
/common/src/main/java/tech/ides/strategy/PlatformFrameEnum.java:
--------------------------------------------------------------------------------
1 | package tech.ides.strategy;
2 |
3 | /**
4 | * Platform framework enumeration
5 | * Created by songgr on 2022/02/20.
6 | */
7 | public enum PlatformFrameEnum {
8 | SPARK("spark"),
9 | FLINK("flink");
10 |
11 | public final String frame;
12 |
13 | private PlatformFrameEnum(String frame) {
14 | this.frame = frame;
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/strategy/PlatformFrameImpl.java:
--------------------------------------------------------------------------------
1 | package tech.ides.strategy;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * Annotation marking which platform framework a class implements
10 | * Created by songgr on 2022/02/20.
11 | */
12 | @Target(ElementType.TYPE)
13 | @Retention(RetentionPolicy.RUNTIME)
14 | public @interface PlatformFrameImpl {
15 | /**
16 | * The framework type this class implements
17 | * @return PlatformFrameEnum
18 | */
19 | PlatformFrameEnum frameName() default PlatformFrameEnum.SPARK;
20 | }
21 |
--------------------------------------------------------------------------------
/common/src/main/java/tech/ides/utils/PlatformUtils.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.utils
2 |
3 | import tech.ides.conf.IdesConf
4 | import tech.ides.conf.IdesConf.IDES_RUN_PLATFORM_FRAME
5 | import tech.ides.exception.IdesException
6 | import tech.ides.strategy.PlatformFrameImpl
7 | import tech.sqlclub.common.reflect.Reflection
8 |
9 | /**
10 | * Platform utilities
11 | * Created by songgr on 2022/02/25.
12 | */
13 | object PlatformUtils {
14 |
15 | def frameworkEquals(idesConf: IdesConf, clazz:Class[_]): Boolean = {
16 | if (!clazz.isAnnotationPresent(classOf[PlatformFrameImpl])) {
17 | throw new IdesException("the annotation of `PlatformFrameImpl` is required in class " + clazz.getCanonicalName)
18 | }
19 |
20 | val platformFrame = idesConf.get(IDES_RUN_PLATFORM_FRAME)
21 | val frameName = Reflection.getAnnotation(clazz, classOf[PlatformFrameImpl]).frameName()
22 | platformFrame.equalsIgnoreCase(frameName.frame)
23 | }
24 |
25 | }
26 |
--------------------------------------------------------------------------------
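A hypothetical illustration of how `@PlatformFrameImpl` and `PlatformUtils.frameworkEquals` fit together; the annotated class is made up, and whether the default configuration selects the spark frame is an assumption:

```scala
import tech.ides.conf.IdesConf
import tech.ides.strategy.{PlatformFrameEnum, PlatformFrameImpl}
import tech.ides.utils.PlatformUtils

// Hypothetical component declared as a Spark implementation.
@PlatformFrameImpl(frameName = PlatformFrameEnum.SPARK)
class SparkOnlyComponent

object PlatformCheckSketch extends App {
  val conf = IdesConf.getOrCreate
  // true only if the configured platform frame resolves to "spark"
  println(PlatformUtils.frameworkEquals(conf, classOf[SparkOnlyComponent]))
}
```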
/common/src/main/resources/ides-version-info.properties:
--------------------------------------------------------------------------------
1 | version=${pom.version}
2 | user=${user.name}
3 | revision=${version-info.scm.commit}
4 | branch=${version-info.scm.branch}
5 | date=${version-info.build.time}
6 | url=${version-info.scm.uri}
7 | srcChecksum=${version-info.source.md5}
8 | protocVersion=${protobuf.version}
--------------------------------------------------------------------------------
/common/src/test/java/org/apache/spark/IdesConfSuite.scala:
--------------------------------------------------------------------------------
1 | package org.apache.spark
2 |
3 | import org.scalatest.FunSuite
4 | import tech.ides.conf.IdesConf
5 | import tech.ides.conf.IdesConf._
6 |
7 | /**
8 | *
9 | * Created by songgr on 2020/06/09.
10 | */
11 | class IdesConfSuite extends FunSuite {
12 | test("test default conf value") {
13 | val conf = new IdesConf()
14 | val value = conf.get(IDES_SERVER_PORT)
15 |
16 | assert(IDES_SERVER_PORT.defaultValue.get == value)
17 |
18 | }
19 |
20 | test("test set conf value") {
21 | val conf = new IdesConf()
22 | conf.set(IDES_SERVER_PORT.key, "9009")
23 | val value = conf.get(IDES_SERVER_PORT)
24 |
25 | assert(9009 == value)
26 |
27 | }
28 |
29 | }
30 |
--------------------------------------------------------------------------------
/common/src/test/java/org/apache/spark/ScriptUtilTest.scala:
--------------------------------------------------------------------------------
1 | package org.apache.spark
2 |
3 | import tech.ides.utils.ScriptUtils.readLines
4 |
5 | /**
6 | *
7 | * Created by songgr on 2020/11/10.
8 | */
9 | object ScriptUtilTest {
10 |
11 | def main(args: Array[String]): Unit = {
12 | val script =
13 | """
14 | |val a = ''' ""s; ''';
15 | |
16 | |val a = "sddede;;;;dede;;;dede"; val aa='''sds''';select `sddse;dede`;
17 | |
18 | |/*
19 | |ddede
20 | |;;;
21 | |dede
22 | |;;;
23 | |*/
24 | |
25 | |val a=`a`; val b='s'; val c="s";
26 | |
27 | |--- select * from as t; val a=1;
28 | |
29 | |select "sdsd;" from tt;
30 | """.stripMargin
31 |
32 | val strings = readLines(script)
33 |
34 | println(strings.mkString("\n"))
35 |
36 | println(readLines("val a=1;/*;;;*/ val a=3;").mkString("\n"))
37 |
38 | println(readLines("--val a=1;/*;;;*/ val a=3;").mkString("\n"))
39 |
40 | println(readLines("select \\s \"\"\"llllkkkkk;;;\"\"\" as tl as tl;").mkString("\n"))
41 | }
42 |
43 | }
44 |
--------------------------------------------------------------------------------
/conf/application.yml:
--------------------------------------------------------------------------------
1 | application:
2 | # service implementation class
3 | service: tech.ides.rest.IdesServer
4 | # platform lifecycle classes
5 | platformLifecycles:
6 | - tech.ides.datasource.DataSourceFactory
7 | - tech.ides.extension.ETPluginRegister
8 | serviceLifecycles:
9 | - tech.ides.rest.ControlHandlerHook
10 |
11 | # registered database connection info
12 | connectMetaData:
13 | - name: test
14 | format: jdbc
15 | url: jdbc:mysql://127.0.0.1:3306/test?characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false
16 | driver: com.mysql.jdbc.Driver
17 | user: test
18 | password: test
--------------------------------------------------------------------------------
/conf/log4j.properties:
--------------------------------------------------------------------------------
1 | #
2 | # Licensed to the Apache Software Foundation (ASF) under one or more
3 | # contributor license agreements. See the NOTICE file distributed with
4 | # this work for additional information regarding copyright ownership.
5 | # The ASF licenses this file to You under the Apache License, Version 2.0
6 | # (the "License"); you may not use this file except in compliance with
7 | # the License. You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | #
17 | # Set everything to be logged to the console
18 | log4j.rootCategory=INFO, console,file
19 | log4j.appender.console=org.apache.log4j.ConsoleAppender
20 | log4j.appender.console.target=System.err
21 | log4j.appender.console.layout=org.apache.log4j.PatternLayout
22 | log4j.appender.console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %X{owner} %p %c{1}: %m%n
23 | log4j.appender.file=org.apache.log4j.RollingFileAppender
24 | log4j.appender.file.File=./logs/ides_engine.log
25 | log4j.appender.file.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
26 | log4j.appender.file.rollingPolicy.fileNamePattern=${IDES_HOME:.}/logs/ides_engine.%d.gz
27 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
28 | log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %X{owner} %p %c{1}: %m%n
29 | log4j.appender.file.MaxBackupIndex=5
30 | # Set the default spark-shell log level to WARN. When running the spark-shell, the
31 | # log level for this class is used to overwrite the root logger's log level, so that
32 | # the user can have different defaults for the shell and regular Spark apps.
33 | #log4j.logger.org.apache.spark=WARN
34 | #log4j.logger.org.apache.spark=WARN
35 | # Settings to quiet third party logs that are too verbose
36 | log4j.logger.org.spark_project.jetty=WARN
37 | log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
38 | log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
39 | log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
40 | log4j.logger.org.apache.parquet=ERROR
41 | log4j.logger.org.apache.spark.ContextCleaner=ERROR
42 | log4j.logger.org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator=ERROR
43 | log4j.logger.parquet=ERROR
44 | # SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
45 | log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
46 | log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
--------------------------------------------------------------------------------
/conf/log4j.properties.template:
--------------------------------------------------------------------------------
1 | #
2 | # Licensed to the Apache Software Foundation (ASF) under one or more
3 | # contributor license agreements. See the NOTICE file distributed with
4 | # this work for additional information regarding copyright ownership.
5 | # The ASF licenses this file to You under the Apache License, Version 2.0
6 | # (the "License"); you may not use this file except in compliance with
7 | # the License. You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | #
17 | # Set everything to be logged to the console
18 | log4j.rootCategory=INFO, console,file
19 | log4j.appender.console=org.apache.log4j.ConsoleAppender
20 | log4j.appender.console.target=System.err
21 | log4j.appender.console.layout=org.apache.log4j.PatternLayout
22 | log4j.appender.console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %X{owner} %p %c{1}: %m%n
23 | log4j.appender.file=org.apache.log4j.RollingFileAppender
24 | log4j.appender.file.File=./logs/ides_engine.log
25 | log4j.appender.file.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
26 | log4j.appender.file.rollingPolicy.fileNamePattern=./logs/ides_engine.%d.gz
27 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
28 | log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %X{owner} %p %c{1}: %m%n
29 | log4j.appender.file.MaxBackupIndex=5
30 | # Set the default spark-shell log level to WARN. When running the spark-shell, the
31 | # log level for this class is used to overwrite the root logger's log level, so that
32 | # the user can have different defaults for the shell and regular Spark apps.
33 | log4j.logger.org.apache.spark=WARN
34 | #log4j.logger.org.apache.spark=WARN
35 | # Settings to quiet third party logs that are too verbose
36 | log4j.logger.org.spark_project.jetty=WARN
37 | log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
38 | log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
39 | log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
40 | log4j.logger.org.apache.parquet=ERROR
41 | log4j.logger.org.apache.spark.ContextCleaner=ERROR
42 | log4j.logger.org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator=ERROR
43 | log4j.logger.parquet=ERROR
44 | # SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
45 | log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
46 | log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
--------------------------------------------------------------------------------
/core/pom.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
 3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 5 |     <parent>
 6 |         <artifactId>DataLinked</artifactId>
 7 |         <groupId>tech.ides</groupId>
 8 |         <version>${env.project.version}</version>
 9 |     </parent>
10 |     <modelVersion>4.0.0</modelVersion>
11 |
12 |     <artifactId>ides-core-${spark.big.version}_${scala.binary.version}</artifactId>
13 |
14 |     <dependencies>
15 |         <dependency>
16 |             <groupId>tech.ides</groupId>
17 |             <artifactId>ides-common-${spark.big.version}_${scala.binary.version}</artifactId>
18 |             <version>${project.version}</version>
19 |         </dependency>
20 |
21 |         <dependency>
22 |             <groupId>tech.ides</groupId>
23 |             <artifactId>ides-dsl-${spark.big.version}_${scala.binary.version}</artifactId>
24 |             <version>${project.version}</version>
25 |         </dependency>
26 |         <dependency>
27 |             <groupId>org.apache.spark</groupId>
28 |             <artifactId>spark-core_${scala.binary.version}</artifactId>
29 |             <version>${spark.version}</version>
30 |             <scope>provided</scope>
31 |         </dependency>
32 |         <dependency>
33 |             <groupId>org.apache.spark</groupId>
34 |             <artifactId>spark-sql_${scala.binary.version}</artifactId>
35 |             <version>${spark.version}</version>
36 |             <scope>provided</scope>
37 |         </dependency>
38 |     </dependencies>
39 |
40 | </project>
--------------------------------------------------------------------------------
/core/src/main/java/org/apache/hadoop/hdfs/HdfsOperator.scala:
--------------------------------------------------------------------------------
1 | package org.apache.hadoop.hdfs
2 |
3 | import java.io.{BufferedReader, InputStreamReader}
4 | import org.apache.commons.io.output.ByteArrayOutputStream
5 | import org.apache.hadoop.conf.Configuration
6 | import org.apache.hadoop.fs.{FSDataInputStream, FileStatus, FileSystem, Path}
7 | import org.apache.hadoop.io.IOUtils
8 | import org.apache.spark.deploy.SparkHadoopUtil
9 | import scala.collection.mutable.ArrayBuffer
10 |
11 | /**
12 | * HDFS file system operations helper
13 | * Created by songgr on 2021/02/23.
14 | */
15 | object HdfsOperator {
16 |
17 | lazy val hadoopConfiguration:Configuration = {
18 | val sparkHadoopUtil = SparkHadoopUtil.get
19 | if(sparkHadoopUtil != null){
20 | sparkHadoopUtil.conf
21 | } else new Configuration()
22 | }
23 |
24 | def getFileSystem(path:String):FileSystem = {
25 | val _path = new Path(path)
26 | getFileSystem(_path)
27 | }
28 |
29 | def getFileSystem(path: Path):FileSystem = {
30 | FileSystem.get(path.toUri, hadoopConfiguration)
31 | }
32 |
33 | def isDir(path: String):Boolean = {
34 | val fs = getFileSystem(path)
35 | fs.isDirectory(new Path(path))
36 | }
37 |
38 | def isFile(path: String):Boolean = {
39 | val fs = getFileSystem(path)
40 | fs.isFile(new Path(path))
41 | }
42 |
43 | def fileExists(path: Path):Boolean = {
44 | val fs = getFileSystem(path)
45 | fs.exists(path)
46 | }
47 |
48 | def fileExists(path: String):Boolean = {
49 | fileExists(new Path(path))
50 | }
51 |
52 | def createDir(path: String):Boolean = {
53 | val fs = getFileSystem(path)
54 | fs.mkdirs(new Path(path))
55 | }
56 |
57 | def deleteDir(path: String) = {
58 | val fs = getFileSystem(path)
59 | fs.delete(new Path(path), true)
60 | }
61 |
62 | def getFileStatus(path: String):FileStatus = {
63 | val fs = getFileSystem(path)
64 | val file = fs.getFileStatus(new Path(path))
65 | file
66 | }
67 |
68 | def listFiles(path: String): Seq[FileStatus] = {
69 | val fs = getFileSystem(path)
70 | fs.globStatus(new Path(path)).toSeq
71 | }
72 |
73 | def readFileToString(path: String): String = {
74 | val fs = getFileSystem(path)
75 | var br: BufferedReader = null
76 | var line: String = null
77 | val result = new ArrayBuffer[String]()
78 | try {
79 | br = new BufferedReader(new InputStreamReader(fs.open(new Path(path))))
80 | line = br.readLine()
81 | while (line != null) {
82 | result += line
83 | line = br.readLine()
84 | }
85 | } finally {
86 | if (br != null) br.close()
87 | if (fs != null) fs.close()
88 | }
89 | result.mkString("\n")
90 | }
91 |
92 | def readFileToBytes(fileName: String): Array[Byte] = {
93 | val fs = getFileSystem(fileName)
94 | val src: Path = new Path(fileName)
95 | var in: FSDataInputStream = null
96 | try {
97 | in = fs.open(src)
98 | val byteArrayOut = new ByteArrayOutputStream()
99 | IOUtils.copyBytes(in, byteArrayOut, 1024, true)
100 | byteArrayOut.toByteArray
101 | } finally {
102 | if (null != in) in.close()
103 | if (fs != null) fs.close()
104 | }
105 | }
106 |
107 | }
108 |
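
Usage sketch (illustrative only, not part of the file above; the HDFS URIs are assumptions):

    // read a small text file and list a directory glob
    val text = HdfsOperator.readFileToString("hdfs://namenode:8020/tmp/demo.txt")
    HdfsOperator.listFiles("hdfs://namenode:8020/tmp/*").foreach(f => println(f.getPath))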
--------------------------------------------------------------------------------
/core/src/main/java/org/apache/spark/sql/DataFrameUtils.scala:
--------------------------------------------------------------------------------
1 | package org.apache.spark.sql
2 |
3 | import org.apache.spark.sql.types._
4 |
5 | /**
6 | *
7 | * Created by songgr on 2021/02/15.
8 | */
9 | object DataFrameUtils {
10 |
11 | /**
12 | * Unwrap a Row field to its value according to the given data type
13 | * @param row the row
14 | * @param name field name
15 | * @param dataType field data type
16 | * @return Any
17 | */
18 | def unwrapRow(row: Row, name:String, dataType: DataType):Any = {
19 | val i = row.fieldIndex(name)
20 | dataType match {
21 | case IntegerType => row.getInt(i)
22 | case LongType => row.getLong(i)
23 | case DoubleType => row.getDouble(i)
24 | case FloatType => row.getFloat(i)
25 | case ShortType => row.getShort(i)
26 | case ByteType => row.getByte(i)
27 | case BooleanType => row.getBoolean(i)
28 | case StringType => row.getString(i)
29 | case BinaryType => row.getAs[Array[Byte]](i)
30 | case TimestampType => row.getAs[java.sql.Timestamp](i)
31 | case DateType => row.getAs[java.sql.Date](i)
32 | case t: DecimalType => row.getDecimal(i)
33 | case ArrayType(dt, _) => row.getSeq[AnyRef](i).toArray
34 | case _ => throw new IllegalArgumentException(
35 | s"Can't unwrap non-null value for field $name, $i"
36 | )
37 | }
38 | }
39 |
40 | /**
41 | * Get the value of the given column in the first row
42 | * @param table the data table
43 | * @param name field name
44 | * @return `Option[Any]`
45 | */
46 | def getFieldHeadValueByName(table:DataFrame, name:String):Option[Any] = {
47 | val fieldOption = table.schema.find(f => f.name == name)
48 |
49 | if (fieldOption.isEmpty) return None
50 |
51 | val dataType = fieldOption.get.dataType
52 | val results = table.select(name).head(1).map(row => unwrapRow(row, name, dataType))
53 | results.headOption
54 | }
55 |
56 | /**
57 | * Collect the whole column by field name; on large data the driver may OOM, use with caution!
58 | * @param table the data table
59 | * @param name field name
60 | * @return `List[Any]`
61 | */
62 | def getFieldValueByName(table:DataFrame, name:String):List[Any] = {
63 | val fieldOption = table.schema.find(f => f.name == name)
64 |
65 | if (fieldOption.isEmpty) return List()
66 | val dataType = fieldOption.get.dataType
67 |
68 | table.select(name).collect().map(row => unwrapRow(row, name, dataType)).toList
69 | }
70 |
71 | /**
72 | * Collect all column data; on large data the driver may OOM, use with caution!
73 | * @param table the data table
74 | * @return `List[Seq[Any]]`
75 | */
76 | def getAllFieldValue(table:DataFrame):List[Seq[Any]] = {
77 | val schema = table.schema.map(f => (f.name, f.dataType))
78 | table.collect().map{
79 | row =>
80 | schema.map(it => unwrapRow(row, it._1, it._2))
81 | }.toList
82 | }
83 | }
84 |
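
Usage sketch (illustrative only; assumes a live SparkSession named `spark`):

    import org.apache.spark.sql.DataFrameUtils

    val df = spark.range(3).toDF("id")
    // Some(0): first value of column "id", unwrapped via its LongType
    println(DataFrameUtils.getFieldHeadValueByName(df, "id"))
    // List(0, 1, 2): the whole column collected to the driver
    println(DataFrameUtils.getFieldValueByName(df, "id"))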
--------------------------------------------------------------------------------
/core/src/main/java/org/apache/spark/sql/jdbc/JdbcUpsert.scala:
--------------------------------------------------------------------------------
1 | package org.apache.spark.sql.jdbc
2 |
3 | import java.sql.Connection
4 | import org.apache.spark.sql.Row
5 | import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
6 | import org.apache.spark.sql.types.StructType
7 |
8 | /**
9 | * JDBC upsert interface definition
10 | * Created by songgr on 2021/03/04.
11 | */
12 | trait JdbcUpsert {
13 |
14 | /**
15 | * Execute the upsert
16 | * @param conn database connection
17 | * @param upsertReq upsert request object
18 | * @param upsertRow upsert statement row
19 | * @param iterator rows to be written
20 | * @param batchSize batch size
21 | * @return Boolean
22 | */
23 | def execute(conn: Connection, upsertReq: UpsertReq, upsertRow:UpsertRow, iterator: Iterator[Row], batchSize:Int):Boolean
24 |
25 | /**
26 | * Prepare the SQL statement row
27 | * @param upsertReq upsert request object
28 | * @param isCaseSensitive whether identifiers are case sensitive
29 | * @return UpsertRow
30 | */
31 | def preparedStatementSQLRow(upsertReq: UpsertReq, isCaseSensitive: Boolean):UpsertRow
32 |
33 | }
34 |
35 | /**
36 | * Upsert request object
37 | * @param option jdbc connection info
38 | * @param schema table schema
39 | * @param dialect jdbc dialect
40 | * @param jdbcNullType jdbc null types
41 | * @param primaryKeys primary key(s) used for updates, may consist of multiple fields
42 | */
43 | case class UpsertReq(option: JDBCOptions, schema: StructType, dialect: JdbcDialect, jdbcNullType:Array[Int], primaryKeys: Option[Seq[String]])
44 |
45 | /**
46 | * Upsert row object
47 | * @param upsertSQL upsert statement
48 | * @param cols list of column names, one per placeholder
49 | */
50 | case class UpsertRow(upsertSQL:String, cols:Seq[String])
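
For orientation, a hedged sketch of the SQL shape a MySQL-flavored preparedStatementSQLRow typically produces (this is not the project's MySQLUpsert implementation, just an illustration of the contract above):

    // builds e.g. "INSERT INTO t (a,b) VALUES (?,?) ON DUPLICATE KEY UPDATE b=?"
    def mysqlUpsertSqlSketch(table: String, cols: Seq[String], keys: Seq[String]): UpsertRow = {
      val placeholders = cols.map(_ => "?").mkString(",")
      val insert = s"INSERT INTO $table (${cols.mkString(",")}) VALUES ($placeholders)"
      val updateCols = cols.filterNot(keys.contains)
      val update = updateCols.map(c => s"$c=?").mkString(",")
      UpsertRow(s"$insert ON DUPLICATE KEY UPDATE $update", cols ++ updateCols)
    }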
--------------------------------------------------------------------------------
/core/src/main/java/org/apache/spark/sql/jdbc/JdbcUtil.scala:
--------------------------------------------------------------------------------
1 | package org.apache.spark.sql.jdbc
2 |
3 | import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JdbcUtils}
4 | import org.apache.spark.sql.types.{DataType, StructType}
5 |
6 | /**
7 | *
8 | * Created by songgr on 2021/03/04.
9 | */
10 | object JdbcUtil {
11 |
12 | def getJdbcType(dt: DataType, dialect: JdbcDialect): JdbcType = {
13 | dialect.getJDBCType(dt).orElse(JdbcUtils.getCommonJDBCType(dt)).getOrElse(
14 | throw new IllegalArgumentException(s"Can't get JDBC type for ${dt.simpleString}")
15 | )
16 | }
17 |
18 | def getDialect(option: JDBCOptions) = {
19 | JdbcDialects.get(option.url)
20 | }
21 |
22 | def jdbcNullType(schema: StructType, option: JDBCOptions) = {
23 | val dialect = getDialect(option)
24 | schema.fields.map { field =>
25 | getJdbcType(field.dataType, dialect).jdbcNullType
26 | }
27 | }
28 |
29 | }
30 |
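
A small example of how these helpers combine (illustrative; the JDBC URL and table name are assumptions):

    import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
    import org.apache.spark.sql.types._

    val opts = new JDBCOptions(Map("url" -> "jdbc:mysql://127.0.0.1:3306/test", "dbtable" -> "t1"))
    val schema = StructType(Seq(StructField("id", LongType), StructField("name", StringType)))
    // one java.sql.Types code per field, used when binding nulls in prepared statements
    val nullTypes = JdbcUtil.jdbcNullType(schema, opts)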
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/cli/OptionsProcessor.java:
--------------------------------------------------------------------------------
1 | package tech.ides.cli;
2 |
3 | import org.apache.commons.cli.*;
4 | import tech.ides.exception.IdesException;
5 | import java.util.*;
6 | import java.util.Properties;
7 |
8 | /**
9 | * CLI options processing class (modeled on the Hive CLI)
10 | // * @see org.apache.hadoop.hive.cli.OptionsProcessor
11 | * Created by songgr on 2021/03/16.
12 | */
13 | public class OptionsProcessor {
14 |
15 | private final Options options = new Options();
16 | private final String cmdLineSyntax = "DataLinked";
17 | private CommandLine commandLine;
18 |
19 | @SuppressWarnings("static-access")
20 | public OptionsProcessor() {
21 |
22 | // Substitution option -d, --define
23 | options.addOption(OptionBuilder
24 | .withValueSeparator()
25 | .hasArgs(2)
26 | .withArgName("key=value")
27 | .withLongOpt("define")
28 | .withDescription("Environment variable substitution to apply to datalinked commands. e.g. -d A=B or --define A=B")
29 | .create('d'));
30 |
31 | // [-v|--verbose]
32 | options.addOption(new Option("v", "verbose", false, "Verbose mode (echo executed SQL to the console)"));
33 |
34 | // [-h|--help]
35 | options.addOption(new Option("h", "help", false, "Print help information"));
36 |
37 | // -e 'quoted-query-string'
38 | options.addOption(OptionBuilder
39 | .hasArg()
40 | .withArgName("quoted-query-string")
41 | .withDescription("SQL from command line")
42 | .create('e'));
43 |
44 | // -f
45 | options.addOption(OptionBuilder
46 | .hasArg()
47 | .withValueSeparator(',')
48 | .withArgName("filename")
49 | .withDescription("SQL from files")
50 | .create('f'));
51 | }
52 |
53 | public String getValue(char opt) {
54 | return commandLine.getOptionValue(opt);
55 | }
56 |
57 | public boolean hasOption(char opt) {
58 | return commandLine.hasOption(opt);
59 | }
60 |
61 | public Map getOptionProperties(String opt) {
62 | Properties properties = commandLine.getOptionProperties(opt);
63 | return new HashMap((Map) properties);
64 | }
65 |
66 | public boolean isOption(String opt) {
67 | return options.hasOption(opt);
68 | }
69 |
70 | public boolean hasArg(String opt) {
71 | if ( !isOption(opt) ) return false;
72 | return options.getOption(opt).hasArg();
73 | }
74 |
75 | public Option getOption(String opt) {
76 | return options.getOption(opt);
77 | }
78 |
79 | public void process(String[] args) throws Exception {
80 | BasicParser parser = new BasicParser();
81 | commandLine = parser.parse(options, args);
82 |
83 | if (commandLine.hasOption('h')) {
84 | HelpFormatter hf = new HelpFormatter();
85 | hf.printHelp(cmdLineSyntax, "", options, "");
86 | System.exit(0);
87 | }
88 |
89 | if (commandLine.hasOption('f') && commandLine.hasOption('e')) {
90 | throw new IdesException("only one of the options -e and -f can be used");
91 | }
92 | }
93 | }
94 |
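
Invocation sketch (illustrative; the command-line arguments are assumptions):

    val processor = new tech.ides.cli.OptionsProcessor()
    processor.process(Array("-e", "select 1 as a;", "-d", "env=dev"))
    if (processor.hasOption('e')) println(processor.getValue('e'))   // prints: select 1 as a;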
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/core/IdesApp.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.core
2 |
3 | import tech.ides.conf.IdesConf.{IDES_SHELL_MODE, IDES_SPARK_SERVICE}
4 | import tech.ides.conf.IdesConf
5 | import tech.ides.core.platform.{PlatformLifecycle, PlatformManager, ServiceLifecycle}
6 | import tech.sqlclub.common.reflect.{ClassPath, Reflection}
7 | import tech.sqlclub.common.utils.ParamsUtils
8 |
9 | /**
10 | *
11 | * Created by songgr on 2020/07/14.
12 | */
13 | object IdesApp {
14 |
15 | def main(args: Array[String]): Unit = {
16 |
17 | val params = new ParamsUtils(args)
18 | val idesConf = IdesConf.getOrCreate
19 | params.getParamsMap.foreach(kv => idesConf.set(kv._1, kv._2))
20 |
21 | val platformLifecycles = ApplicationSetting.PLATFORM_LIFECYCLES
22 | if (platformLifecycles.isDefined) {
23 | platformLifecycles.get.foreach {
24 | clazz =>
25 | val lifecycle = Reflection(ClassPath.from(clazz)).instance[PlatformLifecycle]
26 | PlatformManager.registerLifecycle(lifecycle)
27 | }
28 | }
29 |
30 | if (idesConf.get(IDES_SPARK_SERVICE) && !idesConf.get(IDES_SHELL_MODE)) {
31 | val serviceLifecycles = ApplicationSetting.SERVICE_LIFECYCLES
32 | if (serviceLifecycles.isDefined) {
33 | serviceLifecycles.get.foreach {
34 | clazz =>
35 | val lifecycle = Reflection(ClassPath.from(clazz)).instance[ServiceLifecycle]
36 | PlatformManager.registerLifecycle(lifecycle)
37 | }
38 | }
39 | }
40 |
41 | PlatformManager.getOrCreate.run(idesConf)
42 |
43 |
44 | }
45 |
46 | }
47 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/core/ScriptQueryExecute.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.core
2 |
3 | import ides.dsl.parser._
4 | import org.antlr.v4.runtime.tree.ParseTreeWalker
5 | import org.antlr.v4.runtime.CommonTokenStream
6 | import tech.ides.dsl.listener.{PreProcessListener, ScriptQueryExecListener}
7 | import tech.sqlclub.common.log.Logging
8 | import tech.ides.constants.ScriptConstants._
9 | import tech.ides.dsl.{CaseChangeCharStream, SyntaxErrorListener}
10 |
11 | /**
12 | *
13 | * Created by songgr on 2020/10/28.
14 | */
15 | object ScriptQueryExecute extends Logging {
16 |
17 | // the script execution context is private to the current thread
18 | private[this] val scriptExecuteContext: ThreadLocal[ScriptQueryExecuteContext] = new ThreadLocal[ScriptQueryExecuteContext]
19 |
20 | def context = scriptExecuteContext.get()
21 |
22 | def setContext(sqec:ScriptQueryExecuteContext) = scriptExecuteContext.set(sqec)
23 |
24 | def reset = scriptExecuteContext.remove()
25 |
26 | def getOrCreateContext() = {
27 | if (context == null) {
28 | logWarning("Create default context for unit test, there may be some problems if used in real program!")
29 | val scriptQueryExecListener = new ScriptQueryExecListener(null, DEFAULT_PATH_PREFIX, TEST_USER)
30 | setContext(ScriptQueryExecuteContext(scriptQueryExecListener, TEST_USER, scriptQueryExecListener.ownerPath(None), ""))
31 | }
32 | context
33 | }
34 |
35 | /**
36 | * Execute a script
37 | * @param script script content
38 | * @param listener script execution listener
39 | * @param skipInclude whether to skip the include stage
40 | * @param skipGrammarValidate whether to skip the grammar validation stage
41 | * @param skipAuth whether to skip the auth stage
42 | * @param skipPhysicalJob whether to skip the physical execution stage
43 | */
44 | def exec(script: String,
45 | listener: ScriptQueryExecListener,
46 | skipInclude: Boolean = true,
47 | skipGrammarValidate: Boolean = true,
48 | skipAuth: Boolean = true,
49 | skipPhysicalJob: Boolean = false
50 | ) = {
51 |
52 |
53 | var querySQL = script
54 |
55 | // include stage
56 | if ( !skipInclude ) {
57 | // todo: implement the include feature
58 | }
59 |
60 | // set replace stage
61 | // todo: implement set-variable substitution
62 |
63 | val preProcessListener = new PreProcessListener(listener)
64 |
65 | listener.setStage(ScriptStage.preProcess)
66 | parse(querySQL, preProcessListener)
67 |
68 | // SQL after pre-processing
69 | querySQL = preProcessListener.toScript
70 |
71 | // println("====== pre-processing finished ======")
72 | // println(querySQL)
73 |
74 |
75 |
76 | // grammar validate stage
77 | if ( !skipGrammarValidate ) {
78 | // todo: implement grammar validation
79 | }
80 |
81 | // auth stage
82 | if ( !skipAuth ) {
83 | // todo: implement permission checks
84 | }
85 |
86 | // physical job stage
87 | if ( !skipPhysicalJob ) {
88 | listener.setStage(ScriptStage.physical)
89 | parse(querySQL, listener)
90 | }
91 |
92 | }
93 |
94 |
95 | private def parse(script: String, listener: IdesParserBaseListener): Unit = {
96 | val charStream = new CaseChangeCharStream(script)
97 | val idesDslLexer = new IdesLexer(charStream)
98 | val tokenStream = new CommonTokenStream(idesDslLexer)
99 | val parser = new IdesParser(tokenStream)
100 |
101 | // add syntax error listener
102 | parser.addErrorListener(new SyntaxErrorListener)
103 |
104 | val statement = parser.statement
105 | ParseTreeWalker.DEFAULT.walk(listener, statement)
106 | }
107 |
108 | }
109 |
110 | /**
111 | * Script query execution context
112 | * @param execListener script execution listener
113 | * @param owner script owner
114 | * @param home owner's home directory
115 | * @param groupId script groupId
116 | * @param userDefinedParam user-defined parameters
117 | */
118 | case class ScriptQueryExecuteContext(
119 | @transient execListener: ScriptQueryExecListener,
120 | owner: String,
121 | home: String,
122 | groupId: String,
123 | userDefinedParam: Map[String, String] = Map())
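
A hedged end-to-end sketch of driving the executor (assumes a live SparkSession `spark`, the listener constructor order (sparkSession, defaultPathPrefix, owner) used in getOrCreateContext above, and an invented demo script):

    val listener = new ScriptQueryExecListener(spark, "/tmp/ides", "admin")
    ScriptQueryExecute.setContext(ScriptQueryExecuteContext(listener, "admin", listener.ownerPath(None), "group-1"))
    ScriptQueryExecute.exec("load csv.`/data/demo.csv` where header=\"true\" as demo;", listener)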
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/core/ScriptStage.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.core
2 |
3 | /**
4 | *
5 | * Created by songgr on 2020/11/05.
6 | */
7 | object ScriptStage extends Enumeration {
8 | type stage = Value
9 | val include = Value("include")
10 | val preProcess = Value("preProcess")
11 | val auth = Value("auth")
12 | val physical = Value("physical")
13 | val grammar = Value("grammar")
14 | }
15 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/datasource/BaseFileDataSource.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.datasource
2 |
3 | import org.apache.spark.sql.{DataFrameReader, DataFrameWriter, Row, SaveMode}
4 | import tech.ides.constants.ScriptConstants.{IMPL_CLASS, PARTITION_BY_COL}
5 | import tech.ides.datasource.reader.{DataReader, Reader}
6 | import tech.ides.datasource.writer.{DataWriter, Writer}
7 | import tech.ides.strategy.PlatformFrameImpl
8 | import tech.ides.strategy.PlatformFrameEnum.{FLINK, SPARK}
9 |
10 | /**
11 | * Base data source for file-based formats
12 | * Created by songgr on 2020/11/05.
13 | */
14 | @PlatformFrameImpl(frameName = SPARK)
15 | abstract class BaseFileDataSource extends DataReader with DataWriter {
16 |
17 | override def load(reader: Reader, config: DataSourceConfig): DataTable = {
18 | val format = config.config.getOrElse(IMPL_CLASS, fullFormat)
19 | val sparkDataReader = reader.getReader[DataFrameReader]
20 | val dataFrame = sparkDataReader.options(config.config).format(format).load(config.path)
21 | SparkDataTable(dataFrame)
22 | }
23 |
24 | override def save(writer: Writer, config: DataSinkConfig): Unit = {
25 | val format = config.config.getOrElse(IMPL_CLASS, fullFormat)
26 | val partitionByCol = config.config.getOrElse(PARTITION_BY_COL, "").split(",").filterNot(_.isEmpty)
27 | val sparkDataWriter = writer.getWriter[DataFrameWriter[Row]]
28 | if (partitionByCol.length > 0) {
29 | sparkDataWriter.partitionBy(partitionByCol: _*)
30 | }
31 | val saveMode = SaveMode.valueOf(config.mode.name())
32 | sparkDataWriter.options(config.config).mode(saveMode).format(format).save(config.path)
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/datasource/DataSourceFactory.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.datasource
2 |
3 | import tech.ides.core.platform.PlatformLifecycle
4 | import tech.sqlclub.common.reflect.{ClassPath, Reflection}
5 | import tech.ides.conf.IdesConf.IDES_DATASOURCE_PACKAGES
6 | import tech.ides.conf.IdesConf
7 | import tech.ides.constants.ScriptConstants
8 | import tech.ides.datasource.DataSource.Method
9 | import tech.ides.utils.PlatformUtils
10 | import tech.sqlclub.common.log.Logging
11 |
12 | import scala.collection.JavaConverters._
13 |
14 | /**
15 | * Data source factory: register and look up data sources
16 | * Created by songgr on 2020/10/25.
17 | */
18 | object DataSourceFactory extends PlatformLifecycle with Logging {
19 |
20 | // data source registry
21 | private val registry = new java.util.HashMap[DataSourceKey, BaseDataSource]()
22 |
23 | def listName() = registry.keySet().asScala.map(_.name).toSet
24 |
25 | def findBothNames(name:String) = {
26 | registry.asScala.find(_._1.name == name).map(it => Set(it._2.fullFormat, it._2.aliasFormat))
27 | }
28 |
29 | /**
30 | * Register all data sources
31 | */
32 | def register(idesConf: IdesConf): Unit = {
33 | val dataSourceDefaultPackages = Array("tech.ides.external.datasource", "tech.ides.datasource.impl")
34 | val option = idesConf.get(IDES_DATASOURCE_PACKAGES)
35 | val userDatasourcePackages = if (option.isDefined) {
36 | option.get.split(",").filter(_.nonEmpty).map(_.trim)
37 | } else {Array[String]()}
38 |
39 | val scanPackages = dataSourceDefaultPackages ++ userDatasourcePackages
40 |
41 | logInfo("look for the DataSource plugin from packages: " + scanPackages.mkString(", "))
42 |
43 | val allDataSource = Reflection.allClassWithAnnotation(classOf[DataSource], scanPackages:_*)
44 | val ds = allDataSource.filter {
45 | dataSourceClass =>
46 | // skip data sources whose implementation framework does not match
47 | PlatformUtils.frameworkEquals(idesConf, dataSourceClass)
48 | }.map {
49 | dataSourceClass =>
50 | val annotation = Reflection.getAnnotation(dataSourceClass, classOf[DataSource])
51 | val dataSourceInstace = Reflection(ClassPath.from(dataSourceClass)).instance[BaseDataSource]
52 | val direct = annotation.directDataSource()
53 | val sourceTypes = annotation.types()
54 |
55 | if (sourceTypes != null && sourceTypes.nonEmpty) {
56 | sourceTypes.foreach(
57 | sourceType => {
58 | registry.put(DataSourceKey(dataSourceInstace.aliasFormat,sourceType, direct), dataSourceInstace)
59 | registry.put(DataSourceKey(dataSourceInstace.fullFormat, sourceType, direct), dataSourceInstace)
60 | }
61 | )
62 | }
63 | annotation.name()
64 | }
65 |
66 | logInfo(s"""A total of ${allDataSource.size} dataSource plugins scanned: [${ds.mkString(", ")}].""")
67 | }
68 |
69 | def take(name:String, sourceType:Method, options:Map[String,String]=Map()): Option[BaseDataSource] = {
70 | val direct = options.contains(ScriptConstants.DIRECT_QUERY)
71 | val key = DataSourceKey(name, sourceType, direct)
72 | if (registry.containsKey(key)) {
73 | Some(registry.get(key))
74 | } else {
75 | None
76 | }
77 | }
78 |
79 | override def beforeSQLRuntime(idesConf: IdesConf): Unit = { register(idesConf) }
80 |
81 | override def afterSQLRuntime(idesConf: IdesConf): Unit = {}
82 | }
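
Lookup sketch (illustrative; assumes a "csv" source was registered during beforeSQLRuntime):

    import tech.ides.datasource.DataSource.Method

    DataSourceFactory.take("csv", Method.SOURCE) match {
      case Some(ds) => println(s"found data source: ${ds.fullFormat}")
      case None     => println("no such data source registered")
    }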
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/CaseChangeCharStream.java:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl;
2 |
3 | import org.antlr.v4.runtime.CharStream;
4 | import org.antlr.v4.runtime.CharStreams;
5 | import org.antlr.v4.runtime.misc.Interval;
6 |
7 | import java.io.ByteArrayInputStream;
8 | import java.io.IOException;
9 |
10 | /**
11 | * Case-changing character stream
12 | * Created by songgr on 2020/11/06.
13 | */
14 | public class CaseChangeCharStream implements CharStream {
15 |
16 | private final CharStream charStream;
17 | private final boolean upper;
18 |
19 | /**
20 | * Constructs a new CaseChangeCharStream wrapping the given {@link CharStream} forcing
21 | * all characters to upper case or lower case.
22 | * @param charStream The stream to wrap.
23 | * @param upper If true force each symbol to upper case, otherwise force to lower. Default is false.
24 | */
25 | public CaseChangeCharStream(CharStream charStream, boolean upper) {
26 | this.charStream = charStream;
27 | this.upper = upper;
28 | }
29 |
30 |
31 | public CaseChangeCharStream(CharStream charStream) {
32 | this.charStream = charStream;
33 | this.upper = false;
34 | }
35 |
36 | public CaseChangeCharStream(String string) throws IOException {
37 | ByteArrayInputStream bais = new ByteArrayInputStream(string.getBytes());
38 | CharStream charStream = CharStreams.fromStream(bais);
39 | this.charStream = charStream;
40 | this.upper = false;
41 | }
42 |
43 | @Override
44 | public String getText(Interval interval) {
45 | return charStream.getText(interval);
46 | }
47 |
48 | @Override
49 | public void consume() {
50 | charStream.consume();
51 | }
52 |
53 | @Override
54 | public int LA(int i) {
55 | int c = charStream.LA(i);
56 | if (c <= 0) { return c; }
57 | if (upper) {
58 | return Character.toUpperCase(c);
59 | }
60 | return Character.toLowerCase(c);
61 | }
62 |
63 | @Override
64 | public int mark() {
65 | return charStream.mark();
66 | }
67 |
68 | @Override
69 | public void release(int marker) {
70 | charStream.release(marker);
71 | }
72 |
73 | @Override
74 | public int index() {
75 | return charStream.index();
76 | }
77 |
78 | @Override
79 | public void seek(int index) {
80 | charStream.seek(index);
81 | }
82 |
83 | @Override
84 | public int size() {
85 | return charStream.size();
86 | }
87 |
88 | @Override
89 | public String getSourceName() {
90 | return charStream.getSourceName();
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/SyntaxErrorListener.java:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl;
2 |
3 | import org.antlr.v4.runtime.BaseErrorListener;
4 | import org.antlr.v4.runtime.RecognitionException;
5 | import org.antlr.v4.runtime.Recognizer;
6 |
7 | /**
8 | * Syntax error listener; throws an exception on syntax errors
9 | * Created by songgr on 2020/11/06.
10 | */
11 | public class SyntaxErrorListener extends BaseErrorListener {
12 |
13 | @Override
14 | public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) {
15 | throw new RuntimeException("IDES dsl parse error in [row:" + line + " column:" + charPositionInLine + "] error msg: " + msg);
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/adaptor/CommandAdaptor.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl.adaptor
2 |
3 | import ides.dsl.parser.IdesParser
4 | import ides.dsl.parser.IdesParser.CommandContext
5 | import tech.ides.dsl.listener.ScriptQueryExecListener
6 | import tech.ides.dsl.statement.{CommandSqlStatement, SqlStatement}
7 | import tech.ides.dsl.utils.DslUtil.currentText
8 |
9 |
10 | /**
11 | * Command syntax adaptor
12 | * Created by songgr on 2022/02/16.
13 | */
14 | case class CommandAdaptor(scriptQueryExecListener: ScriptQueryExecListener) extends ScriptDslAdaptor {
15 | override def parse(context: IdesParser.QueryContext): SqlStatement = {
16 | val commandContext = context.asInstanceOf[CommandContext]
17 |
18 | val sql = currentText(commandContext)
19 | val module = commandContext.module().getText
20 | val commandParamStr = currentText(commandContext.commandParam())
21 |
22 | // translate the original command
23 | val rawCommand = ""
24 |
25 | val parameters = Option(commandParamStr)
26 | .map(str => str.split(" ").map(_.trim).filter(_.nonEmpty))
27 | .getOrElse(Array[String]())
28 |
29 | CommandSqlStatement(sql, module, rawCommand, parameters)
30 | }
31 |
32 | override def execute(context: IdesParser.QueryContext): Unit = {
33 |
34 | val CommandSqlStatement(sql, module, rawCommand, parameters) = parse(context)
35 | println(sql)
36 | println(parameters.mkString(","))
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/adaptor/ConnectAdaptor.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl.adaptor
2 |
3 | import ides.dsl.parser.IdesParser
4 | import ides.dsl.parser.IdesParser.ConnectContext
5 | import tech.ides.datasource.DataSourceFactory
6 | import tech.ides.dsl.listener.ScriptQueryExecListener
7 | import tech.ides.dsl.statement.{ConnectSqlStatement, SqlStatement}
8 | import tech.ides.dsl.utils.DslUtil.{currentText, parseAssetName, whereExpressionsToMap}
9 | import tech.ides.metastore.{ConnectMappingKey, ConnectMetaStore}
10 | import tech.ides.storage.ExternalStorageInfo.ConnectMetaData
11 | import tech.ides.constants.ScriptConstants.EXTERNAL_STORAGE
12 |
13 | /**
14 | * Connect syntax adaptor
15 | * Created by songgr on 2021/02/21.
16 | */
17 | case class ConnectAdaptor(scriptQueryExecListener: ScriptQueryExecListener) extends ScriptDslAdaptor {
18 |
19 | override def parse(context: IdesParser.QueryContext): SqlStatement = {
20 | val connectContext = context.asInstanceOf[ConnectContext]
21 | val sql = currentText(connectContext)
22 | val format = connectContext.format().getText
23 | val options = whereExpressionsToMap(connectContext.whereExpressions())
24 |
25 | val connectName = parseAssetName(connectContext.asAsset().assetName())
26 |
27 | ConnectSqlStatement(sql, format, options, connectName)
28 | }
29 |
30 | override def execute(context: IdesParser.QueryContext): Unit = {
31 | val ConnectSqlStatement(_, format, options, connectName) = parse(context)
32 |
33 | DataSourceFactory.findBothNames(format).map{
34 | names =>
35 | names.map(name => ConnectMetaStore.add(ConnectMappingKey(name, connectName), options))
36 | }.getOrElse(ConnectMetaStore.add(ConnectMappingKey(format, connectName), options))
37 |
38 | val storage = options.getOrElse(EXTERNAL_STORAGE, "false").toBoolean
39 |
40 | if (storage && ConnectMetaStore.storage != null) {
41 | ConnectMetaStore.storage.saveConfig(ConnectMappingKey(format, connectName).toString, ConnectMetaData, options + ("format" -> format), true)
42 | }
43 |
44 | scriptQueryExecListener.setLastTableName(null)
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/adaptor/DropAdaptor.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl.adaptor
2 |
3 | import java.util.UUID
4 |
5 | import ides.dsl.parser.{IdesLexer, IdesParser}
6 | import ides.dsl.parser.IdesParser.DropContext
7 | import tech.ides.dsl.listener.ScriptQueryExecListener
8 | import tech.ides.dsl.statement.{DropSqlStatement, SqlStatement}
9 | import tech.ides.dsl.utils.DslUtil.{currentText, parseAssetName}
10 | import tech.ides.metastore.{ConnectMappingKey, ConnectMetaStore}
11 | import tech.ides.storage.ExternalStorageInfo
12 | import tech.ides.constants.ScriptConstants.RESOURCE
13 |
14 | /**
15 | * Drop syntax adaptor
16 | * Created by songgr on 2021/03/15.
17 | */
18 | case class DropAdaptor(scriptQueryExecListener: ScriptQueryExecListener) extends ScriptDslAdaptor {
19 |
20 | override def parse(context: IdesParser.QueryContext): SqlStatement = {
21 | val dropContext = context.asInstanceOf[DropContext]
22 | val sql = currentText(dropContext)
23 | val resource = dropContext.resource.getType
24 | val format = dropContext.format().getText
25 | val assetName = parseAssetName(dropContext.assetName())
26 |
27 | DropSqlStatement(sql, resource, format, assetName)
28 | }
29 |
30 | override def execute(context: IdesParser.QueryContext): Unit = {
31 | val DropSqlStatement(_, resource, format, assetName) = parse(context)
32 |
33 | val res = resource match {
34 | case IdesLexer.CONNECTION =>
35 | val mappingKey = ConnectMappingKey(format, assetName)
36 | ConnectMetaStore.remove(mappingKey)
37 | val flag = ConnectMetaStore.storage.dropConfig(mappingKey.toString, ExternalStorageInfo.ConnectMetaData)
38 | ("CONNECTION", flag)
39 | case IdesLexer.FUNCTION =>
40 | // todo
41 | ("FUNCTION", true)
42 | }
43 |
44 | val spark = scriptQueryExecListener.sparkSession
45 |
46 | val tempTable = "d_" + UUID.randomUUID().toString.replace("-", "")
47 | import spark.implicits._
48 | // expose the result as the output table
49 | val outputTable = Seq(res).toDF(RESOURCE, "result")
50 | outputTable.createOrReplaceTempView(tempTable)
51 | scriptQueryExecListener.setLastTableName(tempTable)
52 | }
53 |
54 | }
55 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/adaptor/LoadAdaptor.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl.adaptor
2 |
3 | import ides.dsl.parser.IdesParser
4 | import ides.dsl.parser.IdesParser.LoadContext
5 | import tech.ides.core.ScriptQueryExecute
6 | import tech.ides.datasource.DataSource.Method
7 | import tech.ides.datasource.reader.DataReader
8 | import tech.ides.datasource.{DataSourceConfig, DataSourceFactory, DataTable, SparkDataTable}
9 | import tech.ides.dsl.listener.ScriptQueryExecListener
10 | import tech.ides.dsl.statement.{LoadSqlStatement, SqlStatement}
11 | import tech.ides.dsl.utils.DslUtil._
12 | import tech.ides.strategy.PlatformStrategyCenter
13 | import tech.ides.strategy.PlatformFrameEnum.SPARK
14 | import tech.ides.strategy.PlatformStrategyCenter.SparkDataReader
15 |
16 | /**
17 | * Load syntax adaptor
18 | * Created by songgr on 2020/11/05.
19 | */
20 | case class LoadAdaptor(scriptQueryExecListener: ScriptQueryExecListener) extends ScriptDslAdaptor {
21 |
22 | override def parse(context: IdesParser.QueryContext): SqlStatement = {
23 |
24 | val loadContext = context.asInstanceOf[LoadContext]
25 |
26 | val sql = currentText(loadContext)
27 | val format = loadContext.format().getText
28 | val path = loadContext.path().getText
29 |
30 | val whereExpsContext = loadContext.whereExpressions()
31 | val options = whereExpressionsToMap(whereExpsContext)
32 |
33 | val tableName = parseAssetName(loadContext.asAsset().assetName())
34 |
35 | LoadSqlStatement(sql, format, path, options, tableName)
36 | }
37 |
38 | override def execute(context: IdesParser.QueryContext): Unit = {
39 | val LoadSqlStatement(_, format, path, options, tableName) = parse(context)
40 | val sparkSession = scriptQueryExecListener.sparkSession
41 | val reader = sparkSession.read
42 | var table:DataTable = null
43 |
44 | val owner = options.getOrElse("owner", ScriptQueryExecute.getOrCreateContext().owner)
45 | val resourcePath = resourceRealPath(scriptQueryExecListener, Option(owner) , cleanStr(path))
46 |
47 | val (dataReader, dataTable) = PlatformStrategyCenter.platformFrame match {
48 | case SPARK => (SparkDataReader(reader), SparkDataTable(null))
49 | }
50 | val dsConf = DataSourceConfig(resourcePath, options, dataTable)
51 |
52 | // get the data source from the factory
53 | DataSourceFactory.take(format, Method.SOURCE, options).map {
54 | dataSource =>
55 | table = dataSource.asInstanceOf[DataReader].load(dataReader, dsConf)
56 | // todo permission check
57 | }.getOrElse{
58 | // todo warn when nothing matches
59 | }
60 |
61 | // register a temp view
62 | table.createOrReplaceTempView(tableName)
63 |
64 | // set last table name
65 | scriptQueryExecListener.setLastTableName(tableName)
66 |
67 | }
68 |
69 | }
70 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/adaptor/RunAdaptor.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl.adaptor
2 |
3 | import ides.dsl.parser.IdesParser
4 | import ides.dsl.parser.IdesParser.RunContext
5 | import tech.ides.datasource.{DataTable, SparkDataTable}
6 | import tech.ides.dsl.listener.ScriptQueryExecListener
7 | import tech.ides.dsl.statement.{RunSqlStatement, SqlStatement}
8 | import tech.ides.dsl.utils.DslUtil.{currentText, parseAssetName, whereExpressionsToMap}
9 | import tech.ides.extension.ETPluginRegister
10 | import tech.ides.strategy.PlatformFrameEnum.SPARK
11 | import tech.ides.strategy.PlatformStrategyCenter
12 |
13 | /**
14 | * Run syntax adaptor
15 | * Created by songgr on 2022/02/16.
16 | */
17 | case class RunAdaptor(scriptQueryExecListener: ScriptQueryExecListener) extends ScriptDslAdaptor {
18 | override def parse(context: IdesParser.QueryContext): SqlStatement = {
19 | val runContext = context.asInstanceOf[RunContext]
20 |
21 | val sql = currentText(runContext)
22 | val module = runContext.module().getText
23 | val path = runContext.path().getText
24 |
25 | val whereExpsContext = runContext.whereExpressions()
26 | val options = whereExpressionsToMap(whereExpsContext)
27 |
28 | val tableName = parseAssetName(runContext.asAsset().assetName())
29 |
30 | RunSqlStatement(sql, module, path, options, tableName)
31 | }
32 |
33 | override def execute(context: IdesParser.QueryContext): Unit = {
34 |
35 | val RunSqlStatement(_, module, path, options, tableName) = parse(context)
36 | val sparkSession = scriptQueryExecListener.sparkSession
37 |
38 | val df = sparkSession.table(path)
39 | var table: DataTable = PlatformStrategyCenter.platformFrame match {
40 | case SPARK => SparkDataTable(df)
41 | }
42 |
43 | // look up the ET plugin
44 | ETPluginRegister.find(module).map {
45 | etPlugin =>
46 | // plugin version compatibility check
47 | etPlugin.versionCompatible
48 | // execute the plugin
49 | table = etPlugin.exec(table, path, options)
50 | // todo permission check
51 | }.getOrElse{
52 | // todo warn when nothing matches
53 | }
54 |
55 | // register a temp view
56 | table.createOrReplaceTempView(tableName)
57 |
58 | // set last table name
59 | scriptQueryExecListener.setLastTableName(tableName)
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/adaptor/ScriptDslAdaptor.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl.adaptor
2 |
3 | import ides.dsl.parser.IdesParser.QueryContext
4 | import tech.ides.dsl.listener.ScriptQueryExecListener
5 | import tech.ides.dsl.statement.SqlStatement
6 |
7 | /**
8 | * Script DSL adaptor
9 | * Created by songgr on 2020/11/05.
10 | */
11 | trait ScriptDslAdaptor {
12 |
13 | /**
14 | * Script execution listener
15 | * @return
16 | */
17 | def scriptQueryExecListener:ScriptQueryExecListener
18 |
19 | /**
20 | * Parse the script
21 | * @param context script context
22 | * @return
23 | */
24 | def parse(context: QueryContext): SqlStatement
25 |
26 | /**
27 | * Execute the script
28 | * @param context script context
29 | */
30 | def execute(context: QueryContext)
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/adaptor/SelectAdaptor.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl.adaptor
2 |
3 | import ides.dsl.parser.IdesParser
4 | import ides.dsl.parser.IdesParser.SelectContext
5 | import tech.ides.dsl.listener.ScriptQueryExecListener
6 | import tech.ides.dsl.statement.{SelectSqlStatement, SqlStatement}
7 | import tech.ides.dsl.utils.DslUtil.{currentText, parseAssetName}
8 |
9 | /**
10 | *
11 | * Created by songgr on 2020/11/06.
12 | */
13 | case class SelectAdaptor(scriptQueryExecListener: ScriptQueryExecListener) extends ScriptDslAdaptor {
14 | override def parse(context: IdesParser.QueryContext): SqlStatement = {
15 |
16 | val selectContext = context.asInstanceOf[SelectContext]
17 | val sql = currentText(selectContext)
18 |
19 | val tableName = parseAssetName(selectContext.asAsset().assetName())
20 |
21 | SelectSqlStatement(sql, tableName)
22 | }
23 |
24 | override def execute(context: IdesParser.QueryContext): Unit = {
25 |
26 | val SelectSqlStatement(_sql, tableName) = parse(context)
27 | val sparkSession = scriptQueryExecListener.sparkSession
28 |
29 | // index of tableName
30 | val index = _sql.lastIndexOf(tableName)
31 | // index of the last "as"
32 | val asIndex = _sql.substring(0, index).toLowerCase.lastIndexOf("as")
33 | // cut off the trailing "as tableName" part
34 | val sql = _sql.substring(0, asIndex)
35 |
36 | val df = sparkSession.sql(sql)
37 |
38 | // todo permission check
39 |
40 | df.createOrReplaceTempView(tableName)
41 |
42 | // set last table name
43 | scriptQueryExecListener.setLastTableName(tableName)
44 | }
45 |
46 | }
47 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/adaptor/SetAdaptor.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl.adaptor
2 |
3 | import ides.dsl.parser.IdesParser
4 | import ides.dsl.parser.IdesParser.SetContext
5 | import org.apache.commons.lang3.math.NumberUtils
6 | import tech.ides.dsl.listener.ScriptQueryExecListener
7 | import tech.ides.dsl.statement.{SetSqlStatement, SqlStatement}
8 | import tech.ides.dsl.utils.DslUtil.{cleanStr, currentText, whereExpressionsToMap}
9 |
10 | /**
11 | * Set syntax adaptor
12 | * Created by songgr on 2022/03/07.
13 | */
14 | case class SetAdaptor(scriptQueryExecListener: ScriptQueryExecListener) extends ScriptDslAdaptor {
15 | /**
16 | * Parse the script
17 | *
18 | * @param context script context
19 | * @return
20 | */
21 | override def parse(context: IdesParser.QueryContext): SqlStatement = {
22 |
23 | val setContext = context.asInstanceOf[SetContext]
24 | val sql = currentText(setContext)
25 |
26 | val key = setContext.expression().keyName().getText
27 | val valueName = setContext.expression().valueName()
28 |
29 | val value = valueName match {
30 | case _ if null != valueName.mumericValue() => {
31 | val text = valueName.mumericValue().getText
32 | if (text.contains(".")) NumberUtils.createDouble(text) else NumberUtils.createLong(text)
33 | }
34 | case _ if null != valueName.stringValue() => cleanStr(valueName.stringValue().getText)
35 | case _ => valueName.getText
36 | }
37 |
38 | val whereExpsContext = setContext.whereExpressions()
39 | val options = whereExpressionsToMap(whereExpsContext)
40 |
41 | SetSqlStatement(sql, key, value, options)
42 | }
43 |
44 | /**
45 | * Execute the script
46 | *
47 | * @param context script context
48 | */
49 | override def execute(context: IdesParser.QueryContext): Unit = {
50 | val SetSqlStatement(sql, key, value, options) = parse(context)
51 |
52 | if (options.isEmpty) {
53 | scriptQueryExecListener.addEnv(key, value)
54 | }
55 |
56 | // todo: support setting variables from shell/sql
57 |
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/listener/PreProcessListener.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl.listener
2 |
3 | import ides.dsl.parser.{IdesParser, IdesParserBaseListener}
4 | import tech.ides.dsl.adaptor.{CommandAdaptor, SetAdaptor}
5 | import tech.ides.dsl.statement.{QuerySqlStatement, SqlStatement}
6 | import tech.ides.dsl.utils.DslUtil.currentText
7 | import tech.ides.script.ScriptTemplate
8 | import tech.sqlclub.common.log.Logging
9 | import scala.collection.mutable.ArrayBuffer
10 |
11 | /**
12 | * Script pre-processing listener
13 | * Mainly handles macro-command translation, include-script processing, set-variable rendering, etc.
14 | * Created by songgr on 2022/03/02.
15 | */
16 | class PreProcessListener(sqel: ScriptQueryExecListener) extends IdesParserBaseListener with Logging {
17 |
18 | private val statements = new ArrayBuffer[SqlStatement]()
19 |
20 | def toScript = statements.map(_.sql).mkString("\n")
21 |
22 | def addStatement(statement: SqlStatement) = {
23 | statements += statement
24 | this
25 | }
26 |
27 | /**
28 | * Handle command statements
29 | * A macro (!) command is translated into a run statement
30 | */
31 | override def exitCommand(ctx: IdesParser.CommandContext): Unit = {
32 | val statement = CommandAdaptor(sqel).parse(ctx)
33 | // println("Command=============>")
34 | // println(statement.sql)
35 | }
36 |
37 |
38 | /**
39 | * Handle set statements
40 | * Executes the set syntax and stores the variable value into env
41 | */
42 | override def exitSet(ctx: IdesParser.SetContext): Unit = {
43 | SetAdaptor(sqel).execute(ctx)
44 | }
45 |
46 | /**
47 | * A single iql statement
48 | * Covers the basic load, save, run, set, ... syntax
49 | */
50 | override def exitIql(ctx: IdesParser.IqlContext): Unit = {
51 | val sql = currentText(ctx)
52 | // render variables
53 | val script = ScriptTemplate.merge(sql, sqel.env)
54 | addStatement(QuerySqlStatement(script, sql))
55 | }
56 |
57 | /**
58 | * A single ides statement
59 | * Covers basic iql syntax, raw python/shell/scala syntax and raw sql statements
60 | */
61 | override def exitIdesScript(ctx: IdesParser.IdesScriptContext): Unit = {
62 | val script = currentText(ctx)
63 |
64 | val statement = QuerySqlStatement(script, script)
65 | if ( !statements.map(_.originalSQL).exists(_.equals(statement.originalSQL)) ) {
66 | log.debug(s"add sqlStatement: ${statement.originalSQL}")
67 | addStatement(statement)
68 | }
69 | }
70 |
71 | /**
72 | * The whole script
73 | */
74 | override def exitStatement(ctx: IdesParser.StatementContext): Unit = { }
75 | }
76 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/dsl/statement/SqlStatement.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.dsl.statement
2 |
3 | import tech.ides.datasource.writer.SaveMode
4 |
5 | /**
6 | *
7 | * Created by songgr on 2020/11/05.
8 | */
9 |
10 | sealed trait SqlStatement {
11 | def sql:String
12 | def originalSQL:String = sql
13 | }
14 |
15 | case class QuerySqlStatement(sql:String, override val originalSQL:String) extends SqlStatement
16 |
17 | // LOAD format DOT path whereExpressions? asAsset
18 | case class LoadSqlStatement(sql: String, format: String, path: String, options: Map[String, String], tableName: String) extends SqlStatement
19 |
20 | // SAVE assetName saveMode? INTO format DOT path whereExpressions? partitionbyExpression?
21 | case class SaveSqlStatement(sql: String, inputTableName: String, saveMode: SaveMode, format: String, path: String, options: Map[String, String], partitionByCol: List[String]) extends SqlStatement
22 |
23 | // SELECT ~(EOQ)+ asAsset
24 | case class SelectSqlStatement(sql:String, tableName:String) extends SqlStatement
25 |
26 | // CONNECT format whereExpressions asAsset
27 | case class ConnectSqlStatement(sql:String, format: String, options: Map[String, String], connectionName:String) extends SqlStatement
28 |
29 | // DROP resource=(CONNECTION|FUNCTION) format DOT assetName
30 | case class DropSqlStatement(sql:String, resource:Int, format:String, assetName:String) extends SqlStatement
31 |
32 | // RUN module DOT path whereExpressions? asAsset? # Run
33 | case class RunSqlStatement(sql: String, module: String, path: String, options: Map[String, String], tableName: String) extends SqlStatement
34 |
35 | // EXEC_TOKEN module commandParam? # Command
36 | case class CommandSqlStatement(sql: String, module: String, rawCommand: String, parameters:Array[String]) extends SqlStatement
37 |
38 | // SET expression whereExpressions? # Set
39 | case class SetSqlStatement(sql: String, key: String, value:Any, options: Map[String, String]) extends SqlStatement
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/extension/ETPluginRegister.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.extension
2 |
3 | import tech.ides.conf.IdesConf.IDES_PLUGIN_PACKAGES
4 | import tech.ides.conf.IdesConf
5 | import tech.ides.core.platform.PlatformLifecycle
6 | import tech.ides.utils.PlatformUtils
7 | import tech.sqlclub.common.log.Logging
8 | import tech.sqlclub.common.reflect.{ClassPath, Reflection}
9 |
10 | /**
11 | * ET plugin registration entry point
12 | * Created by songgr on 2022/02/16.
13 | */
14 | object ETPluginRegister extends PlatformLifecycle with Logging {
15 |
16 | // ET plugin registry
17 | private val registry = new java.util.HashMap[String, ETPlugin]()
18 |
19 | /**
20 | * Register all ET plugins
21 | */
22 | def register(idesConf: IdesConf): Unit = {
23 | val etDefaultPackages = Array("tech.ides.external.ets", "tech.ides.ets")
24 | val option = idesConf.get(IDES_PLUGIN_PACKAGES)
25 | val userETPackages = option.map(packages => packages.split(",").filter(_.nonEmpty).map(_.trim))
26 | .getOrElse(Array[String]())
27 |
28 | val scanPackages = etDefaultPackages ++ userETPackages
29 | logInfo("look for the et plugin from packages: " + scanPackages.mkString(", "))
30 |
31 | val allETs = Reflection.allClassWithAnnotation(classOf[Extension], scanPackages:_*)
32 | val ets = allETs.filter {
33 | etClass =>
34 | // skip ET plugins whose implementation framework does not match
35 | PlatformUtils.frameworkEquals(idesConf, etClass)
36 | }.map {
37 | etClass =>
38 | val annotation = Reflection.getAnnotation(etClass, classOf[Extension])
39 | val etInstace = Reflection(ClassPath.from(etClass)).instance[ETPlugin]
40 | val etName = annotation.name()
41 |
42 | registry.put(etName, etInstace)
43 | etName
44 | }
45 | logInfo(s"""A total of ${allETs.size} ET plugins scanned: [${ets.mkString(", ")}].""")
46 | }
47 |
48 | def find(etName:String, options:Map[String,String]=Map()):Option[ETPlugin] = {
49 | if (registry.containsKey(etName)) {
50 | Some(registry.get(etName))
51 | } else {
52 | None
53 | }
54 | }
55 |
56 | override def beforeSQLRuntime(idesConf: IdesConf): Unit = { register(idesConf) }
57 |
58 | override def afterSQLRuntime(idesConf: IdesConf): Unit = {}
59 | }
60 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/job/ScriptJobInfo.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.job
2 |
3 | import tech.ides.job.ScriptJobType.jobType
4 |
5 |
6 | /**
7 | * Created by songgr on 2020/11/06.
8 | */
9 |
10 | /**
11 | * Script job info
12 | * @param owner script owner
13 | * @param jobType script job type: batch/stream
14 | * @param jobName job name
15 | * @param jobContent script job content
16 | * @param groupId job groupId
17 | * @param progress script job progress
18 | * @param startTime script job start time
19 | * @param timeout script job timeout (ms)
20 | */
21 | case class ScriptJobInfo(
22 | owner: String,
23 | jobType: jobType,
24 | jobName: String,
25 | jobContent: String,
26 | groupId: String,
27 | progress: ScriptJobProgress,
28 | startTime: Long,
29 | timeout: Long
30 | )
31 |
32 | /**
33 | * Script job type
34 | */
35 | object ScriptJobType extends Enumeration {
36 | type jobType = Value
37 | val BATCH = Value("batch")
38 | val STREAM = Value("stream")
39 | }
40 |
41 | /**
42 | * Script job progress
43 | * @param totalJob total number of jobs
44 | * @param currentJobIndex current job index
45 | * @param currentJobContent current job content
46 | */
47 | case class ScriptJobProgress(var totalJob:Int = 0, var currentJobIndex:Int = 0, var currentJobContent:String = "") {
48 | def increment = currentJobIndex += 1
49 | }
50 |
51 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/job/ScriptJobManager.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.job
2 |
3 | import java.util.UUID
4 | import java.util.concurrent.ConcurrentHashMap
5 | import scala.collection.JavaConverters._
6 |
7 | /**
8 | * 脚本作业管理器
9 | * Created by songgr on 2020/10/23.
10 | */
11 | object ScriptJobManager {
12 |
13 | private[this] val groupIdToJobInfo = new ConcurrentHashMap[String, ScriptJobInfo]()
14 |
15 | def newJob(owner: String,
16 | jobType: String,
17 | jobName: String,
18 | jobContent: String,
19 | timeout: Long):ScriptJobInfo = {
20 | val scriptJobType = ScriptJobType.withName(jobType)
21 | val groupId = nextGroupId
22 | val startTime = System.currentTimeMillis()
23 | ScriptJobInfo(owner,scriptJobType, jobName, jobContent, groupId, ScriptJobProgress(), startTime, timeout)
24 | }
25 |
26 | def nextGroupId = UUID.randomUUID().toString
27 |
28 | def getScriptJobInfo = groupIdToJobInfo.asScala.toMap
29 |
30 | }
31 |
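A hypothetical usage sketch of the manager above (all values are illustrative):

    import tech.ides.job.ScriptJobManager

    // build a batch job record; note that newJob only constructs the ScriptJobInfo,
    // it does not register it in the groupIdToJobInfo map shown above
    val job = ScriptJobManager.newJob(
      owner = "admin",
      jobType = "batch",
      jobName = "demo-job",
      jobContent = "select 1 as t1;",
      timeout = 60000L)

    job.progress.totalJob = 1
    job.progress.increment   // currentJobIndex: 0 -> 1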
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/metastore/ConnectMetaStore.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.metastore
2 |
3 | import tech.ides.conf.IdesConf
4 | import java.util.concurrent.ConcurrentHashMap
5 | import java.util.concurrent.atomic.{AtomicBoolean, AtomicReference}
6 | import tech.ides.conf.IdesConf.EXTERNAL_STORAGE_IMPL_CLASS_NAME
7 | import tech.ides.storage.{ExternalStorage, ExternalStorageInfo}
8 | import tech.sqlclub.common.log.Logging
9 | import tech.sqlclub.common.reflect.{ClassPath, Reflection}
10 | import collection.JavaConverters._
11 |
12 | /**
13 |  * Connection metadata store
14 | * Created by songgr on 2021/02/23.
15 | */
16 | object ConnectMetaStore extends Logging {
17 | private val connectMapping = new ConcurrentHashMap[ConnectMappingKey, Map[String, String]]()
18 | private[this] val externalStorage: AtomicReference[ExternalStorage] = new AtomicReference[ExternalStorage]
19 | private[this] val flag = new AtomicBoolean(false)
20 | if (!flag.get) {
21 | flag.synchronized {
22 | if (!flag.get) {
23 | try {
24 | // load the persisted connection information
25 | connectMapping.putAll(initialize)
26 | logInfo("ConnectMetaStore initialized successfully! Catalog info: " + catalog.keys.mkString(","))
27 | } catch {
28 | case e: Exception =>
29 | logError(e.getMessage, e)
30 | }
31 | flag.set(true)
32 | }
33 | }
34 | }
35 |
36 | def storage = externalStorage.get()
37 |
38 | private[this] def initialize = {
39 | val conf = IdesConf.getOrCreate
40 | val storageImplClass = conf.get(EXTERNAL_STORAGE_IMPL_CLASS_NAME)
41 | val externalStorage = Reflection(ClassPath.from(storageImplClass)).instance[ExternalStorage]
42 | this.externalStorage.set(externalStorage)
43 | val connectMetaData = externalStorage.readConfig(ExternalStorageInfo.ConnectMetaData)
44 | connectMetaData.map{
45 | it =>
46 | val connectName = it._1
47 | val format = it._2.getOrElse("format", null)
48 | if (format == null) {
49 | (null, it._2)
50 | } else {
51 | (ConnectMappingKey(format, connectName), it._2)
52 | }
53 | }.filterNot(_._1 == null).asJava
54 | }
55 |
56 | def add(key:ConnectMappingKey, options:Map[String,String]) = {
57 | this.synchronized {
58 | connectMapping.put(key, options)
59 | }
60 | }
61 |
62 | def remove(key: ConnectMappingKey) = {
63 | this.synchronized {
64 | connectMapping.remove(key)
65 | }
66 | }
67 |
68 | def options(key: ConnectMappingKey) = catalog.get(key)
69 |
70 | def catalog = connectMapping.asScala.toMap
71 | }
72 |
73 | case class ConnectMappingKey(format:String, connectName:String) {
74 | override def toString: String = format + "." + connectName
75 | }
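A hypothetical usage sketch of the store above (connection values are illustrative):

    import tech.ides.metastore.{ConnectMappingKey, ConnectMetaStore}

    // register a jdbc connection under the name "test"
    val key = ConnectMappingKey("jdbc", "test")
    ConnectMetaStore.add(key, Map(
      "url"    -> "jdbc:mysql://127.0.0.1:3306/test",
      "driver" -> "com.mysql.jdbc.Driver",
      "user"   -> "test"))

    // options returns Option[Map[String, String]] from the in-memory catalog
    val opts = ConnectMetaStore.options(key)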
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/storage/ExternalStorage.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.storage
2 |
3 | import tech.ides.storage.ExternalStorageInfo.StorageInfo
4 |
5 | /**
6 |  * External storage data interface
7 | * Created by songgr on 2021/02/15.
8 | */
9 | trait ExternalStorage {
10 |
11 | def rootPath = "__datalinked__/externalStorage"
12 |
13 | /**
14 |  * Get the specified config entry
15 | */
16 | def readConfig(configId:String, storageInfo:StorageInfo, key:String): Option[String]
17 |
18 | /**
19 |  * Get all config entries for the specified configId
20 | */
21 | def readConfig(configId:String, storageInfo:StorageInfo):Map[String, String]
22 |
23 | /**
24 |  * Get all config entries
25 | */
26 | def readConfig(storageInfo:StorageInfo):Map[String,Map[String,String]]
27 |
28 | /**
29 |  * Save a single config entry
30 | */
31 | def saveConfig(configId:String, storageInfo:StorageInfo, key:String, value:String, overwrite:Boolean):Unit
32 |
33 | /**
34 |  * Save all config entries
35 | */
36 | def saveConfig(configId:String, storageInfo:StorageInfo, configMap:Map[String,String], overwrite:Boolean):Unit
37 |
38 | /**
39 |  * Delete all config entries for the specified configId
40 | */
41 | def dropConfig(configId:String, storageInfo:StorageInfo):Boolean
42 |
43 | }
44 |
45 | object ExternalStorageInfo extends Enumeration {
46 | type StorageInfo = Value
47 | val ConnectMetaData = Value("connectMetaData")
48 | }
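A minimal in-memory sketch of the contract above (hypothetical, not part of the project; it only illustrates how the read/save/drop methods relate to each other):

    import scala.collection.mutable
    import tech.ides.storage.ExternalStorage
    import tech.ides.storage.ExternalStorageInfo.StorageInfo

    class InMemoryExternalStorage extends ExternalStorage {
      // storageInfo -> (configId -> (key -> value))
      private val store = mutable.Map.empty[StorageInfo, mutable.Map[String, Map[String, String]]]

      override def readConfig(configId: String, storageInfo: StorageInfo, key: String): Option[String] =
        readConfig(configId, storageInfo).get(key)

      override def readConfig(configId: String, storageInfo: StorageInfo): Map[String, String] =
        readConfig(storageInfo).getOrElse(configId, Map.empty)

      override def readConfig(storageInfo: StorageInfo): Map[String, Map[String, String]] =
        store.getOrElse(storageInfo, mutable.Map.empty).toMap

      override def saveConfig(configId: String, storageInfo: StorageInfo, key: String, value: String, overwrite: Boolean): Unit =
        saveConfig(configId, storageInfo, Map(key -> value), overwrite)

      override def saveConfig(configId: String, storageInfo: StorageInfo, configMap: Map[String, String], overwrite: Boolean): Unit = {
        val byId = store.getOrElseUpdate(storageInfo, mutable.Map.empty)
        // overwrite replaces the whole entry, otherwise merge with what is already stored
        val merged = if (overwrite) configMap else byId.getOrElse(configId, Map.empty) ++ configMap
        byId.put(configId, merged)
      }

      override def dropConfig(configId: String, storageInfo: StorageInfo): Boolean =
        store.get(storageInfo).exists(_.remove(configId).isDefined)
    }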
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/storage/impl/YamlFileExternalStorage.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.storage.impl
2 |
3 | import tech.ides.core.ApplicationSetting
4 | import tech.ides.storage.ExternalStorage
5 | import tech.ides.storage.ExternalStorageInfo.StorageInfo
6 | import tech.sqlclub.common.exception.SQLClubException
7 | import tech.sqlclub.common.utils.Assert
8 |
9 | /**
10 | * external storage implement by yaml config file
11 | * Created by songgr on 2022/05/19.
12 | */
13 | class YamlFileExternalStorage extends ExternalStorage {
14 | /**
15 |  * Get the specified config entry
16 | */
17 | override def readConfig(configId: String, storageInfo: StorageInfo, key: String): Option[String] = {
18 | readConfig(configId, storageInfo).get(key)
19 | }
20 |
21 | /**
22 |  * Get all config entries for the specified configId
23 | */
24 | override def readConfig(configId: String, storageInfo: StorageInfo): Map[String, String] = {
25 | readConfig(storageInfo).getOrElse(configId, Map.empty[String, String])
26 | }
27 |
28 | /**
29 |  * Get all config entries
30 | */
31 | override def readConfig(storageInfo: StorageInfo): Map[String, Map[String, String]] = {
32 | ApplicationSetting.getList(storageInfo.toString).map(item => {
33 | Assert.isTrue(item.contains("format"), "config format is not defined")
34 | Assert.isTrue(item.contains("name"), "config name is not defined")
35 | val name = item("name").asInstanceOf[String]
36 | val m = item - "name"
37 | name -> m.map(it => it._1 -> it._2.asInstanceOf[String])
38 | }).toMap
39 | }
40 |
41 | /**
42 |  * Save a single config entry
43 | */
44 | override def saveConfig(configId: String, storageInfo: StorageInfo, key: String, value: String, overwrite: Boolean): Unit = {
45 | throw new SQLClubException(s"${getClass.getName} not support saveConfig !")
46 | }
47 |
48 | /**
49 |  * Save all config entries
50 | */
51 | override def saveConfig(configId: String, storageInfo: StorageInfo, configMap: Map[String, String], overwrite: Boolean): Unit = {
52 | throw new SQLClubException(s"${getClass.getName} not support saveConfig !")
53 | }
54 |
55 | /**
56 |  * Delete all config entries for the specified configId
57 | */
58 | override def dropConfig(configId: String, storageInfo: StorageInfo): Boolean = {
59 | throw new SQLClubException(s"${getClass.getName} not support dropConfig !")
60 | }
61 |
62 | }
63 |
--------------------------------------------------------------------------------
/core/src/main/java/tech/ides/strategy/PlatformStrategyCenter.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.strategy
2 |
3 | import org.apache.commons.lang3.StringUtils
4 | import tech.ides.conf.IdesConf.IDES_RUN_PLATFORM_FRAME
5 | import org.apache.spark.sql.{DataFrame, DataFrameReader, DataFrameWriter, Row}
6 | import tech.ides.conf.IdesConf
7 | import tech.ides.datasource.reader.Reader
8 | import tech.ides.datasource.writer.Writer
9 | import tech.ides.strategy.PlatformFrameEnum.SPARK
10 | import tech.sqlclub.common.utils.Assert
11 |
12 |
13 | /**
14 |  * Platform strategy center
15 |  * Shields the strategy differences caused by different platform frameworks
16 | * Created by songgr on 2022/02/20.
17 | */
18 | object PlatformStrategyCenter {
19 | /**
20 |  * Data reading, Spark implementation
21 | *
22 | * @param reader {@link DataFrameReader}
23 | */
24 | case class SparkDataReader(reader: DataFrameReader) extends Reader {
25 | override def getReader[T: Manifest]: T = {
26 | Assert.isTrue(manifest[DataFrameReader].equals(manifest[T]),
27 | "the generic type must be DataFrameReader when using spark to read data!")
28 | reader.asInstanceOf[T]
29 | }
30 | }
31 |
32 |
33 | /**
34 |  * Data writing, Spark implementation
35 | *
36 | * @param writer {@link DataFrameWriter}
37 | */
38 | case class SparkDataWriter(writer: DataFrameWriter[Row]) extends Writer {
39 | override def getWriter[T:Manifest]: T = {
40 | Assert.isTrue(manifest[DataFrameWriter[Row]].equals(manifest[T]),
41 | "the generic type must be DataFrameWriter[Row] when using spark to write data!")
42 | writer.asInstanceOf[T]
43 | }
44 | }
45 |
46 | /**
47 |  * Framework used by the platform
48 | * @return
49 | */
50 | def platformFrame = {
51 | val platformFrame = IdesConf.getOrCreate.get(IDES_RUN_PLATFORM_FRAME)
52 | platformFrame match {
53 | case _ if StringUtils.equalsIgnoreCase(SPARK.frame, platformFrame) => SPARK
54 | case _ => SPARK
55 | }
56 | }
57 |
58 | }
59 |
60 |
61 |
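A hypothetical usage sketch of the reader wrapper above (it assumes an existing local SparkSession; paths and options are illustrative):

    import org.apache.spark.sql.{DataFrameReader, SparkSession}
    import tech.ides.strategy.PlatformStrategyCenter.SparkDataReader

    val spark = SparkSession.builder().appName("demo").master("local[*]").getOrCreate()

    val reader = SparkDataReader(spark.read)
    // the generic type must match DataFrameReader, otherwise the assertion inside getReader fails
    val dfReader: DataFrameReader = reader.getReader[DataFrameReader]
    val df = dfReader.format("csv").option("header", "true").load("/tmp/data.csv")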
--------------------------------------------------------------------------------
/core/src/test/java/tech/ides/core/test/ListenerTest.java:
--------------------------------------------------------------------------------
1 | package tech.ides.core.test;
2 |
3 | import ides.dsl.parser.*;
4 | import org.antlr.v4.runtime.CharStream;
5 | import org.antlr.v4.runtime.CommonTokenStream;
6 | import org.antlr.v4.runtime.tree.ParseTreeWalker;
7 | import tech.ides.dsl.CaseChangeCharStream;
8 | import tech.ides.dsl.listener.ScriptQueryExecListener;
9 |
10 | import java.io.IOException;
11 |
12 | /**
13 | * Created by songgr on 2020/10/23.
14 | */
15 | public class ListenerTest {
16 | public static void main(String[] args) throws IOException {
17 | // String expr = "load hive.`a.bc` where a.aa.a=1 and a.b = 's' and a='''ssdsde.sdede''' as table1;";
18 | String expr = "%py \n" +
19 | "print('>')" +
20 | "exit 1 " +
21 | "\n%\n" +
22 | "> abc\n" +
23 | "select 1 AS \n" +
24 | "\n" +
25 | "\n" +
26 | "\n" +
27 | "\n" +
28 | "\n" +
29 | " Tb1;";
30 | System.out.println(expr);
31 | CharStream cpcs = new CaseChangeCharStream(expr);
32 | IdesLexer idesDslLexer = new IdesLexer(cpcs);
33 |
34 | CommonTokenStream tokenStream = new CommonTokenStream(idesDslLexer);
35 |
36 | IdesParser parser = new IdesParser(tokenStream);
37 | ScriptQueryExecListener listener = new ScriptQueryExecListener(null, "", "test");
38 |
39 | IdesParser.StatementContext statement = parser.statement();
40 |
41 | ParseTreeWalker.DEFAULT.walk(listener, statement);
42 | }
43 | }
44 |
45 |
--------------------------------------------------------------------------------
/docs/imgs/ides.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/docs/imgs/ides.png
--------------------------------------------------------------------------------
/docs/imgs/introduce.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/docs/imgs/introduce.jpg
--------------------------------------------------------------------------------
/docs/imgs/slack_icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/docs/imgs/slack_icon.png
--------------------------------------------------------------------------------
/dsl/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <artifactId>DataLinked</artifactId>
7 |         <groupId>tech.ides</groupId>
8 |         <version>${env.project.version}</version>
9 |     </parent>
10 |     <modelVersion>4.0.0</modelVersion>
11 | 
12 |     <artifactId>ides-dsl-${spark.big.version}_${scala.binary.version}</artifactId>
13 | 
14 | 
15 |     <dependencies>
16 |         <dependency>
17 |             <groupId>org.antlr</groupId>
18 |             <artifactId>antlr4-runtime</artifactId>
19 |             <version>4.7.1</version>
20 |         </dependency>
21 |     </dependencies>
22 | 
23 | </project>
--------------------------------------------------------------------------------
/dsl/src/main/java/ides/dsl/parser/IdesLexer.tokens:
--------------------------------------------------------------------------------
1 | SCALA_MODE=1
2 | PY_MODE=2
3 | SQL_MODE=3
4 | SHELL_MODE=4
5 | AS=5
6 | INTO=6
7 | LOAD=7
8 | SAVE=8
9 | SELECT=9
10 | OPTIONS=10
11 | WHERE=11
12 | AND=12
13 | OVERWRITE=13
14 | APPEND=14
15 | ERRORIfExists=15
16 | IGNORE=16
17 | PARTITIONBY=17
18 | CONNECT=18
19 | SET=19
20 | CONNECTION=20
21 | FUNCTION=21
22 | CREATE=22
23 | DROP=23
24 | REGISTER=24
25 | RUN=25
26 | EXEC_TOKEN=26
27 | DOT=27
28 | EQ=28
29 | COMMA=29
30 | GT=30
31 | LT=31
32 | HYPHEN=32
33 | UNDERLINE=33
34 | SLASH=34
35 | TILDE=35
36 | COLON=36
37 | ASTERISK=37
38 | VERTICAL_BAR=38
39 | EOQ=39
40 | MUMERIC=40
41 | IDENTIFIER=41
42 | QUOTED_TEXT=42
43 | STRING_TEXT=43
44 | BLOCK_STRING_TEXT=44
45 | VARIABLE=45
46 | WS=46
47 | LINE_COMMENT=47
48 | BLOCK_COMMENT=48
49 | NL=49
50 | UNRECOGNIZED=50
51 | EXIT_SCALA=51
52 | SCALA_RETURN=52
53 | SCALA_STRING=53
54 | SCALA_NonEnd=54
55 | SCALA_IMPORT=55
56 | SCALA_CLASS_SEGMENT=56
57 | SCALA_METHOD_SEGMENT=57
58 | SCALA_BLOCK_BODY=58
59 | SCALA_METHOD=59
60 | SCALA_CLASS=60
61 | SCALA_TEXT=61
62 | SCALA_COMMENT=62
63 | SCALA_COMMENT_BLOCK=63
64 | SCALA_WS=64
65 | EXIT_PY=65
66 | PY_RETURN=66
67 | PY_STRING=67
68 | VariableRef=68
69 | PY_NonEnd=69
70 | PY_TEXT=70
71 | PY_COMMENT=71
72 | PY_WS=72
73 | EXIT_SQL=73
74 | SQL_RETURN=74
75 | SQL_TEXT=75
76 | DDL=76
77 | DML=77
78 | Profile=78
79 | SQL_COMMENT1=79
80 | SQL_COMMENT2=80
81 | SQL_COMMENT_BLOCK=81
82 | CreatStatement=82
83 | AlterStatement=83
84 | DropStatement=84
85 | RenameStatement=85
86 | TruncateStatement=86
87 | SelectStatement=87
88 | InsertStatement=88
89 | UpdateStatement=89
90 | DeleteStatement=90
91 | ReplaceStatement=91
92 | UseStatement=92
93 | ShowStatement=93
94 | ExplainStatement=94
95 | SetStatement=95
96 | CallStatement=96
97 | OpenStatement=97
98 | CloseStatement=98
99 | TransactionStatement=99
100 | CommitStatement=100
101 | RollbackStatement=101
102 | SQL_WS=102
103 | EXIT_SH=103
104 | SH_RETURN=104
105 | SH_STRING=105
106 | SH_NonEnd=106
107 | SHELL_TEXT=107
108 | SEHLL_COMMENT=108
109 | SH_WS=109
110 | 'as'=5
111 | 'into'=6
112 | 'load'=7
113 | 'save'=8
114 | 'select'=9
115 | 'options'=10
116 | 'where'=11
117 | 'and'=12
118 | 'overwrite'=13
119 | 'append'=14
120 | 'errorIfExists'=15
121 | 'ignore'=16
122 | 'connect'=18
123 | 'set'=19
124 | 'connection'=20
125 | 'function'=21
126 | 'create'=22
127 | 'drop'=23
128 | 'register'=24
129 | 'run'=25
130 | '!'=26
131 | '.'=27
132 | '='=28
133 | ','=29
134 | '>'=30
135 | '<'=31
136 | '-'=32
137 | '_'=33
138 | '/'=34
139 | '~'=35
140 | ':'=36
141 | '*'=37
142 | '|'=38
143 | ';'=39
144 |
--------------------------------------------------------------------------------
/dsl/src/main/java/ides/dsl/parser/IdesParser.tokens:
--------------------------------------------------------------------------------
1 | SCALA_MODE=1
2 | PY_MODE=2
3 | SQL_MODE=3
4 | SHELL_MODE=4
5 | AS=5
6 | INTO=6
7 | LOAD=7
8 | SAVE=8
9 | SELECT=9
10 | OPTIONS=10
11 | WHERE=11
12 | AND=12
13 | OVERWRITE=13
14 | APPEND=14
15 | ERRORIfExists=15
16 | IGNORE=16
17 | PARTITIONBY=17
18 | CONNECT=18
19 | SET=19
20 | CONNECTION=20
21 | FUNCTION=21
22 | CREATE=22
23 | DROP=23
24 | REGISTER=24
25 | RUN=25
26 | EXEC_TOKEN=26
27 | DOT=27
28 | EQ=28
29 | COMMA=29
30 | GT=30
31 | LT=31
32 | HYPHEN=32
33 | UNDERLINE=33
34 | SLASH=34
35 | TILDE=35
36 | COLON=36
37 | ASTERISK=37
38 | VERTICAL_BAR=38
39 | EOQ=39
40 | MUMERIC=40
41 | IDENTIFIER=41
42 | QUOTED_TEXT=42
43 | STRING_TEXT=43
44 | BLOCK_STRING_TEXT=44
45 | VARIABLE=45
46 | WS=46
47 | LINE_COMMENT=47
48 | BLOCK_COMMENT=48
49 | NL=49
50 | UNRECOGNIZED=50
51 | EXIT_SCALA=51
52 | SCALA_RETURN=52
53 | SCALA_STRING=53
54 | SCALA_NonEnd=54
55 | SCALA_IMPORT=55
56 | SCALA_CLASS_SEGMENT=56
57 | SCALA_METHOD_SEGMENT=57
58 | SCALA_BLOCK_BODY=58
59 | SCALA_METHOD=59
60 | SCALA_CLASS=60
61 | SCALA_TEXT=61
62 | SCALA_COMMENT=62
63 | SCALA_COMMENT_BLOCK=63
64 | SCALA_WS=64
65 | EXIT_PY=65
66 | PY_RETURN=66
67 | PY_STRING=67
68 | VariableRef=68
69 | PY_NonEnd=69
70 | PY_TEXT=70
71 | PY_COMMENT=71
72 | PY_WS=72
73 | EXIT_SQL=73
74 | SQL_RETURN=74
75 | SQL_TEXT=75
76 | DDL=76
77 | DML=77
78 | Profile=78
79 | SQL_COMMENT1=79
80 | SQL_COMMENT2=80
81 | SQL_COMMENT_BLOCK=81
82 | CreatStatement=82
83 | AlterStatement=83
84 | DropStatement=84
85 | RenameStatement=85
86 | TruncateStatement=86
87 | SelectStatement=87
88 | InsertStatement=88
89 | UpdateStatement=89
90 | DeleteStatement=90
91 | ReplaceStatement=91
92 | UseStatement=92
93 | ShowStatement=93
94 | ExplainStatement=94
95 | SetStatement=95
96 | CallStatement=96
97 | OpenStatement=97
98 | CloseStatement=98
99 | TransactionStatement=99
100 | CommitStatement=100
101 | RollbackStatement=101
102 | SQL_WS=102
103 | EXIT_SH=103
104 | SH_RETURN=104
105 | SH_STRING=105
106 | SH_NonEnd=106
107 | SHELL_TEXT=107
108 | SEHLL_COMMENT=108
109 | SH_WS=109
110 | 'as'=5
111 | 'into'=6
112 | 'load'=7
113 | 'save'=8
114 | 'select'=9
115 | 'options'=10
116 | 'where'=11
117 | 'and'=12
118 | 'overwrite'=13
119 | 'append'=14
120 | 'errorIfExists'=15
121 | 'ignore'=16
122 | 'connect'=18
123 | 'set'=19
124 | 'connection'=20
125 | 'function'=21
126 | 'create'=22
127 | 'drop'=23
128 | 'register'=24
129 | 'run'=25
130 | '!'=26
131 | '.'=27
132 | '='=28
133 | ','=29
134 | '>'=30
135 | '<'=31
136 | '-'=32
137 | '_'=33
138 | '/'=34
139 | '~'=35
140 | ':'=36
141 | '*'=37
142 | '|'=38
143 | ';'=39
144 |
--------------------------------------------------------------------------------
/dsl/src/main/resources/IdesParser.g4:
--------------------------------------------------------------------------------
1 | parser grammar IdesParser;
2 |
3 | @header {
4 | package ides.dsl.parser;
5 | }
6 |
7 | options { tokenVocab=IdesLexer; }
8 |
9 | // a script is composed of 0 to n idesScript statements
10 | statement
11 | : idesScript*
12 | ;
13 |
14 | idesScript
15 | : script
16 | ;
17 |
18 | script
19 | : query EOQ # Iql
20 | | SCALA_MODE scalaCode EXIT_SCALA NL? # Scala
21 | | PY_MODE pythonCode EXIT_PY NL? outTable? # Py
22 | | SQL_MODE sqlCode EXIT_SQL NL? outTable? # Sql
23 | | SHELL_MODE shellCode EXIT_SH NL? outTable? # Sh
24 | ;
25 |
26 | scalaCode
27 | : scalaStatement*
28 | ;
29 |
30 | scalaStatement
31 | : SCALA_TEXT
32 | ;
33 |
34 | pythonCode
35 | : pyStatement*
36 | ;
37 |
38 | pyStatement
39 | : PY_TEXT
40 | ;
41 |
42 | sqlCode
43 | : sqlStatement*
44 | ;
45 |
46 | sqlStatement
47 | : SQL_TEXT
48 | ;
49 |
50 | shellCode
51 | : shellStatement*
52 | ;
53 |
54 | shellStatement
55 | : SHELL_TEXT
56 | ;
57 |
58 | // query statement rules
59 | query
60 | : LOAD format DOT path whereExpressions? asAsset # Load
61 | | SAVE assetName saveMode? INTO format DOT path whereExpressions? partitionbyExpression? # Save
62 | | SELECT ~(EOQ)+ asAsset # Select
63 | | CONNECT format whereExpressions asAsset # Connect
64 | | SET expression whereExpressions? # Set
65 | | DROP resource=(CONNECTION|FUNCTION) format DOT assetName # Drop
66 | | CREATE ~(EOQ)+ # Create
67 | | REGISTER module DOT path whereExpressions? asAsset? # Register
68 | | RUN module DOT path whereExpressions? asAsset? # Run
69 | | EXEC_TOKEN module commandParam? # Command
70 | ;
71 |
72 | module
73 | : identifier
74 | | quotedIdentifier
75 | ;
76 |
77 | format
78 | : identifier
79 | | quotedIdentifier
80 | ;
81 |
82 | path
83 | : quotedIdentifier
84 | ;
85 |
86 | col
87 | : identifier
88 | ;
89 |
90 | colGroup
91 | : COMMA col
92 | ;
93 |
94 | whereExpressions
95 | : where expression booleanExpression*
96 | ;
97 |
98 | partitionbyExpression
99 | : PARTITIONBY col colGroup*
100 | ;
101 |
102 | booleanExpression
103 | : AND expression
104 | ;
105 |
106 | keyName
107 | : qualifiedName
108 | ;
109 | valueName
110 | : mumericValue
111 | | stringValue
112 | ;
113 |
114 | mumericValue
115 | : MUMERIC
116 | ;
117 |
118 | stringValue
119 | : STRING_TEXT
120 | | BLOCK_STRING_TEXT
121 | | QUOTED_TEXT
122 | ;
123 |
124 | expression
125 | : keyName EQ valueName
126 | ;
127 |
128 | qualifiedName
129 | : identifier (DOT identifier)*
130 | ;
131 |
132 | asAsset
133 | : AS assetName
134 | ;
135 |
136 | assetName
137 | : identifier
138 | | quotedIdentifier
139 | ;
140 |
141 | identifier
142 | : IDENTIFIER
143 | | LOAD
144 | | SAVE
145 | | SELECT
146 | | CONNECT
147 | | SET
148 | | DROP
149 | | CREATE
150 | | REGISTER
151 | | RUN
152 | ;
153 |
154 | quotedIdentifier
155 | : QUOTED_TEXT
156 | ;
157 |
158 | where: OPTIONS|WHERE;
159 | saveMode: OVERWRITE|APPEND|ERRORIfExists|IGNORE;
160 |
161 | outTable : GT assetName;
162 |
163 | commandSymbol : (DOT| COMMA | GT | LT | HYPHEN | UNDERLINE | SLASH | TILDE | COLON | ASTERISK | VERTICAL_BAR);
164 |
165 | commandParam : (commandSymbol | identifier | quotedIdentifier | stringValue | mumericValue)+;
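A few illustrative statements the query rule above is meant to accept (formats, paths and option names here are examples only, except the run/exec forms, which mirror the project's own tests and plugin docs):

    load csv.`/tmp/data.csv` where header='true' as t1;
    select 1 as t2;
    connect jdbc where url='jdbc:mysql://127.0.0.1:3306/test' and driver='com.mysql.jdbc.Driver' as test;
    save t1 overwrite into csv.`/tmp/out`;
    run GetPartitionNum.`t1` as t3;
    !hdfs -cp /tmp/abc.txt /tmp/dd;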
--------------------------------------------------------------------------------
/dsl/src/test/java/ides/dsl/parser/ListenerTest.java:
--------------------------------------------------------------------------------
1 | package ides.dsl.parser;
2 |
3 | import org.antlr.v4.runtime.*;
4 | import org.antlr.v4.runtime.misc.Interval;
5 | import org.antlr.v4.runtime.tree.ParseTreeWalker;
6 |
7 | /**
8 | * Created by songgr on 2020/10/23.
9 | */
10 | public class ListenerTest {
11 | public static void main(String[] args) {
12 | String expr = "load hive.`a.bc` as table1;\n" +
13 | "\n----注释" +
14 | "\n" +
15 | "\n/*------*/" +
16 | "\n" +
17 | "\n" +
18 | "\n" +
19 | "load jsonStr.`abc` as table1;\n" +
20 | "load hive.`a.abc` as table1;\n" +
21 | "!hdfs -cp /tmp/abc.txt /tmp/dd;";
22 | CodePointCharStream cpcs = CharStreams.fromString(expr);
23 | IdesLexer idesDslLexer = new IdesLexer(cpcs);
24 |
25 | CommonTokenStream tokenStream = new CommonTokenStream(idesDslLexer);
26 |
27 | IdesParser parser = new IdesParser(tokenStream);
28 | MyListener listener = new MyListener();
29 |
30 | IdesParser.StatementContext statement = parser.statement();
31 |
32 | ParseTreeWalker.DEFAULT.walk(listener, statement);
33 | }
34 | }
35 |
36 | class MyListener extends IdesParserBaseListener {
37 | @Override
38 | public void exitSql(IdesParser.SqlContext ctx) {
39 | System.out.println(ctx.getText());
40 | }
41 |
42 | @Override
43 | public void exitLoad(IdesParser.LoadContext ctx) {
44 | String text = ctx.format().getText();
45 | System.out.println("load -----> " + text);
46 |
47 | }
48 |
49 | @Override
50 | public void exitCommand(IdesParser.CommandContext ctx) {
51 | String text = ctx.commandParam().getText();
52 | System.out.println("exec=======>" + text);
53 | String commandParam = currentText(ctx.commandParam());
54 |
55 | System.out.println(commandParam);
56 | System.out.println(commandParam.split(" "));
57 |
58 | super.exitCommand(ctx);
59 | }
60 |
61 | public String currentText(ParserRuleContext ctx) {
62 | if ( ctx == null ) return null;
63 | IdesLexer lexer = (IdesLexer)ctx.start.getTokenSource();
64 | CharStream input = lexer._input;
65 |
66 | int start = ctx.start.getStartIndex();
67 | int stop = ctx.stop.getStopIndex();
68 | Interval interval = new Interval(start, stop);
69 | return input.getText(interval);
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/dsl/src/test/java/ides/dsl/parser/VisitorTest.java:
--------------------------------------------------------------------------------
1 | package ides.dsl.parser;
2 |
3 | import org.antlr.v4.runtime.CharStreams;
4 | import org.antlr.v4.runtime.CodePointCharStream;
5 | import org.antlr.v4.runtime.CommonTokenStream;
6 |
7 | /**
8 | * Created by songgr on 2020/10/22.
9 | */
10 | public class VisitorTest {
11 |
12 | public static void main(String[] args) {
13 | String expr =
14 | "load hive.`a.bc` as table1;\n" +
15 | "\n----注释" +
16 | "\n" +
17 | "\n/*------*/" +
18 | "\n" +
19 | "\n" +
20 | "\n" +
21 | "load jsonStr.`abc` as table1;\n" +
22 | "load hive.`a.abc` as table1;\n" +
23 | "save a;";
24 | CodePointCharStream cpcs = CharStreams.fromString(expr);
25 | IdesLexer helloLexer = new IdesLexer(cpcs);
26 |
27 | CommonTokenStream tokenStream = new CommonTokenStream(helloLexer);
28 |
29 | IdesParser parser = new IdesParser(tokenStream);
30 |
31 |
32 | IdesParser.StatementContext statement = parser.statement();
33 |
34 | MyVisitor myVisitor = new MyVisitor();
35 | myVisitor.visit(statement);
36 |
37 | System.out.println("sql: " + myVisitor.cnt);
38 | System.out.println("load: " + myVisitor.loadcnt);
39 | }
40 |
41 | }
42 |
43 | class MyVisitor extends IdesParserBaseVisitor {
44 | int cnt = 0;
45 | int loadcnt = 0;
46 |
47 | @Override
48 | public Object visitSql(IdesParser.SqlContext ctx) {
49 |
50 | cnt += 1;
51 | System.out.println(ctx.getText());
52 | return super.visitSql(ctx);
53 | }
54 |
55 | @Override
56 | public Object visitLoad(IdesParser.LoadContext ctx) {
57 | loadcnt += 1;
58 | System.out.println("load ---> " + ctx.format().getText());
59 | return super.visitLoad(ctx);
60 | }
61 | }
--------------------------------------------------------------------------------
/engine/src/main/java/tech/ides/datasource/impl/CSVDataSource.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.datasource.impl
2 |
3 | import tech.ides.datasource.{BaseFileDataSource, DataSource}
4 | import tech.ides.datasource.DataSource.Method._
5 | import tech.ides.strategy.PlatformFrameEnum.SPARK
6 | import tech.ides.strategy.PlatformFrameImpl
7 |
8 | /**
9 |  * CSV file data source
10 | * Created by songgr on 2020/10/26.
11 | */
12 | @DataSource(
13 | types = Array(SOURCE, SINK),
14 | name = "csv",
15 | sinceVersion = "1.0.0"
16 | )
17 | @PlatformFrameImpl(frameName = SPARK)
18 | class CSVDataSource extends BaseFileDataSource {
19 |
20 | override def fullFormat: String = "csv"
21 |
22 | override def shortFormat: String = fullFormat
23 | }
24 |
--------------------------------------------------------------------------------
/engine/src/main/java/tech/ides/datasource/impl/TextDataSource.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.datasource.impl
2 |
3 | import org.apache.spark.sql.types.StringType
4 | import tech.ides.datasource.DataSource.Method._
5 | import tech.ides.datasource.{BaseFileDataSource, DataSinkConfig, DataSource, DataSourceConfig, DataTable, SparkDataTable}
6 | import tech.ides.constants.ScriptConstants.IMPL_CLASS
7 | import org.apache.spark.sql.{DataFrame, DataFrameReader, DataFrameWriter, Row, SaveMode, functions => F}
8 | import tech.ides.datasource.reader.Reader
9 | import tech.ides.datasource.writer.Writer
10 | import tech.ides.exception.IdesException
11 | import tech.ides.strategy.PlatformFrameEnum.SPARK
12 | import tech.ides.strategy.PlatformFrameImpl
13 |
14 | /**
15 |  * Text file data source
16 | * Created by songgr on 2020/10/26.
17 | */
18 | @DataSource(
19 | types = Array(SOURCE, SINK),
20 | name = "text",
21 | sinceVersion = "1.0.0"
22 | )
23 | @PlatformFrameImpl(frameName = SPARK)
24 | class TextDataSource extends BaseFileDataSource {
25 |
26 | override def load(reader: Reader, config: DataSourceConfig): DataTable = {
27 | val format = config.config.getOrElse(IMPL_CLASS, fullFormat)
28 | val paths = config.path.split(",").map(_.trim).filter(_.nonEmpty)
29 | val sparkDataReader = reader.getReader[DataFrameReader]
30 | val df = sparkDataReader.options(config.config).format(format).text(paths: _*)
31 | val dataFrame = df.select(F.input_file_name().as("fileName"), F.col("value").as("content"))
32 | SparkDataTable(dataFrame)
33 | }
34 |
35 | override def save(writer: Writer, config: DataSinkConfig): Unit = {
36 | val options = config.config
37 | val sparkDataWriter = writer.getWriter[DataFrameWriter[Row]]
38 | val saveMode = SaveMode.valueOf(config.mode.name())
39 | if (options.contains("usingCol") && config.dt.table.isDefined) {
40 | val col = options("usingCol")
41 |
42 | val table = config.dt.table[DataFrame].get
43 | if ( !table.columns.contains(col) ) throw new IdesException(s""" cannot resolve '`$col`' given input columns: [${table.columns.mkString(", ")}]; """)
44 |
45 | val finalTable = if ( StringType != table.schema.apply(col).dataType ) {
46 | table.select(F.col(col).cast(StringType).as("value"))
47 | } else {
48 | table.select(F.col(col).as("value"))
49 | }
50 |
51 | finalTable.write.options(options).mode(saveMode).text(config.path)
52 | } else sparkDataWriter.options(options).mode(saveMode).format(fullFormat).save(config.path)
53 | }
54 |
55 | override def fullFormat: String = "text"
56 |
57 | override def shortFormat: String = fullFormat
58 | }
59 |
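A hypothetical DSL statement exercising the usingCol branch handled in save() above (table name and path are illustrative, and it assumes the where-clause key is passed through verbatim as the option name):

    -- write only the `content` column of t1 as plain text files
    save t1 overwrite into text.`/tmp/text_out` where usingCol='content';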
--------------------------------------------------------------------------------
/engine/src/main/java/tech/ides/ets/GetPartitionNum.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.ets
2 |
3 | import org.apache.spark.sql.DataFrame
4 | import tech.ides.datasource.{DataTable, SparkDataTable}
5 | import tech.ides.doc.{Document, MarkDownDoc}
6 | import tech.ides.extension.{Action, Analysis, ETPlugin, Extension, PluginType}
7 | import tech.ides.strategy.PlatformFrameEnum.SPARK
8 | import tech.ides.strategy.PlatformFrameImpl
9 |
10 | /**
11 |  * Get the number of partitions of a table
12 | * Created by songgr on 2022/02/11.
13 | */
14 | @Extension(
15 | name = "GetPartitionNum",
16 | description = "获取表分区数目",
17 | sinceVersion = "1.0.0"
18 | )
19 | @PlatformFrameImpl(frameName = SPARK)
20 | class GetPartitionNum extends ETPlugin {
21 |
22 | override def exec(dataTable: DataTable, path: String, params: Map[String, String]): DataTable = {
23 | val table = dataTable.table[DataFrame].get
24 | val partitionNum = table.toJavaRDD.partitions.size()
25 | val spark = table.sparkSession
26 |
27 | import spark.implicits._
28 | val df = Seq(partitionNum).toDF("partitionNum")
29 | SparkDataTable(df)
30 | }
31 |
32 | /**
33 | * 插件类型
34 | */
35 | override def pluginType: PluginType = PluginType(Action, Analysis)
36 |
37 | /**
38 |  * Plugin documentation
39 | */
40 | override def doc: Document = Document(
41 | MarkDownDoc,
42 | """
43 | |GetPartitionNum is used to get the table partitions size.
44 | |
45 | |```sql
46 | |run GetPartitionNum.`tableName` as newTable;
47 | |```
48 | |
49 | |""".stripMargin
50 | )
51 | }
52 |
--------------------------------------------------------------------------------
/engine/src/main/java/tech/ides/ets/TableRepartition.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.ets
2 |
3 | import org.apache.spark.sql.DataFrame
4 | import tech.ides.datasource.{DataTable, SparkDataTable}
5 | import tech.ides.exception.IdesException
6 | import tech.ides.extension.{ETL, ETPlugin, Extension, PluginType, Transform}
7 | import tech.ides.strategy.PlatformFrameEnum.SPARK
8 | import tech.ides.strategy.PlatformFrameImpl
9 |
10 | /**
11 |  * Repartition a table
12 | * Created by songgr on 2022/02/16.
13 | */
14 | @Extension(
15 | name = "TableRepartition",
16 | description = "表数据重新分区",
17 | sinceVersion = "1.0.0"
18 | )
19 | @PlatformFrameImpl(frameName = SPARK)
20 | class TableRepartition extends ETPlugin {
21 | override def exec(dataTable: DataTable, path: String, params: Map[String, String]): DataTable = {
22 |
23 | val partitionNum = params.get("partitionNum")
24 | val table = dataTable.table[DataFrame].get
25 |
26 | if (partitionNum.isDefined) {
27 | val df = table.repartition(partitionNum.get.toInt)
28 | SparkDataTable(df)
29 | } else {
30 | throw new IdesException("partitionNum parameter must be set")
31 | }
32 | }
33 |
34 | /**
35 |  * Plugin type
36 | */
37 | override def pluginType: PluginType = PluginType(Transform, ETL)
38 | }
39 |
--------------------------------------------------------------------------------
/engine/src/main/java/tech/ides/local/LocalIdesServiceApp.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.local
2 |
3 | import tech.ides.core.IdesApp
4 |
5 |
6 | /**
7 | *
8 | * Created by songgr on 2020/07/14.
9 | */
10 | object LocalIdesServiceApp {
11 |
12 | def main(args: Array[String]): Unit = {
13 |
14 | IdesApp.main(Array(
15 | "-spark.app.name", "ides"
16 | , "-spark.master", "local[*]"
17 | , "-spark.driver.memory", "512m"
18 | , "-ides.spark.service", "true"
19 | , "-ides.server.port", "9009"
20 | , "-ides.server.request-log.enable", "true"
21 | , "-ides.script.default.date.format", "yyyyMMdd"
22 | // , "-ides.enableHiveSupport", "true"
23 | ))
24 | }
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/engine/src/main/resources-local/application.yml:
--------------------------------------------------------------------------------
1 | application:
2 |   # service implementation class
3 | service: tech.ides.rest.IdesServer
4 |   # platform lifecycle classes
5 | platformLifecycles:
6 | - tech.ides.datasource.DataSourceFactory
7 | - tech.ides.extension.ETPluginRegister
8 | serviceLifecycles:
9 | - tech.ides.rest.ControlHandlerHook
10 |
11 | # database connection registration info
12 | connectMetaData:
13 | - name: test
14 | format: jdbc
15 | url: jdbc:mysql://127.0.0.1:3306/test?characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false
16 | driver: com.mysql.jdbc.Driver
17 | user: test
18 | password: test
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/css/index.css:
--------------------------------------------------------------------------------
1 | .banner {
2 | display: flex;
3 | justify-content: left;
4 | padding-left: 1%;
5 | font-size: 12px;
6 | font-weight: bold;
7 | }
8 |
9 | .banner pre {
10 | color: #0027ff;
11 | }
12 |
13 | .query_box {
14 | margin-top: -18px;
15 | margin-left: 8px;
16 | margin-right: 8px;
17 | height: 218px;
18 | }
19 |
20 | .editor .ace_wrap {
21 | height: 348px;
22 | width: 100%;
23 | font-size: 14px;
24 | /*background-color: #141414;*/
25 | /*color: #F8F8F8;*/
26 | }
27 |
28 | .button_box #runButton, .button_box #stopButton {
29 | color: #000;
30 | padding: .42rem 1.07rem;
31 | margin: .375rem;
32 | border-radius: .225rem;
33 | border: 0;
34 | box-shadow: 0 2px 5px 0 rgba(0,0,0,.16), 0 2px 10px 0 rgba(0,0,0,.12);
35 | }
36 |
37 | .runnable {
38 | background-color: #00c851;
39 | cursor: pointer;
40 | }
41 | .disRunnable {
42 | background-color: gray;
43 | cursor: default;
44 | }
45 |
46 |
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/image/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/image/favicon-16x16.png
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/image/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/image/favicon-32x32.png
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/js/FileSaver.min.js:
--------------------------------------------------------------------------------
1 | /*
2 | @source http://purl.eligrey.com/github/FileSaver.js/blob/master/src/FileSaver.js
3 | */
4 | var module$FileSaver={},saveAs$$module$FileSaver=saveAs$$module$FileSaver||function(b){if(!("undefined"===typeof b||"undefined"!==typeof navigator&&/MSIE [1-9]\./.test(navigator.userAgent))){var f=b.document.createElementNS("http://www.w3.org/1999/xhtml","a"),q="download"in f,r=/constructor/i.test(b.HTMLElement)||b.safari,h=/CriOS\/[\d]+/.test(navigator.userAgent),k=b.setImmediate||b.setTimeout,t=function(a){k(function(){throw a;},0)},l=function(a){setTimeout(function(){"string"===typeof a?(b.URL||
5 | b.webkitURL||b).revokeObjectURL(a):a.remove()},4E4)},m=function(a){return/^\s*(?:text\/\S*|application\/xml|\S*\/\S*\+xml)\s*;.*charset\s*=\s*utf-8/i.test(a.type)?new Blob([String.fromCharCode(65279),a],{type:a.type}):a},p=function(a,c,u){u||(a=m(a));var d=this,n="application/octet-stream"===a.type,g=function(){var a=["writestart","progress","write","writeend"];a=[].concat(a);for(var b=a.length;b--;){var c=d["on"+a[b]];if("function"===typeof c)try{c.call(d,d)}catch(v){t(v)}}};d.readyState=d.INIT;
6 | if(q){var e=(b.URL||b.webkitURL||b).createObjectURL(a);k(function(){f.href=e;f.download=c;var a=new MouseEvent("click");f.dispatchEvent(a);g();l(e);d.readyState=d.DONE},0)}else(function(){if((h||n&&r)&&b.FileReader){var c=new FileReader;c.onloadend=function(){var a=h?c.result:c.result.replace(/^data:[^;]*;/,"data:attachment/file;");b.open(a,"_blank")||(b.location.href=a);d.readyState=d.DONE;g()};c.readAsDataURL(a);d.readyState=d.INIT}else e||(e=(b.URL||b.webkitURL||b).createObjectURL(a)),n?b.location.href=
7 | e:b.open(e,"_blank")||(b.location.href=e),d.readyState=d.DONE,g(),l(e)})()},c=p.prototype;if("undefined"!==typeof navigator&&navigator.msSaveOrOpenBlob)return function(a,b,c){b=b||a.name||"download";c||(a=m(a));return navigator.msSaveOrOpenBlob(a,b)};c.abort=function(){};c.readyState=c.INIT=0;c.WRITING=1;c.DONE=2;c.error=c.onwritestart=c.onprogress=c.onwrite=c.onabort=c.onerror=c.onwriteend=null;return function(a,b,c){return new p(a,b||a.name||"download",c)}}}("undefined"!==typeof self&&self||"undefined"!==
8 | typeof window&&window||this);module$FileSaver.saveAs=saveAs$$module$FileSaver;
9 |
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/js/ext-beautify.min.js:
--------------------------------------------------------------------------------
1 | define("ace/ext/beautify",["require","exports","module","ace/token_iterator"],function(e,F,t){"use strict";function S(e,t){return-1|>=|<|<=|=>)$/)?"punctuation.operator"===s.type&&";"===f?(B(),_(),m=!0,O&&v++):"punctuation.operator"===s.type&&f.match(/^(:|,)$/)?(B(),_(),f.match(/^(,)$/)&&0"!==f||u?S(s,"attribute-name")&&p.substr(-1).match(/^\s$/)?l=!0:S(s,"attribute-equals")?(j(),_()):S(s,"tag-close")&&(j(),"/>"===f&&(l=!0)):(B(),l=!0):(B(),_(),m=l=!0),u&&(!s.type.match(/^(comment)$/)||f.substr(0,1).match(/^[/#]$/))&&(!s.type.match(/^(string)$/)||f.substr(0,1).match(/^['"]$/))){if(g=b,y"===f)&&(v=q&&t&&""===t.value?-1:1),S(s,"tag-open")&&""===f?d--:S(s,"tag-open")&&"<"===f&&-1===i.indexOf(t.value)?d++:S(s,"tag-name")?h=f:S(s,"tag-close")&&"/>"===f&&-1===i.indexOf(h)&&d--,r=x}}s=t}p=p.trim(),e.doc.setValue(p)},F.commands=[{name:"beautify",description:"Format selection (Beautify)",exec:function(e){F.beautify(e.session)},bindKey:"Ctrl-Shift-B"}]}),window.require(["ace/ext/beautify"],function(e){"object"==typeof module&&"object"==typeof exports&&module&&(module.exports=e)});
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/js/mode-sql.js:
--------------------------------------------------------------------------------
1 | define("ace/mode/sql_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"],function(e,t,r){"use strict";function i(){var e=this.createKeywordMapper({"support.function":"avg|count|first|last|max|min|sum|ucase|lcase|mid|len|round|rank|now|format|coalesce|ifnull|isnull|nvl",keyword:"select|insert|update|delete|from|where|and|or|group|by|order|limit|offset|having|as|case|when|then|else|end|type|left|right|join|on|outer|desc|asc|union|create|table|primary|key|if|foreign|not|references|default|null|inner|cross|natural|database|drop|grant","constant.language":"true|false","storage.type":"int|numeric|decimal|date|varchar|char|bigint|float|double|bit|binary|text|set|timestamp|money|real|number|integer"},"identifier",!0);this.$rules={start:[{token:"comment",regex:"--.*$"},{token:"comment",start:"/\\*",end:"\\*/"},{token:"string",regex:'".*?"'},{token:"string",regex:"'.*?'"},{token:"string",regex:"`.*?`"},{token:"constant.numeric",regex:"[+-]?\\d+(?:(?:\\.\\d*)?(?:[eE][+-]?\\d+)?)?\\b"},{token:e,regex:"[a-zA-Z_$][a-zA-Z0-9_$]*\\b"},{token:"keyword.operator",regex:"\\+|\\-|\\/|\\/\\/|%|<@>|@>|<@|&|\\^|~|<|>|<=|=>|==|!=|<>|="},{token:"paren.lparen",regex:"[\\(]"},{token:"paren.rparen",regex:"[\\)]"},{token:"text",regex:"\\s+"}]},this.normalizeRules()}var o=e("../lib/oop"),n=e("./text_highlight_rules").TextHighlightRules;o.inherits(i,n),t.SqlHighlightRules=i}),define("ace/mode/sql",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/sql_highlight_rules"],function(e,t,r){"use strict";function i(){this.HighlightRules=s,this.$behaviour=this.$defaultBehaviour}var o=e("../lib/oop"),n=e("./text").Mode,s=e("./sql_highlight_rules").SqlHighlightRules;o.inherits(i,n),function(){this.lineCommentStart="--",this.$id="ace/mode/sql",this.snippetFileId="ace/snippets/sql"}.call(i.prototype),t.Mode=i}),window.require(["ace/mode/sql"],function(e){"object"==typeof module&&"object"==typeof exports&&module&&(module.exports=e)});
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/js/sql.min.js:
--------------------------------------------------------------------------------
1 | define("ace/snippets/sql",["require","exports","module"],function(t,e,n){"use strict";e.snippetText="snippet tbl\n\tcreate table ${1:table} (\n\t\t${2:columns}\n\t);\nsnippet col\n\t${1:name}\t${2:type}\t${3:default ''}\t${4:not null}\nsnippet ccol\n\t${1:name}\tvarchar2(${2:size})\t${3:default ''}\t${4:not null}\nsnippet ncol\n\t${1:name}\tnumber\t${3:default 0}\t${4:not null}\nsnippet dcol\n\t${1:name}\tdate\t${3:default sysdate}\t${4:not null}\nsnippet ind\n\tcreate index ${3:$1_$2} on ${1:table}(${2:column});\nsnippet uind\n\tcreate unique index ${1:name} on ${2:table}(${3:column});\nsnippet tblcom\n\tcomment on table ${1:table} is '${2:comment}';\nsnippet colcom\n\tcomment on column ${1:table}.${2:column} is '${3:comment}';\nsnippet addcol\n\talter table ${1:table} add (${2:column} ${3:type});\nsnippet seq\n\tcreate sequence ${1:name} start with ${2:1} increment by ${3:1} minvalue ${4:1};\nsnippet s*\n\tselect * from ${1:table}\n",e.scope="sql"}),window.require(["ace/snippets/sql"],function(t){"object"==typeof module&&"object"==typeof exports&&module&&(module.exports=t)});
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/js/theme-twilight.js:
--------------------------------------------------------------------------------
1 | define("ace/theme/twilight",["require","exports","module","ace/lib/dom"],function(e,t,n){t.isDark=!0,t.cssClass="ace-twilight",t.cssText=".ace-twilight .ace_gutter {background: #232323;color: #E2E2E2}.ace-twilight .ace_print-margin {width: 1px;background: #232323}.ace-twilight {background-color: #141414;color: #F8F8F8}.ace-twilight .ace_cursor {color: #A7A7A7}.ace-twilight .ace_marker-layer .ace_selection {background: rgba(221, 240, 255, 0.20)}.ace-twilight.ace_multiselect .ace_selection.ace_start {box-shadow: 0 0 3px 0px #141414;}.ace-twilight .ace_marker-layer .ace_step {background: rgb(102, 82, 0)}.ace-twilight .ace_marker-layer .ace_bracket {margin: -1px 0 0 -1px;border: 1px solid rgba(255, 255, 255, 0.25)}.ace-twilight .ace_marker-layer .ace_active-line {background: rgba(255, 255, 255, 0.031)}.ace-twilight .ace_gutter-active-line {background-color: rgba(255, 255, 255, 0.031)}.ace-twilight .ace_marker-layer .ace_selected-word {border: 1px solid rgba(221, 240, 255, 0.20)}.ace-twilight .ace_invisible {color: rgba(255, 255, 255, 0.25)}.ace-twilight .ace_keyword,.ace-twilight .ace_meta {color: #CDA869}.ace-twilight .ace_constant,.ace-twilight .ace_constant.ace_character,.ace-twilight .ace_constant.ace_character.ace_escape,.ace-twilight .ace_constant.ace_other,.ace-twilight .ace_heading,.ace-twilight .ace_markup.ace_heading,.ace-twilight .ace_support.ace_constant {color: #CF6A4C}.ace-twilight .ace_invalid.ace_illegal {color: #F8F8F8;background-color: rgba(86, 45, 86, 0.75)}.ace-twilight .ace_invalid.ace_deprecated {text-decoration: underline;font-style: italic;color: #D2A8A1}.ace-twilight .ace_support {color: #9B859D}.ace-twilight .ace_fold {background-color: #AC885B;border-color: #F8F8F8}.ace-twilight .ace_support.ace_function {color: #DAD085}.ace-twilight .ace_list,.ace-twilight .ace_markup.ace_list,.ace-twilight .ace_storage {color: #F9EE98}.ace-twilight .ace_entity.ace_name.ace_function,.ace-twilight .ace_meta.ace_tag,.ace-twilight .ace_variable {color: #AC885B}.ace-twilight .ace_string {color: #8F9D6A}.ace-twilight .ace_string.ace_regexp {color: #E9C062}.ace-twilight .ace_comment {font-style: italic;color: #5F5A60}.ace-twilight .ace_variable {color: #7587A6}.ace-twilight .ace_xml-pe {color: #494949}.ace-twilight .ace_indent-guide {background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAEklEQVQImWMQERFpYLC1tf0PAAgOAnPnhxyiAAAAAElFTkSuQmCC) right repeat-y}";var r=e("../lib/dom");r.importCssString(t.cssText,t.cssClass)}); (function() {
2 | window.require(["ace/theme/twilight"], function(m) {
3 | if (typeof module == "object" && typeof exports == "object" && module) {
4 | module.exports = m;
5 | }
6 | });
7 | })();
8 |
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/js/theme-twilight.min.js:
--------------------------------------------------------------------------------
1 | define("ace/theme/twilight",["require","exports","module","ace/lib/dom"],function(e,a,c){a.isDark=!0,a.cssClass="ace-twilight",a.cssText=".ace-twilight .ace_gutter {background: #232323;color: #E2E2E2}.ace-twilight .ace_print-margin {width: 1px;background: #232323}.ace-twilight {background-color: #141414;color: #F8F8F8}.ace-twilight .ace_cursor {color: #A7A7A7}.ace-twilight .ace_marker-layer .ace_selection {background: rgba(221, 240, 255, 0.20)}.ace-twilight.ace_multiselect .ace_selection.ace_start {box-shadow: 0 0 3px 0px #141414;}.ace-twilight .ace_marker-layer .ace_step {background: rgb(102, 82, 0)}.ace-twilight .ace_marker-layer .ace_bracket {margin: -1px 0 0 -1px;border: 1px solid rgba(255, 255, 255, 0.25)}.ace-twilight .ace_marker-layer .ace_active-line {background: rgba(255, 255, 255, 0.031)}.ace-twilight .ace_gutter-active-line {background-color: rgba(255, 255, 255, 0.031)}.ace-twilight .ace_marker-layer .ace_selected-word {border: 1px solid rgba(221, 240, 255, 0.20)}.ace-twilight .ace_invisible {color: rgba(255, 255, 255, 0.25)}.ace-twilight .ace_keyword,.ace-twilight .ace_meta {color: #CDA869}.ace-twilight .ace_constant,.ace-twilight .ace_constant.ace_character,.ace-twilight .ace_constant.ace_character.ace_escape,.ace-twilight .ace_constant.ace_other,.ace-twilight .ace_heading,.ace-twilight .ace_markup.ace_heading,.ace-twilight .ace_support.ace_constant {color: #CF6A4C}.ace-twilight .ace_invalid.ace_illegal {color: #F8F8F8;background-color: rgba(86, 45, 86, 0.75)}.ace-twilight .ace_invalid.ace_deprecated {text-decoration: underline;font-style: italic;color: #D2A8A1}.ace-twilight .ace_support {color: #9B859D}.ace-twilight .ace_fold {background-color: #AC885B;border-color: #F8F8F8}.ace-twilight .ace_support.ace_function {color: #DAD085}.ace-twilight .ace_list,.ace-twilight .ace_markup.ace_list,.ace-twilight .ace_storage {color: #F9EE98}.ace-twilight .ace_entity.ace_name.ace_function,.ace-twilight .ace_meta.ace_tag,.ace-twilight .ace_variable {color: #AC885B}.ace-twilight .ace_string {color: #8F9D6A}.ace-twilight .ace_string.ace_regexp {color: #E9C062}.ace-twilight .ace_comment {font-style: italic;color: #5F5A60}.ace-twilight .ace_variable {color: #7587A6}.ace-twilight .ace_xml-pe {color: #494949}.ace-twilight .ace_indent-guide {background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAEklEQVQImWMQERFpYLC1tf0PAAgOAnPnhxyiAAAAAElFTkSuQmCC) right repeat-y}",e("../lib/dom").importCssString(a.cssText,a.cssClass)}),window.require(["ace/theme/twilight"],function(e){"object"==typeof module&&"object"==typeof exports&&module&&(module.exports=e)});
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-brands-400.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-brands-400.eot
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-brands-400.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-brands-400.ttf
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-brands-400.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-brands-400.woff
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-brands-400.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-brands-400.woff2
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-regular-400.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-regular-400.eot
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-regular-400.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-regular-400.ttf
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-regular-400.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-regular-400.woff
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-regular-400.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-regular-400.woff2
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-solid-900.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-solid-900.eot
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-solid-900.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-solid-900.ttf
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-solid-900.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-solid-900.woff
--------------------------------------------------------------------------------
/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-solid-900.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-local/webapp/WEB-INF/static/webfonts/fa-solid-900.woff2
--------------------------------------------------------------------------------
/engine/src/main/resources-online/application.yml:
--------------------------------------------------------------------------------
1 | application:
2 |   # service implementation class
3 | service: tech.ides.rest.IdesServer
4 |   # platform lifecycle classes
5 | platformLifecycles:
6 | - tech.ides.datasource.DataSourceFactory
7 | - tech.ides.extension.ETPluginRegister
8 | serviceLifecycles:
9 | - tech.ides.rest.ControlHandlerHook
10 |
11 | # database connection registration info
12 | connectMetaData:
13 | - name: test
14 | format: jdbc
15 | url: jdbc:mysql://127.0.0.1:3306/test?characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false
16 | driver: com.mysql.jdbc.Driver
17 | user: test
18 | password: test
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/css/index.css:
--------------------------------------------------------------------------------
1 | .banner {
2 | display: flex;
3 | justify-content: left;
4 | padding-left: 1%;
5 | font-size: 12px;
6 | font-weight: bold;
7 | }
8 |
9 | .banner pre {
10 | color: #0027ff;
11 | }
12 |
13 | .query_box {
14 | margin-top: -18px;
15 | margin-left: 8px;
16 | margin-right: 8px;
17 | height: 218px;
18 | }
19 |
20 | .editor .ace_wrap {
21 | height: 348px;
22 | width: 100%;
23 | font-size: 14px;
24 | /*background-color: #141414;*/
25 | /*color: #F8F8F8;*/
26 | }
27 |
28 | .button_box #runButton, .button_box #stopButton {
29 | color: #000;
30 | padding: .42rem 1.07rem;
31 | margin: .375rem;
32 | border-radius: .225rem;
33 | border: 0;
34 | box-shadow: 0 2px 5px 0 rgba(0,0,0,.16), 0 2px 10px 0 rgba(0,0,0,.12);
35 | }
36 |
37 | .runnable {
38 | background-color: #00c851;
39 | cursor: pointer;
40 | }
41 | .disRunnable {
42 | background-color: gray;
43 | cursor: default;
44 | }
45 |
46 |
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/image/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/image/favicon-16x16.png
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/image/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/image/favicon-32x32.png
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/js/FileSaver.min.js:
--------------------------------------------------------------------------------
1 | /*
2 | @source http://purl.eligrey.com/github/FileSaver.js/blob/master/src/FileSaver.js
3 | */
4 | var module$FileSaver={},saveAs$$module$FileSaver=saveAs$$module$FileSaver||function(b){if(!("undefined"===typeof b||"undefined"!==typeof navigator&&/MSIE [1-9]\./.test(navigator.userAgent))){var f=b.document.createElementNS("http://www.w3.org/1999/xhtml","a"),q="download"in f,r=/constructor/i.test(b.HTMLElement)||b.safari,h=/CriOS\/[\d]+/.test(navigator.userAgent),k=b.setImmediate||b.setTimeout,t=function(a){k(function(){throw a;},0)},l=function(a){setTimeout(function(){"string"===typeof a?(b.URL||
5 | b.webkitURL||b).revokeObjectURL(a):a.remove()},4E4)},m=function(a){return/^\s*(?:text\/\S*|application\/xml|\S*\/\S*\+xml)\s*;.*charset\s*=\s*utf-8/i.test(a.type)?new Blob([String.fromCharCode(65279),a],{type:a.type}):a},p=function(a,c,u){u||(a=m(a));var d=this,n="application/octet-stream"===a.type,g=function(){var a=["writestart","progress","write","writeend"];a=[].concat(a);for(var b=a.length;b--;){var c=d["on"+a[b]];if("function"===typeof c)try{c.call(d,d)}catch(v){t(v)}}};d.readyState=d.INIT;
6 | if(q){var e=(b.URL||b.webkitURL||b).createObjectURL(a);k(function(){f.href=e;f.download=c;var a=new MouseEvent("click");f.dispatchEvent(a);g();l(e);d.readyState=d.DONE},0)}else(function(){if((h||n&&r)&&b.FileReader){var c=new FileReader;c.onloadend=function(){var a=h?c.result:c.result.replace(/^data:[^;]*;/,"data:attachment/file;");b.open(a,"_blank")||(b.location.href=a);d.readyState=d.DONE;g()};c.readAsDataURL(a);d.readyState=d.INIT}else e||(e=(b.URL||b.webkitURL||b).createObjectURL(a)),n?b.location.href=
7 | e:b.open(e,"_blank")||(b.location.href=e),d.readyState=d.DONE,g(),l(e)})()},c=p.prototype;if("undefined"!==typeof navigator&&navigator.msSaveOrOpenBlob)return function(a,b,c){b=b||a.name||"download";c||(a=m(a));return navigator.msSaveOrOpenBlob(a,b)};c.abort=function(){};c.readyState=c.INIT=0;c.WRITING=1;c.DONE=2;c.error=c.onwritestart=c.onprogress=c.onwrite=c.onabort=c.onerror=c.onwriteend=null;return function(a,b,c){return new p(a,b||a.name||"download",c)}}}("undefined"!==typeof self&&self||"undefined"!==
8 | typeof window&&window||this);module$FileSaver.saveAs=saveAs$$module$FileSaver;
9 |
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/js/ext-beautify.min.js:
--------------------------------------------------------------------------------
1 | define("ace/ext/beautify",["require","exports","module","ace/token_iterator"],function(e,F,t){"use strict";function S(e,t){return-1|>=|<|<=|=>)$/)?"punctuation.operator"===s.type&&";"===f?(B(),_(),m=!0,O&&v++):"punctuation.operator"===s.type&&f.match(/^(:|,)$/)?(B(),_(),f.match(/^(,)$/)&&0"!==f||u?S(s,"attribute-name")&&p.substr(-1).match(/^\s$/)?l=!0:S(s,"attribute-equals")?(j(),_()):S(s,"tag-close")&&(j(),"/>"===f&&(l=!0)):(B(),l=!0):(B(),_(),m=l=!0),u&&(!s.type.match(/^(comment)$/)||f.substr(0,1).match(/^[/#]$/))&&(!s.type.match(/^(string)$/)||f.substr(0,1).match(/^['"]$/))){if(g=b,y"===f)&&(v=q&&t&&""===t.value?-1:1),S(s,"tag-open")&&""===f?d--:S(s,"tag-open")&&"<"===f&&-1===i.indexOf(t.value)?d++:S(s,"tag-name")?h=f:S(s,"tag-close")&&"/>"===f&&-1===i.indexOf(h)&&d--,r=x}}s=t}p=p.trim(),e.doc.setValue(p)},F.commands=[{name:"beautify",description:"Format selection (Beautify)",exec:function(e){F.beautify(e.session)},bindKey:"Ctrl-Shift-B"}]}),window.require(["ace/ext/beautify"],function(e){"object"==typeof module&&"object"==typeof exports&&module&&(module.exports=e)});
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/js/mode-sql.js:
--------------------------------------------------------------------------------
1 | define("ace/mode/sql_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"],function(e,t,r){"use strict";function i(){var e=this.createKeywordMapper({"support.function":"avg|count|first|last|max|min|sum|ucase|lcase|mid|len|round|rank|now|format|coalesce|ifnull|isnull|nvl",keyword:"select|insert|update|delete|from|where|and|or|group|by|order|limit|offset|having|as|case|when|then|else|end|type|left|right|join|on|outer|desc|asc|union|create|table|primary|key|if|foreign|not|references|default|null|inner|cross|natural|database|drop|grant","constant.language":"true|false","storage.type":"int|numeric|decimal|date|varchar|char|bigint|float|double|bit|binary|text|set|timestamp|money|real|number|integer"},"identifier",!0);this.$rules={start:[{token:"comment",regex:"--.*$"},{token:"comment",start:"/\\*",end:"\\*/"},{token:"string",regex:'".*?"'},{token:"string",regex:"'.*?'"},{token:"string",regex:"`.*?`"},{token:"constant.numeric",regex:"[+-]?\\d+(?:(?:\\.\\d*)?(?:[eE][+-]?\\d+)?)?\\b"},{token:e,regex:"[a-zA-Z_$][a-zA-Z0-9_$]*\\b"},{token:"keyword.operator",regex:"\\+|\\-|\\/|\\/\\/|%|<@>|@>|<@|&|\\^|~|<|>|<=|=>|==|!=|<>|="},{token:"paren.lparen",regex:"[\\(]"},{token:"paren.rparen",regex:"[\\)]"},{token:"text",regex:"\\s+"}]},this.normalizeRules()}var o=e("../lib/oop"),n=e("./text_highlight_rules").TextHighlightRules;o.inherits(i,n),t.SqlHighlightRules=i}),define("ace/mode/sql",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/sql_highlight_rules"],function(e,t,r){"use strict";function i(){this.HighlightRules=s,this.$behaviour=this.$defaultBehaviour}var o=e("../lib/oop"),n=e("./text").Mode,s=e("./sql_highlight_rules").SqlHighlightRules;o.inherits(i,n),function(){this.lineCommentStart="--",this.$id="ace/mode/sql",this.snippetFileId="ace/snippets/sql"}.call(i.prototype),t.Mode=i}),window.require(["ace/mode/sql"],function(e){"object"==typeof module&&"object"==typeof exports&&module&&(module.exports=e)});
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/js/sql.min.js:
--------------------------------------------------------------------------------
1 | define("ace/snippets/sql",["require","exports","module"],function(t,e,n){"use strict";e.snippetText="snippet tbl\n\tcreate table ${1:table} (\n\t\t${2:columns}\n\t);\nsnippet col\n\t${1:name}\t${2:type}\t${3:default ''}\t${4:not null}\nsnippet ccol\n\t${1:name}\tvarchar2(${2:size})\t${3:default ''}\t${4:not null}\nsnippet ncol\n\t${1:name}\tnumber\t${3:default 0}\t${4:not null}\nsnippet dcol\n\t${1:name}\tdate\t${3:default sysdate}\t${4:not null}\nsnippet ind\n\tcreate index ${3:$1_$2} on ${1:table}(${2:column});\nsnippet uind\n\tcreate unique index ${1:name} on ${2:table}(${3:column});\nsnippet tblcom\n\tcomment on table ${1:table} is '${2:comment}';\nsnippet colcom\n\tcomment on column ${1:table}.${2:column} is '${3:comment}';\nsnippet addcol\n\talter table ${1:table} add (${2:column} ${3:type});\nsnippet seq\n\tcreate sequence ${1:name} start with ${2:1} increment by ${3:1} minvalue ${4:1};\nsnippet s*\n\tselect * from ${1:table}\n",e.scope="sql"}),window.require(["ace/snippets/sql"],function(t){"object"==typeof module&&"object"==typeof exports&&module&&(module.exports=t)});
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/js/theme-twilight.js:
--------------------------------------------------------------------------------
1 | define("ace/theme/twilight",["require","exports","module","ace/lib/dom"],function(e,t,n){t.isDark=!0,t.cssClass="ace-twilight",t.cssText=".ace-twilight .ace_gutter {background: #232323;color: #E2E2E2}.ace-twilight .ace_print-margin {width: 1px;background: #232323}.ace-twilight {background-color: #141414;color: #F8F8F8}.ace-twilight .ace_cursor {color: #A7A7A7}.ace-twilight .ace_marker-layer .ace_selection {background: rgba(221, 240, 255, 0.20)}.ace-twilight.ace_multiselect .ace_selection.ace_start {box-shadow: 0 0 3px 0px #141414;}.ace-twilight .ace_marker-layer .ace_step {background: rgb(102, 82, 0)}.ace-twilight .ace_marker-layer .ace_bracket {margin: -1px 0 0 -1px;border: 1px solid rgba(255, 255, 255, 0.25)}.ace-twilight .ace_marker-layer .ace_active-line {background: rgba(255, 255, 255, 0.031)}.ace-twilight .ace_gutter-active-line {background-color: rgba(255, 255, 255, 0.031)}.ace-twilight .ace_marker-layer .ace_selected-word {border: 1px solid rgba(221, 240, 255, 0.20)}.ace-twilight .ace_invisible {color: rgba(255, 255, 255, 0.25)}.ace-twilight .ace_keyword,.ace-twilight .ace_meta {color: #CDA869}.ace-twilight .ace_constant,.ace-twilight .ace_constant.ace_character,.ace-twilight .ace_constant.ace_character.ace_escape,.ace-twilight .ace_constant.ace_other,.ace-twilight .ace_heading,.ace-twilight .ace_markup.ace_heading,.ace-twilight .ace_support.ace_constant {color: #CF6A4C}.ace-twilight .ace_invalid.ace_illegal {color: #F8F8F8;background-color: rgba(86, 45, 86, 0.75)}.ace-twilight .ace_invalid.ace_deprecated {text-decoration: underline;font-style: italic;color: #D2A8A1}.ace-twilight .ace_support {color: #9B859D}.ace-twilight .ace_fold {background-color: #AC885B;border-color: #F8F8F8}.ace-twilight .ace_support.ace_function {color: #DAD085}.ace-twilight .ace_list,.ace-twilight .ace_markup.ace_list,.ace-twilight .ace_storage {color: #F9EE98}.ace-twilight .ace_entity.ace_name.ace_function,.ace-twilight .ace_meta.ace_tag,.ace-twilight .ace_variable {color: #AC885B}.ace-twilight .ace_string {color: #8F9D6A}.ace-twilight .ace_string.ace_regexp {color: #E9C062}.ace-twilight .ace_comment {font-style: italic;color: #5F5A60}.ace-twilight .ace_variable {color: #7587A6}.ace-twilight .ace_xml-pe {color: #494949}.ace-twilight .ace_indent-guide {background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAEklEQVQImWMQERFpYLC1tf0PAAgOAnPnhxyiAAAAAElFTkSuQmCC) right repeat-y}";var r=e("../lib/dom");r.importCssString(t.cssText,t.cssClass)}); (function() {
2 | window.require(["ace/theme/twilight"], function(m) {
3 | if (typeof module == "object" && typeof exports == "object" && module) {
4 | module.exports = m;
5 | }
6 | });
7 | })();
8 |
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/js/theme-twilight.min.js:
--------------------------------------------------------------------------------
1 | define("ace/theme/twilight",["require","exports","module","ace/lib/dom"],function(e,a,c){a.isDark=!0,a.cssClass="ace-twilight",a.cssText=".ace-twilight .ace_gutter {background: #232323;color: #E2E2E2}.ace-twilight .ace_print-margin {width: 1px;background: #232323}.ace-twilight {background-color: #141414;color: #F8F8F8}.ace-twilight .ace_cursor {color: #A7A7A7}.ace-twilight .ace_marker-layer .ace_selection {background: rgba(221, 240, 255, 0.20)}.ace-twilight.ace_multiselect .ace_selection.ace_start {box-shadow: 0 0 3px 0px #141414;}.ace-twilight .ace_marker-layer .ace_step {background: rgb(102, 82, 0)}.ace-twilight .ace_marker-layer .ace_bracket {margin: -1px 0 0 -1px;border: 1px solid rgba(255, 255, 255, 0.25)}.ace-twilight .ace_marker-layer .ace_active-line {background: rgba(255, 255, 255, 0.031)}.ace-twilight .ace_gutter-active-line {background-color: rgba(255, 255, 255, 0.031)}.ace-twilight .ace_marker-layer .ace_selected-word {border: 1px solid rgba(221, 240, 255, 0.20)}.ace-twilight .ace_invisible {color: rgba(255, 255, 255, 0.25)}.ace-twilight .ace_keyword,.ace-twilight .ace_meta {color: #CDA869}.ace-twilight .ace_constant,.ace-twilight .ace_constant.ace_character,.ace-twilight .ace_constant.ace_character.ace_escape,.ace-twilight .ace_constant.ace_other,.ace-twilight .ace_heading,.ace-twilight .ace_markup.ace_heading,.ace-twilight .ace_support.ace_constant {color: #CF6A4C}.ace-twilight .ace_invalid.ace_illegal {color: #F8F8F8;background-color: rgba(86, 45, 86, 0.75)}.ace-twilight .ace_invalid.ace_deprecated {text-decoration: underline;font-style: italic;color: #D2A8A1}.ace-twilight .ace_support {color: #9B859D}.ace-twilight .ace_fold {background-color: #AC885B;border-color: #F8F8F8}.ace-twilight .ace_support.ace_function {color: #DAD085}.ace-twilight .ace_list,.ace-twilight .ace_markup.ace_list,.ace-twilight .ace_storage {color: #F9EE98}.ace-twilight .ace_entity.ace_name.ace_function,.ace-twilight .ace_meta.ace_tag,.ace-twilight .ace_variable {color: #AC885B}.ace-twilight .ace_string {color: #8F9D6A}.ace-twilight .ace_string.ace_regexp {color: #E9C062}.ace-twilight .ace_comment {font-style: italic;color: #5F5A60}.ace-twilight .ace_variable {color: #7587A6}.ace-twilight .ace_xml-pe {color: #494949}.ace-twilight .ace_indent-guide {background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAEklEQVQImWMQERFpYLC1tf0PAAgOAnPnhxyiAAAAAElFTkSuQmCC) right repeat-y}",e("../lib/dom").importCssString(a.cssText,a.cssClass)}),window.require(["ace/theme/twilight"],function(e){"object"==typeof module&&"object"==typeof exports&&module&&(module.exports=e)});
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-brands-400.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-brands-400.eot
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-brands-400.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-brands-400.ttf
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-brands-400.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-brands-400.woff
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-brands-400.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-brands-400.woff2
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-regular-400.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-regular-400.eot
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-regular-400.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-regular-400.ttf
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-regular-400.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-regular-400.woff
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-regular-400.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-regular-400.woff2
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-solid-900.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-solid-900.eot
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-solid-900.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-solid-900.ttf
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-solid-900.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-solid-900.woff
--------------------------------------------------------------------------------
/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-solid-900.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/engine/src/main/resources-online/webapp/WEB-INF/static/webfonts/fa-solid-900.woff2
--------------------------------------------------------------------------------
/engine/src/test/java/tech/ides/engine/test/AnyDataTableTest.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.engine.test
2 |
3 | import tech.ides.datasource.AnyDataTable
4 | import tech.ides.strategy.PlatformFrameEnum.SPARK
5 |
6 | /**
7 | * test
8 | * Created by songgr on 2022/02/27.
9 | */
10 | object AnyDataTableTest {
11 |
12 | def main(args: Array[String]): Unit = {
13 |
14 | val anyDataTable = AnyDataTable("test any data table", SPARK)
15 |
16 | val frame = anyDataTable.frame.frame
17 | val table = anyDataTable.table[String]
18 |
19 | println(frame)
20 | println(table)
21 |
22 | }
23 |
24 | }
25 |
--------------------------------------------------------------------------------
/engine/src/test/java/tech/ides/engine/test/DataTableTest.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.engine.test
2 |
3 | import org.apache.spark.sql.{DataFrame, SparkSession}
4 | import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
5 | import org.scalatest.FunSuite
6 | import tech.ides.datasource.{AnyJavaBeanDataTable, AnyProductDataTable}
7 | import tech.ides.strategy.PlatformFrameEnum.SPARK
8 |
9 | /**
10 | * DataTable tests
11 | * Created by songgr on 2022/05/30.
12 | */
13 | class DataTableTest extends FunSuite {
14 |
15 | implicit val spark = SparkSession.builder()
16 | .master("local[*]")
17 | .appName("test")
18 | .getOrCreate()
19 |
20 |
21 | test("spark") {
22 | spark.createDataFrame(Seq(A("1"))).show()
23 |
24 | spark.createDataFrame(Seq((1,2,3))).toDF().show()
25 |
26 | spark.createDataFrame(Seq((1,2,3))).toDF().show()
27 | }
28 |
29 |
30 | test("AnyProductDataTable") {
31 | val anyDataTable = AnyProductDataTable(Seq(A("test any data table")), SPARK)
32 |
33 | anyDataTable.table[DataFrame].get.show()
34 |
35 |
36 | AnyProductDataTable(Seq((1,2,3)), SPARK).table[DataFrame].get.show()
37 |
38 | }
39 |
40 | test("AnyJavaBeanDataTable") {
41 | val list = Seq(new Person("a", 1), new Person("b", 2), new Person("c", 3))
42 | AnyJavaBeanDataTable(list, SPARK).createOrReplaceTempView("aa")
43 | spark.sql("select name, count(1) from aa group by name").show()
44 | }
45 |
46 | test("AnyJavaBeanDataTable + schema") {
47 | val list = Seq(new Person("a", 1), new Person("b", 2), new Person("c", 3))
48 | val schema = StructType(Seq(
49 | StructField("name111", StringType, nullable = false),
50 | StructField("age111", IntegerType, nullable = false))
51 | )
52 |
53 | AnyJavaBeanDataTable(list, SPARK, schema).table[DataFrame].get.show()
54 |
55 | }
56 |
57 | case class A(a:String)
58 | }
--------------------------------------------------------------------------------
/engine/src/test/java/tech/ides/engine/test/Person.java:
--------------------------------------------------------------------------------
1 | package tech.ides.engine.test;
2 |
3 | /**
4 | * Created by songgr on 2022/05/31.
5 | */
6 | public class Person {
7 | String name;
8 | int age;
9 |
10 | public String getName() {
11 | return name;
12 | }
13 |
14 | public void setName(String name) {
15 | this.name = name;
16 | }
17 |
18 | public int getAge() {
19 | return age;
20 | }
21 |
22 | public void setAge(int age) {
23 | this.age = age;
24 | }
25 |
26 | public Person(String name, int age) {
27 | this.name = name;
28 | this.age = age;
29 | }
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/external/ds-spark-excel/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <artifactId>DataLinked</artifactId>
7 |         <groupId>tech.ides</groupId>
8 |         <version>${env.project.version}</version>
9 |         <relativePath>../../pom.xml</relativePath>
10 |     </parent>
11 |     <modelVersion>4.0.0</modelVersion>
12 |
13 |     <artifactId>ds-spark-excel-${spark.big.version}_${scala.binary.version}</artifactId>
14 |
15 |     <dependencies>
16 |         <dependency>
17 |             <groupId>tech.ides</groupId>
18 |             <artifactId>ides-core-${spark.big.version}_${scala.binary.version}</artifactId>
19 |             <version>${project.version}</version>
20 |             <scope>provided</scope>
21 |         </dependency>
22 |         <dependency>
23 |             <groupId>org.apache.spark</groupId>
24 |             <artifactId>spark-sql_${scala.binary.version}</artifactId>
25 |             <version>${spark.version}</version>
26 |             <scope>provided</scope>
27 |         </dependency>
28 |         <dependency>
29 |             <groupId>com.crealytics</groupId>
30 |             <artifactId>spark-excel_${scala.binary.version}</artifactId>
31 |             <version>0.13.5</version>
32 |         </dependency>
33 |     </dependencies>
34 |
35 | </project>
--------------------------------------------------------------------------------
/external/ds-spark-excel/src/main/java/tech/ides/external/datasource/excel/ExcelDataSource.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.external.datasource.excel
2 |
3 | import org.apache.hadoop.hdfs.HdfsOperator
4 | import org.apache.spark.sql.{DataFrame, DataFrameReader}
5 | import tech.ides.constants.ScriptConstants.IMPL_CLASS
6 | import tech.ides.core.ScriptQueryExecute
7 | import tech.ides.datasource.{BaseFileDataSource, DataSource, DataSourceConfig, DataTable, SparkDataTable}
8 | import tech.ides.datasource.DataSource.Method._
9 | import tech.ides.datasource.reader.Reader
10 | import tech.ides.strategy.PlatformFrameEnum.SPARK
11 | import tech.ides.strategy.PlatformFrameImpl
12 | import tech.sqlclub.common.log.Logging
13 | import tech.sqlclub.common.utils.Assert
14 | import scala.collection.mutable.ListBuffer
15 |
16 | /**
17 | *
18 | * Created by songgr on 2020/10/25.
19 | */
20 |
21 | @DataSource(
22 | types = Array(SOURCE, SINK),
23 | name = "excel",
24 | sinceVersion = "1.0.0"
25 | )
26 | @PlatformFrameImpl(frameName = SPARK)
27 | class ExcelDataSource extends BaseFileDataSource with Logging {
28 |
29 | override def load(reader: Reader, config: DataSourceConfig): DataTable = {
30 | Assert.isNotEmpty(config.path, "excel path must be non-empty!")
31 | val fileList = new ListBuffer[String]()
32 |
33 | config.path.split(",").foreach( path => {
34 | val isFile = HdfsOperator.isFile(path)
35 | if (isFile) {
36 | fileList += path
37 | } else {
38 | val files = HdfsOperator.listFiles(path)
39 | files.foreach(file => fileList += file.getPath.toString)
40 | }
41 | })
42 |
43 | log.info("read excel file list: {}", fileList)
44 | if (fileList.isEmpty) {
45 | val spark = ScriptQueryExecute.context.execListener.sparkSession
46 | return SparkDataTable(spark.emptyDataFrame)
47 | }
48 |
49 | val format = config.config.getOrElse(IMPL_CLASS, fullFormat)
50 | val sparkDataReader = reader.getReader[DataFrameReader]
51 |
52 | // read each excel file with the Spark reader
53 | val readExcel = (file: String) => sparkDataReader.options(config.config).format(format).load(file)
54 |
55 | val dataFrame = if (fileList.size > 1) fileList.map(readExcel).reduce(_ union _) else readExcel(fileList.head)
56 | SparkDataTable(dataFrame)
57 | }
58 |
59 | override def fullFormat: String = "com.crealytics.spark.excel"
60 |
61 | override def shortFormat: String = "excel"
62 | }
63 |
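The two annotations above are the extension points the datasource registry keys on. As a rough sketch of the same pattern (illustrative only: the `json` naming, the package, and the assumption that `BaseFileDataSource` needs nothing beyond `load`, `fullFormat` and `shortFormat` to be overridden are not part of the project), another file-based source could look like this:

package tech.ides.external.datasource.json

import org.apache.spark.sql.DataFrameReader
import tech.ides.datasource.{BaseFileDataSource, DataSource, DataSourceConfig, DataTable, SparkDataTable}
import tech.ides.datasource.DataSource.Method._
import tech.ides.datasource.reader.Reader
import tech.ides.strategy.PlatformFrameEnum.SPARK
import tech.ides.strategy.PlatformFrameImpl

// Hypothetical example: a minimal file-based source reusing the same extension points.
@DataSource(
  types = Array(SOURCE, SINK),
  name = "json",
  sinceVersion = "1.0.0"
)
@PlatformFrameImpl(frameName = SPARK)
class JsonDataSource extends BaseFileDataSource {

  override def load(reader: Reader, config: DataSourceConfig): DataTable = {
    // Delegate to Spark's built-in reader and hand the result back as a SparkDataTable.
    val sparkDataReader = reader.getReader[DataFrameReader]
    val dataFrame = sparkDataReader.options(config.config).format(fullFormat).load(config.path)
    SparkDataTable(dataFrame)
  }

  override def fullFormat: String = "json"

  override def shortFormat: String = fullFormat
}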
--------------------------------------------------------------------------------
/external/hive-exec/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <artifactId>DataLinked</artifactId>
7 |         <groupId>tech.ides</groupId>
8 |         <version>${env.project.version}</version>
9 |         <relativePath>../../pom.xml</relativePath>
10 |     </parent>
11 |     <modelVersion>4.0.0</modelVersion>
12 |
13 |     <artifactId>hive-exec-${spark.big.version}_${scala.binary.version}</artifactId>
14 |
15 |     <dependencies>
16 |         <dependency>
17 |             <groupId>org.apache.spark</groupId>
18 |             <artifactId>spark-hive_${scala.binary.version}</artifactId>
19 |             <version>${spark.version}</version>
20 |             <scope>provided</scope>
21 |         </dependency>
22 |     </dependencies>
23 | </project>
--------------------------------------------------------------------------------
/external/hive-exec/src/main/java/org/apache/hadoop/hive/custom/inputformat/MultiLineCSVInputFormat.java:
--------------------------------------------------------------------------------
1 | package org.apache.hadoop.hive.custom.inputformat;
2 |
3 | import org.apache.hadoop.classification.InterfaceAudience;
4 | import org.apache.hadoop.classification.InterfaceStability;
5 | import org.apache.hadoop.fs.FileSystem;
6 | import org.apache.hadoop.fs.Path;
7 | import org.apache.hadoop.hive.custom.serde.OpenCSVSerde;
8 | import org.apache.hadoop.io.LongWritable;
9 | import org.apache.hadoop.io.Text;
10 | import org.apache.hadoop.io.compress.CompressionCodec;
11 | import org.apache.hadoop.io.compress.CompressionCodecFactory;
12 | import org.apache.hadoop.io.compress.SplittableCompressionCodec;
13 | import org.apache.hadoop.mapred.*;
14 | import tech.sqlclub.common.utils.StringEscapeUtils;
15 |
16 | import java.io.IOException;
17 | import java.util.Locale;
18 |
19 | /**
20 | * InputFormat for CSV files whose fields contain embedded line breaks
21 | *
22 | * An {@link InputFormat} for plain text csv files. Files are broken into lines.
23 | * Either linefeed or carriage-return are used to signal end of line. Keys are
24 | * the position in the file, and values are the line of text.
25 | *
26 | * @see org.apache.hadoop.mapred.TextInputFormat
27 | * Created by songgr on 2020/12/01.
28 | */
29 |
30 | @InterfaceAudience.Public
31 | @InterfaceStability.Stable
32 | public class MultiLineCSVInputFormat extends FileInputFormat<LongWritable, Text>
33 | implements JobConfigurable {
34 |
35 | private CompressionCodecFactory compressionCodecs = null;
36 |
37 | @Override
38 | public void configure(JobConf conf) {
39 | compressionCodecs = new CompressionCodecFactory(conf);
40 | }
41 |
42 | @Override
43 | protected boolean isSplitable(FileSystem fs, Path file) {
44 | final CompressionCodec codec = compressionCodecs.getCodec(file);
45 | if (null == codec) {
46 | return true;
47 | }
48 | return codec instanceof SplittableCompressionCodec;
49 | }
50 |
51 | @Override
52 | public RecordReader<LongWritable, Text> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
53 | reporter.setStatus(split.toString());
54 |
55 | String separatorChar = StringEscapeUtils.unescapeJava(job.get(OpenCSVSerde.SEPARATORCHAR.toLowerCase(Locale.ROOT)));
56 | String quoteChar = StringEscapeUtils.unescapeJava(job.get(OpenCSVSerde.QUOTECHAR.toLowerCase(Locale.ROOT)));
57 | String escapeChar = StringEscapeUtils.unescapeJava(job.get(OpenCSVSerde.ESCAPECHAR.toLowerCase(Locale.ROOT)));
58 |
59 | // The core is the RecordReader, which splits out each logical record
60 | return new MultiLineCSVRecordReader(job, (FileSplit) split, separatorChar, quoteChar, escapeChar);
61 | }
62 | }
63 |
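This InputFormat is meant to be referenced from a Hive table definition together with the custom OpenCSVSerde, but for a quick local check it can also be driven through Spark's legacy Hadoop RDD API. A minimal sketch, assuming a local test file at /tmp/people.csv (the path and the separator/quote/escape values are illustrative, not project defaults):

import java.util.Locale

import org.apache.hadoop.hive.custom.inputformat.MultiLineCSVInputFormat
import org.apache.hadoop.hive.custom.serde.OpenCSVSerde
import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapred.{FileInputFormat, JobConf}
import org.apache.spark.sql.SparkSession

object MultiLineCsvCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("multi-line-csv").getOrCreate()

    // The record reader picks its separator/quote/escape characters from the job configuration,
    // using the same lower-cased keys the InputFormat reads them with.
    val jobConf = new JobConf(spark.sparkContext.hadoopConfiguration)
    jobConf.set(OpenCSVSerde.SEPARATORCHAR.toLowerCase(Locale.ROOT), ",")
    jobConf.set(OpenCSVSerde.QUOTECHAR.toLowerCase(Locale.ROOT), "\"")
    jobConf.set(OpenCSVSerde.ESCAPECHAR.toLowerCase(Locale.ROOT), "\\\\")
    FileInputFormat.setInputPaths(jobConf, "/tmp/people.csv") // hypothetical input path

    // Each value is one logical CSV record, even when a quoted field spans several physical lines.
    val records = spark.sparkContext
      .hadoopRDD(jobConf, classOf[MultiLineCSVInputFormat], classOf[LongWritable], classOf[Text])
      .map { case (_, line) => line.toString }

    records.collect().foreach(println)
    spark.stop()
  }
}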
--------------------------------------------------------------------------------
/kubernetes/dockerfiles/ides-shell/Dockerfile:
--------------------------------------------------------------------------------
1 | #
2 | # Licensed to the Apache Software Foundation (ASF) under one or more
3 | # contributor license agreements. See the NOTICE file distributed with
4 | # this work for additional information regarding copyright ownership.
5 | # The ASF licenses this file to You under the Apache License, Version 2.0
6 | # (the "License"); you may not use this file except in compliance with
7 | # the License. You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | #
17 |
18 | # Run the following command to build the image
19 | # docker build -f kubernetes/dockerfiles/ides-shell/Dockerfile -t datalinked/ides-shell:latest .
20 |
21 | # Lift the docker build log limit ([output clipped, log limit 1MiB reached])
22 | # Preferred fix: set the environment variable export DOCKER_BUILDKIT=0, then build.
23 |
24 | # Alternative using docker buildx (not recommended), see: https://docs.docker.com/engine/reference/commandline/buildx_build/
25 | # 1. Create a builder with a larger log output limit
26 | # docker buildx create --use --name larger_log --driver-opt env.BUILDKIT_STEP_LOG_MAX_SIZE=50000000
27 | # 2. Build the image and export the result
28 | # docker buildx build -f kubernetes/dockerfiles/ides-shell/Dockerfile -t datalinked/ides-shell:latest -o - . > datalinked_img.tar
29 | # 3. Import the image into the local registry
30 | # docker import datalinked_img.tar datalinked/ides-shell:latest
31 | # Finally, remove the builder instance: docker buildx rm larger_log
32 |
33 | ARG maven_version=3.5.4
34 |
35 | FROM maven:${maven_version}
36 |
37 | # ARG must be declared after FROM, otherwise later uses of the variable will not take effect
38 | ARG ides_version=1.0.0
39 | ARG spark_big_version=2.4
40 |
41 | # Create the working directory
42 | RUN mkdir /datalinked
43 | WORKDIR /datalinked
44 | # Add the source code
45 | ADD . .
46 | # Configure the Maven mirror to speed up the build
47 | RUN cp kubernetes/dockerfiles/settings.xml /usr/share/maven/conf/
48 | # Add the daemon script
49 | RUN cp kubernetes/dockerfiles/ides-shell/daemon /opt/
50 |
51 | # Build the shell distribution package
52 | RUN ./bin/ides-build.sh shell
53 | RUN tar -zxvf ides-${ides_version}-bin-spark${spark_big_version}.tgz -C /opt
54 | # Remove the temporary build directory
55 | RUN rm -rf /datalinked
56 | # Remove the Maven build dependencies
57 | RUN rm -rf /root/.m2/repository
58 |
59 | # Port declarations
60 | # spark-ui's port
61 | EXPOSE 4040/tcp
62 | # ides rest api's port
63 | EXPOSE 9003/tcp
64 |
65 | # Configure environment variables
66 | RUN echo "\n\
67 | IDES_HOME=/opt/ides-1.0.0-bin-spark2.4\n\
68 | export IDES_HOME\n\
69 | export PATH=\$PATH:\$IDES_HOME/bin" >> /etc/profile
70 |
71 | RUN echo "\n\
72 | source /etc/profile" >> /root/.bashrc
73 |
74 | WORKDIR /opt/ides-${ides_version}-bin-spark${spark_big_version}
75 | ENTRYPOINT ["/bin/bash", "bin/ides-shell"]
--------------------------------------------------------------------------------
/kubernetes/dockerfiles/ides-shell/daemon:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # To keep the container from exiting, replace the entrypoint with this daemon
4 | tail -f /dev/null
--------------------------------------------------------------------------------
/kubernetes/dockerfiles/settings.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
3 |           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |           xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
5 |     <localRepository>/root/.m2/repository</localRepository>
6 |
7 |
8 |
9 |
10 |
11 |     <mirrors>
12 |         <mirror>
13 |             <id>aliyunmaven</id>
14 |             <mirrorOf>central</mirrorOf>
15 |             <name>Aliyun public repository</name>
16 |             <url>https://maven.aliyun.com/repository/public</url>
17 |         </mirror>
18 |         <mirror>
19 |             <id>repo1</id>
20 |             <mirrorOf>central</mirrorOf>
21 |             <name>central repo</name>
22 |             <url>http://repo1.maven.org/maven2/</url>
23 |         </mirror>
24 |         <mirror>
25 |             <id>aliyunmaven</id>
26 |             <mirrorOf>apache snapshots</mirrorOf>
27 |             <name>Aliyun Apache snapshots repository</name>
28 |             <url>https://maven.aliyun.com/repository/apache-snapshots</url>
29 |         </mirror>
30 |     </mirrors>
31 |
32 |     <profiles>
33 |         <profile>
34 |
35 |             <id>maven_central</id>
36 |             <repositories>
37 |                 <repository>
38 |                     <id>aliyunmaven</id>
39 |                     <name>aliyunmaven</name>
40 |                     <url>https://maven.aliyun.com/repository/public</url>
41 |                     <layout>default</layout>
42 |                     <snapshots>
43 |                         <enabled>true</enabled>
44 |                     </snapshots>
45 |                     <releases>
46 |                         <enabled>true</enabled>
47 |                     </releases>
48 |                 </repository>
49 |                 <repository>
50 |                     <id>MavenCentral</id>
51 |                     <url>http://repo1.maven.org/maven2/</url>
52 |                 </repository>
53 |                 <repository>
54 |                     <id>aliyunmavenApache</id>
55 |                     <url>https://maven.aliyun.com/repository/apache-snapshots</url>
56 |                 </repository>
57 |             </repositories>
58 |         </profile>
59 |     </profiles>
60 | </settings>
--------------------------------------------------------------------------------
/libs/antlr-4.7.1-complete.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/libs/antlr-4.7.1-complete.jar
--------------------------------------------------------------------------------
/libs/mysql-connector-java-5.1.46.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/libs/mysql-connector-java-5.1.46.jar
--------------------------------------------------------------------------------
/python/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bebee4java/ides/f2311695c8e626957087bbaece9b9d5419eeed38/python/__init__.py
--------------------------------------------------------------------------------
/repl/src/main/java/tech/ides/repl/Signaling.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package tech.ides.repl
19 |
20 | import tech.sqlclub.common.log.Logging
21 |
22 |
23 | private[repl] object Signaling extends Logging {
24 |
25 | /**
26 | * Register a SIGINT handler, that terminates all active jobs or terminates
27 | * when no jobs are currently running.
28 | * This makes it possible to interrupt a running shell job by pressing Ctrl+C.
29 | */
30 | def cancelOnInterrupt(): Unit = SignalUtils.register("INT") {
31 | // if has active jobs, cancel all
32 | true
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/repl/src/main/scala-2.11/org/apache/spark/repl/SparkExprTyper.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package org.apache.spark.repl
19 |
20 | import scala.tools.nsc.interpreter.{ExprTyper, IR}
21 |
22 | trait SparkExprTyper extends ExprTyper {
23 |
24 | import repl._
25 | import global.{reporter => _, Import => _, _}
26 | import naming.freshInternalVarName
27 |
28 | def doInterpret(code: String): IR.Result = {
29 | // interpret/interpretSynthetic may change the phase,
30 | // which would have unintended effects on types.
31 | val savedPhase = phase
32 | try interpretSynthetic(code) finally phase = savedPhase
33 | }
34 |
35 | override def symbolOfLine(code: String): Symbol = {
36 | def asExpr(): Symbol = {
37 | val name = freshInternalVarName()
38 | // Typing it with a lazy val would give us the right type, but runs
39 | // into compiler bugs with things like existentials, so we compile it
40 | // behind a def and strip the NullaryMethodType which wraps the expr.
41 | val line = "def " + name + " = " + code
42 |
43 | doInterpret(line) match {
44 | case IR.Success =>
45 | val sym0 = symbolOfTerm(name)
46 | // drop NullaryMethodType
47 | sym0.cloneSymbol setInfo exitingTyper(sym0.tpe_*.finalResultType)
48 | case _ => NoSymbol
49 | }
50 | }
51 |
52 | def asDefn(): Symbol = {
53 | val old = repl.definedSymbolList.toSet
54 |
55 | doInterpret(code) match {
56 | case IR.Success =>
57 | repl.definedSymbolList filterNot old match {
58 | case Nil => NoSymbol
59 | case sym :: Nil => sym
60 | case syms => NoSymbol.newOverloaded(NoPrefix, syms)
61 | }
62 | case _ => NoSymbol
63 | }
64 | }
65 |
66 | def asError(): Symbol = {
67 | doInterpret(code)
68 | NoSymbol
69 | }
70 |
71 | beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
72 | }
73 |
74 | }
75 |
--------------------------------------------------------------------------------
/restfulserver/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <artifactId>DataLinked</artifactId>
7 |         <groupId>tech.ides</groupId>
8 |         <version>${env.project.version}</version>
9 |     </parent>
10 |     <modelVersion>4.0.0</modelVersion>
11 |
12 |     <artifactId>ides-restfulserver-${spark.big.version}_${scala.binary.version}</artifactId>
13 |
14 |     <properties>
15 |         <jetty.version>9.4.34.v20201102</jetty.version>
16 |         <scalatra.version>2.5.4</scalatra.version>
17 |     </properties>
18 |
19 |     <dependencies>
20 |         <dependency>
21 |             <groupId>org.eclipse.jetty</groupId>
22 |             <artifactId>jetty-servlet</artifactId>
23 |             <version>${jetty.version}</version>
24 |         </dependency>
25 |         <dependency>
26 |             <groupId>org.eclipse.jetty</groupId>
27 |             <artifactId>jetty-webapp</artifactId>
28 |             <version>${jetty.version}</version>
29 |         </dependency>
30 |         <dependency>
31 |             <groupId>org.scalatra</groupId>
32 |             <artifactId>scalatra_${scala.binary.version}</artifactId>
33 |             <version>${scalatra.version}</version>
34 |         </dependency>
35 |         <dependency>
36 |             <groupId>org.scalatra</groupId>
37 |             <artifactId>scalatra-scalate_${scala.binary.version}</artifactId>
38 |             <version>${scalatra.version}</version>
39 |         </dependency>
40 |         <dependency>
41 |             <groupId>org.scalatra</groupId>
42 |             <artifactId>scalatra-swagger_${scala.binary.version}</artifactId>
43 |             <version>${scalatra.version}</version>
44 |         </dependency>
45 |
46 |         <dependency>
47 |             <groupId>org.apache.spark</groupId>
48 |             <artifactId>spark-sql_${scala.binary.version}</artifactId>
49 |             <version>${spark.version}</version>
50 |             <scope>provided</scope>
51 |         </dependency>
52 |
53 |         <dependency>
54 |             <groupId>tech.ides</groupId>
55 |             <artifactId>ides-core-${spark.big.version}_${scala.binary.version}</artifactId>
56 |             <version>${project.version}</version>
57 |             <scope>provided</scope>
58 |         </dependency>
59 |     </dependencies>
60 | </project>
--------------------------------------------------------------------------------
/restfulserver/src/main/java/tech/ides/rest/IdesServer.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.rest
2 |
3 | import java.io.File
4 | import org.eclipse.jetty.server.NCSARequestLog
5 | import org.eclipse.jetty.server.handler.{HandlerCollection, RequestLogHandler}
6 | import org.eclipse.jetty.servlet.DefaultServlet
7 | import org.eclipse.jetty.webapp.WebAppContext
8 | import org.scalatra.servlet.ScalatraListener
9 | import tech.ides.conf.IdesConf
10 | import tech.sqlclub.common.log.Logging
11 | import tech.sqlclub.common.utils.FileUtils
12 | import tech.ides.constants.IdesEnvConstants._
13 |
14 | /**
15 | * Ides server
16 | * Created by songgr on 2020/06/16.
17 | */
18 | object IdesServer extends RestServer with Logging {
19 |
20 | import IdesConf._
21 |
22 | private var server: WebServer = _
23 | private var _serverUrl: Option[String] = None
24 | private[rest] var host:String = _
25 | private[rest] var port:Int = _
26 |
27 | override def start(conf:IdesConf): Unit = {
28 | host = conf.get(IDES_SERVER_HOST)
29 | port = conf.get(IDES_SERVER_PORT)
30 | server = new WebServer(conf, host, port)
31 |
32 | val handlers = new HandlerCollection
33 |
34 | val context = new WebAppContext()
35 | context.setContextPath("/")
36 | val resourceUrl = getClass.getClassLoader.getResource("webapp/WEB-INF")
37 | if (resourceUrl != null) {
38 | context.setResourceBase(resourceUrl.toExternalForm)
39 | logInfo("Ides Server set resource base dir: " + resourceUrl.toExternalForm)
40 | } else {
41 | context.setResourceBase("")
42 | logError("We can't find the resource base dir for ides server!")
43 | }
44 | context.addEventListener(new ScalatraListener)
45 | context.addServlet(classOf[DefaultServlet], "/")
46 | context.setInitParameter(ScalatraListener.LifeCycleKey, classOf[ScalatraBootstrap].getName)
47 | handlers.addHandler(context)
48 |
49 | if (conf.get(IDES_REQUEST_LOG)) {
50 | // Configure the access log
51 | val requestLogHandler = new RequestLogHandler
52 | val requestLog = new NCSARequestLog(sys.env.getOrElse(IDES_LOG_DIR,
53 | sys.env.getOrElse(IDES_HOME, ".") + "/logs") + "/ides_yyyy_mm_dd.request.log")
54 | requestLog.setAppend(true)
55 | // set the log time zone
56 | requestLog.setLogTimeZone("GMT+8")
57 | // set the log date format
58 | requestLog.setLogDateFormat("yyyy-MM-dd hh:mm:ss:SSS")
59 |
60 | requestLog.setExtended(false)
61 | requestLog.setPreferProxiedForAddress(true)
62 | // log the request latency
63 | requestLog.setLogLatency(true)
64 | requestLog.setRetainDays(conf.get(IdesConf.REQUEST_LOG_RETAIN_DAYS))
65 | requestLogHandler.setRequestLog(requestLog)
66 | val logFile = new File(requestLog.getFilename)
67 | if (!logFile.getParentFile.exists()) {
68 | FileUtils.mkdirs(logFile.getParent)
69 | }
70 | handlers.addHandler(requestLogHandler)
71 | }
72 | server.setHandler(handlers)
73 |
74 | server.start()
75 |
76 | Runtime.getRuntime.addShutdownHook(new Thread("Ides Server Shutdown") {
77 | override def run(): Unit = {
78 | logInfo("Shutting down ides server.")
79 | server.stop()
80 | }
81 | })
82 |
83 | _serverUrl = Some(s"${server.protocol}://${server.host}:${server.port}")
84 | sys.props("ides.server.server-url") = _serverUrl.get
85 | }
86 |
87 | def awaitStarted() = {
88 | // server.join()
89 | new Thread(){
90 | setDaemon(true)
91 | override def run(): Unit = server.join()
92 | }.start()
93 | }
94 |
95 | override def stop(): Unit = {
96 | if (server != null) {
97 | server.stop()
98 | }
99 | }
100 |
101 | def serverUrl(): String = {
102 | _serverUrl.getOrElse(throw new IllegalStateException("Server not yet started."))
103 | }
104 | }
105 |
--------------------------------------------------------------------------------
/restfulserver/src/main/java/tech/ides/rest/RestController.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.rest
2 |
3 | import org.json4s.JsonAST._
4 | import org.json4s.{DefaultFormats, Formats}
5 | import org.scalatra
6 | import org.scalatra.{Route, ScalatraServlet}
7 | import org.scalatra.json.JacksonJsonSupport
8 | import org.scalatra.scalate.ScalateSupport
9 | import org.scalatra.swagger._
10 | import tech.sqlclub.common.log.Logging
11 | import tech.sqlclub.common.net.NetUtils
12 | import tech.sqlclub.common.utils.CaseClassUtils.{JString => CJString , caseClassFields, SInt, SLong}
13 |
14 | /**
15 | *
16 | * Created by songgr on 2020/06/16.
17 | */
18 | abstract class RestController extends ScalatraServlet with ScalateSupport with JacksonJsonSupport with SwaggerSupport with Logging {
19 | // request start time
20 | private final val requestStartTime: ThreadLocal[Long] = new ThreadLocal[Long]
21 | // request end time
22 | private final val requestEndTime: ThreadLocal[Long] = new ThreadLocal[Long]
23 |
24 | def actionWithTimeTrack(action: => Any):Any = {
25 | val now = System.currentTimeMillis()
26 | requestStartTime.set(now)
27 | // execute the post body action
28 | val result = action
29 | // remove the timestamps recorded for this thread
30 | requestStartTime.remove()
31 | requestEndTime.remove()
32 | result
33 | }
34 |
35 | override def post(transformers: scalatra.RouteTransformer*)(action: => Any): Route = {
36 | super.post(transformers:_*)(actionWithTimeTrack(action))
37 | }
38 |
39 |
40 |
41 | /**
42 | * Render the response result, then end the request immediately (by throwing an empty exception)
43 | * @param code status code
44 | * @param content response content
45 | * @return Nothing
46 | */
47 | def render(code:Int, content:String) = {
48 | val method = request.getMethod
49 | val uri = request.getRequestURI
50 | val now = System.currentTimeMillis()
51 | requestEndTime.set(now)
52 | val durationBackend = requestEndTime.get() - requestStartTime.get()
53 | val response = Response(code, content, durationBackend)
54 | val result = caseClassFields(response).map {
55 | it =>
56 | it.fieldType match {
57 | case SInt => JField(it.fieldName, JInt(it.fieldValue.asInstanceOf[Int]))
58 | case CJString => JField(it.fieldName, JString(it.fieldValue.asInstanceOf[String]))
59 | case SLong => JField(it.fieldName, JLong(it.fieldValue.asInstanceOf[Long]))
60 | case _ => throw new RuntimeException(s"Unsupported dataType: ${it.fieldType.getName}")
61 | }
62 | }
63 | val obj = JObject(result:_*)
64 | val out = super.render(obj)
65 | super.renderResponse(out)
66 | logInfo(s"Request completed!\t${Array(method, uri, code, NetUtils.getLocalServerIp, durationBackend+"ms").mkString("\t")}")
67 | throw new RuntimeException()
68 | }
69 |
70 | def requestParam = super.params.toMap[String,String]
71 |
72 | def param(key: String) = super.params(key)
73 |
74 | def param(key: String, defaultValue:String): String = {
75 | val p = super.params.get(key)
76 | if (p.isDefined) p.get else defaultValue
77 | }
78 |
79 | def paramAsInt(key:String, defaultValue:Int):Int = {
80 | val p = super.params.get(key)
81 | if (p.isDefined) p.get.toInt else defaultValue
82 | }
83 |
84 | def paramAsLong(key:String, defaultValue:Long):Long = {
85 | val p = super.params.get(key)
86 | if (p.isDefined) p.get.toLong else defaultValue
87 | }
88 |
89 | def paramAsBoolean(key:String, defaultValue:Boolean):Boolean = {
90 | val p = super.params.get(key)
91 | if (p.isDefined) p.get.toBoolean else defaultValue
92 | }
93 |
94 | override protected implicit def jsonFormats: Formats = DefaultFormats
95 |
96 | override protected implicit def swagger: SwaggerEngine[_] = SwaggerInfo.swagger
97 |
98 | override protected def applicationDescription: String = "The IDES REST API"
99 | }
100 |
101 | case class Response(code:Int, content:String, durationBackend:Long)
102 |
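A minimal controller sketch built on this base class (illustrative only: the /echo route, its parameter name and its default value are assumptions, not part of the project):

package tech.ides.rest

// Hypothetical example controller; it inherits the JSON rendering,
// swagger wiring and parameter helpers from RestController.
class EchoController extends RestController {

  // Routes registered through post go through actionWithTimeTrack; any route can
  // terminate early via render(code, content).
  get("/echo") {
    val message = param("message", "hello") // falls back to a default when the param is absent
    render(200, message)
  }
}

Because it sits in the tech.ides.rest package, a controller like this would be found by the package scan in ControlHandlerHook.registerControllers shown in the next file.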
--------------------------------------------------------------------------------
/restfulserver/src/main/java/tech/ides/rest/ScalatraBootstrap.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.rest
2 |
3 | import javax.servlet.ServletContext
4 | import org.json4s.{DefaultFormats, Formats}
5 | import org.scalatra.{LifeCycle, ScalatraServlet}
6 | import org.scalatra.swagger.{ApiInfo, JacksonSwaggerBase, Swagger}
7 | import tech.ides.conf.IdesConf
8 | import tech.ides.core.platform.ServiceLifecycle
9 | import tech.sqlclub.common.log.Logging
10 | import tech.sqlclub.common.reflect.{ClassPath, Reflection}
11 | import scala.collection.mutable
12 |
13 | /**
14 | *
15 | * Created by songgr on 2020/06/22.
16 | */
17 | class ResourcesApp(implicit val swagger: Swagger) extends ScalatraServlet with JacksonSwaggerBase {
18 | override implicit val jsonFormats: Formats = DefaultFormats
19 | }
20 |
21 | object SwaggerApiInfo extends ApiInfo(
22 | "The DESC API",
23 | "Docs for the DESC API",
24 | "https://github.com/bebee4java/ides",
25 | "grsong.cn@gmail.com",
26 | "LICENSE",
27 | "https://github.com/bebee4java/ides/blob/master/LICENSE"
28 | )
29 |
30 | class SwaggerInfo(override val apiVersion: String) extends Swagger(Swagger.SpecVersion, apiVersion, SwaggerApiInfo)
31 |
32 | object SwaggerInfo {
33 | import tech.ides.IDES_VERSION
34 | implicit val swagger = new SwaggerInfo(IDES_VERSION)
35 | }
36 |
37 | class ScalatraBootstrap extends LifeCycle {
38 |
39 | override def init(context: ServletContext) {
40 | import SwaggerInfo._
41 | context.mount(new ResourcesApp, "/api-docs")
42 | ControlHandlerHook.allHandlers.foreach {
43 | handler =>
44 | context.mount(handler, "/*")
45 | }
46 | }
47 | }
48 |
49 | object ControlHandlerHook extends ServiceLifecycle with Logging {
50 |
51 | import org.scalatra.Handler
52 | private val _handlers = mutable.Set[Handler]()
53 |
54 | def allHandlers = _handlers.toSet
55 |
56 | def addHandler(handlers: Handler*) = handlers.foreach(_handlers.add)
57 |
58 | def removeHandler(handlers: Handler*) = handlers.foreach(_handlers.remove)
59 |
60 | def registerControllers(idesConf: IdesConf) = {
61 | import IdesConf.IDES_CONTROLLER_PACKAGES
62 | val option = idesConf.get(IDES_CONTROLLER_PACKAGES)
63 |
64 | val controllerDefaultPackages = Array("tech.ides.rest")
65 |
66 | val userControllersPackages = if (option.isDefined) {
67 | option.get.split(",").filter(_.nonEmpty).map(_.trim)
68 | } else {Array[String]()}
69 |
70 | val scanPackages = controllerDefaultPackages ++ userControllersPackages
71 |
72 | logInfo("look for the rest controllers from packages: " + scanPackages.mkString(", "))
73 | val allControllers = Reflection.allSubtypeClasses(classOf[RestController], scanPackages:_*)
74 |
75 | val controllers = allControllers.map {
76 | controllerClass =>
77 | Reflection(ClassPath.from(controllerClass)).instance[RestController]
78 | }
79 |
80 | addHandler(controllers.toSeq:_*)
81 |
82 | logInfo(s"""A total of ${controllers.size} rest controller scanned: [${controllers.map(_.getClass.getName).mkString(", ")}].""")
83 | }
84 |
85 | override def beforeService(idesConf: IdesConf): Unit = { registerControllers(idesConf) }
86 |
87 | override def afterService(idesConf: IdesConf): Unit = {}
88 | }
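Putting the pieces together, a rough boot sketch using only the calls shown above (illustrative only; the project's real startup sequence is driven elsewhere and may differ):

import tech.ides.conf.IdesConf
import tech.ides.rest.{ControlHandlerHook, IdesServer}

object RestBootSketch {
  def main(args: Array[String]): Unit = {
    val conf = new IdesConf

    // beforeService scans tech.ides.rest plus any packages configured via
    // IDES_CONTROLLER_PACKAGES and registers every RestController it finds.
    ControlHandlerHook.beforeService(conf)

    // Jetty is configured and started; ScalatraBootstrap mounts the collected handlers.
    IdesServer.start(conf)
    IdesServer.awaitStarted()

    println(IdesServer.serverUrl())
  }
}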
--------------------------------------------------------------------------------
/restfulserver/src/main/java/tech/ides/rest/WebServer.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.rest
2 |
3 | import java.net.InetAddress
4 | import tech.ides.conf.IdesConf._
5 | import org.eclipse.jetty.server.{Handler, _}
6 | import org.eclipse.jetty.util.ssl.SslContextFactory
7 | import org.eclipse.jetty.util.thread.QueuedThreadPool
8 | import tech.ides.conf.IdesConf
9 | import tech.sqlclub.common.log.Logging
10 |
11 | /**
12 | *
13 | * Created by songgr on 2020/06/16.
14 | */
15 | class WebServer(val idesConf: IdesConf, var host: String, var port: Int) extends Logging {
16 |
17 | val threadPool = new QueuedThreadPool
18 | threadPool.setName("jetty-server-threadPool")
19 | threadPool.setIdleTimeout(idesConf.get(IDES_HTTP_THREAD_IDLETIMEOUT))
20 | threadPool.setMinThreads(idesConf.get(IDES_HTTP_MIN_THREADS))
21 | threadPool.setMaxThreads(idesConf.get(IDES_HTTP_MAX_THREADS))
22 | threadPool.setDaemon(true)
23 | val server = new Server(threadPool)
24 |
25 | def jettyServer = server
26 |
27 | server.setStopTimeout(1000)
28 | server.setStopAtShutdown(true)
29 |
30 | val (connector, protocol) = idesConf.get(IdesConf.SSL_KEYSTORE) match {
31 | case None =>
32 | val http = new HttpConfiguration()
33 | http.setRequestHeaderSize(idesConf.get(IdesConf.REQUEST_HEADER_SIZE))
34 | http.setResponseHeaderSize(idesConf.get(IdesConf.RESPONSE_HEADER_SIZE))
35 | (new ServerConnector(server, new HttpConnectionFactory(http)), "http")
36 |
37 | case Some(keystore) =>
38 | val https = new HttpConfiguration()
39 | https.setRequestHeaderSize(idesConf.get(IdesConf.REQUEST_HEADER_SIZE))
40 | https.setResponseHeaderSize(idesConf.get(IdesConf.RESPONSE_HEADER_SIZE))
41 | https.addCustomizer(new SecureRequestCustomizer())
42 |
43 | val sslContextFactory = new SslContextFactory()
44 | sslContextFactory.setKeyStorePath(keystore)
45 | Option(idesConf.get(IdesConf.SSL_KEYSTORE_PASSWORD).get)
46 | .foreach(sslContextFactory.setKeyStorePassword)
47 | Option(idesConf.get(IdesConf.SSL_KEY_PASSWORD).get)
48 | .foreach(sslContextFactory.setKeyManagerPassword)
49 |
50 | (new ServerConnector(server,
51 | new SslConnectionFactory(sslContextFactory, "http/1.1"),
52 | new HttpConnectionFactory(https)), "https")
53 | }
54 |
55 | connector.setHost(host)
56 | connector.setPort(port)
57 |
58 | server.setConnectors(Array(connector))
59 |
60 | def setHandler(handler: Handler) = {
61 | server.setHandler(handler)
62 | }
63 |
64 | def start(): Unit = {
65 | server.start()
66 |
67 | val connector = server.getConnectors()(0).asInstanceOf[NetworkConnector]
68 |
69 | if (host == "0.0.0.0") {
70 | host = InetAddress.getLocalHost.getCanonicalHostName
71 | }
72 | port = connector.getLocalPort
73 |
74 | logInfo(s"Starting server on $protocol://$host:$port ,api-docs url on $protocol://$host:$port/api-docs")
75 | }
76 |
77 | def join(): Unit = {
78 | server.join()
79 | }
80 |
81 | def stop(): Unit = {
82 | if (server != null) {
83 | server.getHandlers.foreach(_.stop())
84 | server.stop()
85 | server.destroy()
86 | }
87 | }
88 | }
89 |
--------------------------------------------------------------------------------
/restfulserver/src/test/java/tech/ides/rest/IdesServerSuite.scala:
--------------------------------------------------------------------------------
1 | package tech.ides.rest
2 |
3 | import org.scalatest.FunSuite
4 | import tech.ides.conf.IdesConf
5 |
6 | /**
7 | *
8 | * Created by songgr on 2020/06/16.
9 | */
10 | class IdesServerSuite extends FunSuite {
11 |
12 | test("ides server test") {
13 |
14 | IdesServer.start(new IdesConf)
15 | println(IdesServer.serverUrl())
16 | Thread.currentThread().join()
17 |
18 | }
19 |
20 | }
21 |
--------------------------------------------------------------------------------