├── .gitignore ├── .scalafmt.conf ├── README.md ├── build.sbt ├── project ├── Commons.scala ├── Dependencies.scala ├── build.properties └── plugins.sbt ├── sample-common └── src │ └── main │ ├── protobuf │ └── sample │ │ └── define.proto │ └── resources │ └── sample-common.conf ├── sample-discovery └── src │ └── test │ ├── resources │ ├── application.conf │ └── logback-test.xml │ └── scala │ └── sample │ └── discovery │ └── SampleDiscoveryApplicationTest.scala ├── sample-elasticsearch ├── docker-compose.yml └── src │ └── test │ └── resources │ ├── application.conf │ └── logback-test.xml ├── sample-grpc └── src │ ├── main │ ├── protobuf │ │ └── sample │ │ │ ├── model.proto │ │ │ └── service.proto │ └── scala │ │ └── sample │ │ └── grpc │ │ ├── Routes.scala │ │ ├── SampleGrpcAggregate.scala │ │ ├── SampleGrpcApplication.scala │ │ └── service │ │ └── HelloServiceImpl.scala │ └── test │ ├── resources │ ├── application.conf │ └── logback-test.xml │ └── scala │ └── sample │ └── grpc │ └── SampleGrpcApplicationTest.scala ├── sample-http-client └── src │ └── test │ ├── resources │ ├── application.conf │ └── logback-test.xml │ └── scala │ └── sample │ └── http │ └── client │ └── SampleHttpClientTest.scala ├── sample-http-gateway ├── README.md └── src │ ├── main │ ├── resources │ │ └── application.conf │ └── scala │ │ └── sample │ │ └── http │ │ └── gateway │ │ ├── Routes.scala │ │ └── SampleHttpGatewayApplication.scala │ └── test │ ├── resources │ ├── application.conf │ └── logback-test.xml │ └── scala │ └── sample │ └── http │ └── gateway │ └── SampleHttpGatewayApplicationTest.scala ├── sample-http-server └── src │ ├── main │ └── scala │ │ └── sample │ │ └── http │ │ └── server │ │ ├── Routes.scala │ │ └── SampleHttpServerApplication.scala │ └── test │ ├── resources │ ├── application.conf │ └── logback-test.xml │ └── scala │ └── sample │ └── http │ └── server │ └── SampleHttpServerApplicationTest.scala ├── sample-jdbc └── src │ ├── main │ └── scala │ │ └── sample │ │ └── 
jdbc │ │ └── model │ │ └── User.scala │ └── test │ ├── resources │ ├── application.conf │ └── logback-test.xml │ └── scala │ └── sample │ └── jdbc │ └── SampleJdbcTest.scala ├── sample-kafka ├── docker-compose.yml └── src │ └── test │ └── resources │ ├── application.conf │ └── logback-test.xml ├── sample-log └── src │ └── main │ ├── resources │ ├── application.conf │ └── logback-test.xml │ └── scala │ └── sample │ └── log │ └── SampleLogApplication.scala ├── sample-mongodb └── src │ └── test │ └── resources │ ├── application.conf │ └── logback-test.xml ├── sample-scheduler-job ├── Dockerfile ├── README.md ├── docs │ └── init.sql └── src │ ├── main │ ├── protobuf │ │ └── sample │ │ │ └── scheduler │ │ │ ├── grpc │ │ │ └── scheduler.proto │ │ │ └── model │ │ │ └── scheduler.proto │ ├── resources │ │ └── sample-scheduler.conf │ └── scala │ │ └── sample │ │ └── scheduler │ │ ├── Routes.scala │ │ ├── SampleSchedulerApplication.scala │ │ ├── SchedulerAggregate.scala │ │ ├── constant │ │ └── JobConstants.scala │ │ ├── model │ │ └── End.scala │ │ ├── route │ │ └── SchedulerRoute.scala │ │ ├── service │ │ ├── SchedulerActor.scala │ │ ├── SchedulerServiceComponent.scala │ │ ├── SchedulerServiceImpl.scala │ │ └── job │ │ │ └── HongkaDefaultJob.scala │ │ └── util │ │ └── JobUtils.scala │ └── test │ ├── resources │ ├── application.conf │ ├── logback-test.xml │ └── sql │ │ └── scheduler.sql │ └── scala │ └── sample │ └── scheduler │ ├── InitTablesTest.scala │ └── SchedulerApplicationTest.scala ├── sample-slick └── src │ ├── main │ └── scala │ │ └── sample │ │ └── slick │ │ ├── model │ │ └── Person.scala │ │ └── schema │ │ ├── PersonTable.scala │ │ └── SlickSchema.scala │ └── test │ ├── resources │ ├── application.conf │ └── logback-test.xml │ └── scala │ └── sample │ ├── jdbc │ └── SampleJdbcTest.scala │ └── slick │ └── schema │ └── SlickSchemaTest.scala ├── sbt ├── sbt-dist ├── bin │ ├── java9-rt-export.jar │ ├── sbt │ ├── sbt-launch-lib.bash │ ├── sbt-launch.jar │ └── sbt.bat 
└── conf │ ├── sbtconfig.txt │ └── sbtopts ├── sbt.bat └── version.sbt /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### macOS template 3 | # General 4 | .DS_Store 5 | .AppleDouble 6 | .LSOverride 7 | 8 | # Icon must end with two \r 9 | Icon 10 | 11 | # Thumbnails 12 | ._* 13 | 14 | # Files that might appear in the root of a volume 15 | .DocumentRevisions-V100 16 | .fseventsd 17 | .Spotlight-V100 18 | .TemporaryItems 19 | .Trashes 20 | .VolumeIcon.icns 21 | .com.apple.timemachine.donotpresent 22 | 23 | # Directories potentially created on remote AFP share 24 | .AppleDB 25 | .AppleDesktop 26 | Network Trash Folder 27 | Temporary Items 28 | .apdisk 29 | ### Vim template 30 | # Swap 31 | [._]*.s[a-v][a-z] 32 | [._]*.sw[a-p] 33 | [._]s[a-rt-v][a-z] 34 | [._]ss[a-gi-z] 35 | [._]sw[a-p] 36 | 37 | # Session 38 | Session.vim 39 | 40 | # Temporary 41 | .netrwhist 42 | *~ 43 | # Auto-generated tag files 44 | tags 45 | # Persistent undo 46 | [._]*.un~ 47 | ### NotepadPP template 48 | # Notepad++ backups # 49 | *.bak 50 | ### SBT template 51 | # Simple Build Tool 52 | # http://www.scala-sbt.org/release/docs/Getting-Started/Directories.html#configuring-version-control 53 | 54 | dist/* 55 | target/ 56 | lib_managed/ 57 | src_managed/ 58 | project/boot/ 59 | project/plugins/project/ 60 | .history 61 | .cache 62 | .lib/ 63 | ### Emacs template 64 | # -*- mode: gitignore; -*- 65 | \#*\# 66 | /.emacs.desktop 67 | /.emacs.desktop.lock 68 | *.elc 69 | auto-save-list 70 | tramp 71 | .\#* 72 | 73 | # Org-mode 74 | .org-id-locations 75 | *_archive 76 | 77 | # flymake-mode 78 | *_flymake.* 79 | 80 | # eshell files 81 | /eshell/history 82 | /eshell/lastdir 83 | 84 | # elpa packages 85 | /elpa/ 86 | 87 | # reftex files 88 | *.rel 89 | 90 | # AUCTeX auto folder 91 | /auto/ 92 | 93 | # cask packages 94 | .cask/ 95 | dist/ 96 | 97 | # Flycheck 98 | flycheck_*.el 99 | 100 | # server auth 
directory 101 | /server/ 102 | 103 | # projectiles files 104 | .projectile 105 | 106 | # directory configuration 107 | .dir-locals.el 108 | ### Eclipse template 109 | 110 | .metadata 111 | bin/ 112 | tmp/ 113 | *.tmp 114 | *.swp 115 | *~.nib 116 | local.properties 117 | .settings/ 118 | .loadpath 119 | .recommenders 120 | 121 | # External tool builders 122 | .externalToolBuilders/ 123 | 124 | # Locally stored "Eclipse launch configurations" 125 | *.launch 126 | 127 | # PyDev specific (Python IDE for Eclipse) 128 | *.pydevproject 129 | 130 | # CDT-specific (C/C++ Development Tooling) 131 | .cproject 132 | 133 | # CDT- autotools 134 | .autotools 135 | 136 | # Java annotation processor (APT) 137 | .factorypath 138 | 139 | # PDT-specific (PHP Development Tools) 140 | .buildpath 141 | 142 | # sbteclipse plugin 143 | .target 144 | 145 | # Tern plugin 146 | .tern-project 147 | 148 | # TeXlipse plugin 149 | .texlipse 150 | 151 | # STS (Spring Tool Suite) 152 | .springBeans 153 | 154 | # Code Recommenders 155 | .recommenders/ 156 | 157 | # Annotation Processing 158 | .apt_generated/ 159 | 160 | # Scala IDE specific (Scala & Java development for Eclipse) 161 | .cache-main 162 | .scala_dependencies 163 | .worksheet 164 | ### Windows template 165 | # Windows thumbnail cache files 166 | Thumbs.db 167 | ehthumbs.db 168 | ehthumbs_vista.db 169 | 170 | # Dump file 171 | *.stackdump 172 | 173 | # Folder config file 174 | [Dd]esktop.ini 175 | 176 | # Recycle Bin used on file shares 177 | $RECYCLE.BIN/ 178 | 179 | # Windows Installer files 180 | *.cab 181 | *.msi 182 | *.msix 183 | *.msm 184 | *.msp 185 | 186 | # Windows shortcuts 187 | *.lnk 188 | ### JetBrains template 189 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm 190 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 191 | 192 | # User-specific stuff 193 | .idea/ 194 | 195 | # CMake 196 | cmake-build-*/ 197 | 198 | # File-based 
project format 199 | *.iws 200 | 201 | # IntelliJ 202 | out/ 203 | 204 | # mpeltonen/sbt-idea plugin 205 | .idea_modules/ 206 | 207 | # JIRA plugin 208 | atlassian-ide-plugin.xml 209 | 210 | # Cursive Clojure plugin 211 | .idea/replstate.xml 212 | 213 | # Crashlytics plugin (for Android Studio and IntelliJ) 214 | com_crashlytics_export_strings.xml 215 | crashlytics.properties 216 | crashlytics-build.properties 217 | fabric.properties 218 | 219 | # Editor-based Rest Client 220 | .idea/httpRequests 221 | ### GitBook template 222 | # Node rules: 223 | ## Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 224 | .grunt 225 | 226 | ## Dependency directory 227 | ## Commenting this out is preferred by some people, see 228 | ## https://docs.npmjs.com/misc/faq#should-i-check-my-node_modules-folder-into-git 229 | node_modules 230 | 231 | # Book build output 232 | _book 233 | 234 | # eBook build output 235 | *.epub 236 | *.mobi 237 | *.pdf 238 | ### NetBeans template 239 | nbproject/private/ 240 | build/ 241 | nbbuild/ 242 | nbdist/ 243 | .nb-gradle/ 244 | ### Linux template 245 | 246 | # temporary files which can be created if a process still has a handle open of a deleted file 247 | .fuse_hidden* 248 | 249 | # KDE directory preferences 250 | .directory 251 | 252 | # Linux trash folder which might appear on any partition or disk 253 | .Trash-* 254 | 255 | # .nfs files are created when an open file is removed but is still being accessed 256 | .nfs* 257 | ### VisualStudioCode template 258 | .vscode/* 259 | !.vscode/settings.json 260 | !.vscode/tasks.json 261 | !.vscode/launch.json 262 | !.vscode/extensions.json 263 | !/sbt-dist/bin/ 264 | 265 | hk-docs/docs/.vuepress/dist/ 266 | 267 | /.scannerwork/ 268 | *.log 269 | /logs/ 270 | .metals/ 271 | .bloop/ 272 | .vscode/ 273 | -------------------------------------------------------------------------------- /.scalafmt.conf: 
-------------------------------------------------------------------------------- 1 | version = 2.2.2 2 | style = defaultWithAlign 3 | lineEndings = unix 4 | encoding = "UTF-8" 5 | project.git = true 6 | docstrings = JavaDoc 7 | maxColumn = 120 8 | indentOperator = spray 9 | unindentTopLevelOperators = true 10 | align.tokens = [{code = "=>", owner = "Case"}] 11 | align.openParenDefnSite = false 12 | align.openParenCallSite = false 13 | optIn.breakChainOnFirstMethodDot = false 14 | optIn.configStyleArguments = false 15 | danglingParentheses = false 16 | spaces.inImportCurlyBraces = true 17 | rewrite.neverInfix.excludeFilters = [ 18 | and 19 | min 20 | max 21 | until 22 | to 23 | by 24 | eq 25 | ne 26 | "should.*" 27 | "contain.*" 28 | "must.*" 29 | in 30 | ignore 31 | be 32 | taggedAs 33 | thrownBy 34 | synchronized 35 | have 36 | when 37 | size 38 | only 39 | noneOf 40 | oneElementOf 41 | noElementsOf 42 | atLeastOneElementOf 43 | atMostOneElementOf 44 | allElementsOf 45 | inOrderElementsOf 46 | theSameElementsAs 47 | ] 48 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Akka Fusion Samples 2 | 3 | [**akka-fusion**](https://github.com/ihongka/akka-fusion) 4 | 5 | ## Samples 6 | 7 | - [sample-http-server](sample-http-server):HTTP服务 8 | - [sample-http-client](sample-http-client):HTTP客户端 9 | - [sample-scheduler-job](sample-scheduler-job):分布式调用作业 10 | - [sample-jdbc](sample-jdbc):JDBC数据库访问 11 | - [sample-slick](sample-slick):Slick数据库访问,Slick Codegen 12 | - [sample-discovery](sample-discovery):Akka Discovery 13 | - [sample-http-gateway](sample-http-gateway):HTTP反向代理网关,集成服务发现功能 14 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | import Commons._ 2 | import Dependencies._ 3 | import Publishing._ 4 | 5 | ThisBuild 
/ scalaVersion := Dependencies.versionScala 6 | 7 | ThisBuild / scalafmtOnCompile := true 8 | 9 | ThisBuild / resolvers += "Bintray akka-fusion".at("https://akka-fusion.bintray.com/maven") 10 | 11 | lazy val root = 12 | Project("akka-fusion-samples", file(".")) 13 | .aggregate( 14 | `sample-docs`, 15 | `sample-http-gateway`, 16 | `sample-discovery`, 17 | `sample-kafka`, 18 | `sample-mongodb`, 19 | `sample-elasticsearch`, 20 | `sample-slick`, 21 | `sample-jdbc`, 22 | `sample-scheduler-job`, 23 | `sample-grpc`, 24 | `sample-log`, 25 | `sample-http-client`, 26 | `sample-http-server`, 27 | `sample-common`) 28 | .settings(noPublish: _*) 29 | 30 | lazy val `sample-docs` = _project("sample-docs") 31 | .dependsOn( 32 | `sample-http-gateway`, 33 | `sample-discovery`, 34 | `sample-kafka`, 35 | `sample-mongodb`, 36 | `sample-elasticsearch`, 37 | `sample-slick`, 38 | `sample-jdbc`, 39 | `sample-scheduler-job`, 40 | `sample-grpc`, 41 | `sample-log`, 42 | `sample-http-client`, 43 | `sample-http-server`, 44 | `sample-common`) 45 | .enablePlugins(ParadoxMaterialThemePlugin) 46 | .settings(noPublish: _*) 47 | .settings( 48 | Compile / paradoxMaterialTheme ~= { 49 | _.withLanguage(java.util.Locale.SIMPLIFIED_CHINESE) 50 | .withColor("indigo", "red") 51 | .withRepository(uri("https://github.com/ihongka/akka-fusion")) 52 | .withSocial( 53 | uri("http://ihongka.github.io/akka-fusion/"), 54 | uri("https://github.com/ihongka"), 55 | uri("https://weibo.com/yangbajing")) 56 | }, 57 | paradoxProperties ++= Map( 58 | "github.base_url" -> s"https://github.com/ihongka/akka-fusion-samples/tree/${version.value}", 59 | "version" -> version.value, 60 | "scala.version" -> scalaVersion.value, 61 | "scala.binary_version" -> scalaBinaryVersion.value, 62 | "scaladoc.akka.base_url" -> s"http://doc.akka.io/api/$versionAkka", 63 | "akka.version" -> versionAkka)) 64 | 65 | lazy val `sample-http-gateway` = _project("sample-http-gateway") 66 | .enablePlugins(JavaAgent) 67 | .dependsOn(`sample-common`) 68 | 
.settings( 69 | javaAgents += "org.mortbay.jetty.alpn" % "jetty-alpn-agent" % "2.0.9" % "runtime;test", 70 | assemblyJarName in assembly := "sample-http-gateway.jar", 71 | mainClass in assembly := Some("sample.http.gateway.SampleHttpGatewayApplication"), 72 | libraryDependencies ++= Seq(_fusionHttp, _fusionHttpGateway)) 73 | 74 | lazy val `sample-mongodb` = 75 | _project("sample-mongodb").dependsOn(`sample-common`).settings(libraryDependencies += _fusionMongodb) 76 | 77 | lazy val `sample-kafka` = 78 | _project("sample-kafka").dependsOn(`sample-common`).settings(libraryDependencies += _fusionKafka) 79 | 80 | lazy val `sample-elasticsearch` = 81 | _project("sample-elasticsearch").dependsOn(`sample-common`).settings(libraryDependencies += _fusionElasticsearch) 82 | 83 | lazy val `sample-discovery` = 84 | _project("sample-discovery") 85 | .dependsOn(`sample-common`) 86 | .settings(libraryDependencies ++= Seq(_fusionDiscoveryClient, _akkaHttpTestkit % Test)) 87 | 88 | lazy val `sample-slick` = 89 | _project("sample-slick") 90 | .dependsOn(`sample-jdbc`, `sample-common`) 91 | .settings(libraryDependencies ++= Seq(_postgresql, _fusionSlick)) 92 | 93 | lazy val `sample-jdbc` = 94 | _project("sample-jdbc").dependsOn(`sample-common`).settings(libraryDependencies ++= Seq(_fusionJdbc, _h2Database)) 95 | 96 | lazy val `sample-scheduler-job` = _project("sample-scheduler-job") 97 | .dependsOn(`sample-common`) 98 | .enablePlugins(AkkaGrpcPlugin, JavaAgent) 99 | .settings( 100 | PB.protocVersion := "-v371", 101 | javaAgents += "org.mortbay.jetty.alpn" % "jetty-alpn-agent" % "2.0.9" % "runtime;test", 102 | assemblyJarName in assembly := "sample-scheduler.jar", 103 | mainClass in assembly := Some("sample.scheduler.SampleSchedulerJobApplication"), 104 | libraryDependencies ++= Seq( 105 | "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf", 106 | _postgresql, 107 | _fusionHttp, 108 | _fusionJob, 109 | _akkaHttpTestkit % Test) ++ 
_akkaClusters) 110 | 111 | lazy val `sample-grpc` = _project("sample-grpc") 112 | .dependsOn(`sample-common`) 113 | .enablePlugins(AkkaGrpcPlugin, JavaAgent) 114 | .settings( 115 | PB.protocVersion := "-v371", 116 | akkaGrpcCodeGeneratorSettings += "server_power_apis", 117 | javaAgents += "org.mortbay.jetty.alpn" % "jetty-alpn-agent" % "2.0.9" % "runtime;test", 118 | assemblyJarName in assembly := "sample-grpc.jar", 119 | mainClass in assembly := Some("sample.grpc.GrpcApplication"), 120 | libraryDependencies ++= Seq( 121 | "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf", 122 | _fusionHttp, 123 | _akkaDiscovery)) 124 | 125 | lazy val `sample-log` = 126 | _project("sample-log").dependsOn(`sample-common`).settings(libraryDependencies ++= Seq(_fusionLog, _fusionHttp)) 127 | 128 | lazy val `sample-http-client` = 129 | _project("sample-http-client") 130 | .dependsOn(`sample-common`) 131 | .settings(libraryDependencies ++= Seq(_fusionHttpClient, _fusionHttp)) 132 | 133 | lazy val `sample-http-server` = _project("sample-http-server") 134 | .enablePlugins(JavaAgent) 135 | .dependsOn(`sample-common`) 136 | .settings( 137 | javaAgents += "org.mortbay.jetty.alpn" % "jetty-alpn-agent" % "2.0.9" % "runtime;test", 138 | assemblyJarName in assembly := "sample-http-server.jar", 139 | mainClass in assembly := Some("sample.http.server.SampleHttpServerApplication"), 140 | libraryDependencies ++= Seq(_fusionHttp)) 141 | 142 | lazy val `sample-common` = 143 | _project("sample-common") 144 | .enablePlugins(AkkaGrpcPlugin) 145 | .settings(publishing: _*) 146 | .settings( 147 | PB.protocVersion := "-v371", 148 | libraryDependencies ++= Seq( 149 | "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf", 150 | _fusionSecurity, 151 | _fusionJson, 152 | _fusionCommon)) 153 | 154 | def _project(name: String, _base: String = null) = 155 | Project(id = name, base = file(if (_base eq null) name else 
_base)) 156 | .settings(basicSettings: _*) 157 | .settings(noPublish: _*) 158 | .settings(libraryDependencies ++= Seq(_fusionTest % Test)) 159 | -------------------------------------------------------------------------------- /project/Commons.scala: -------------------------------------------------------------------------------- 1 | import sbt.Keys._ 2 | import sbt._ 3 | import sbtassembly.MergeStrategy 4 | import sbtprotoc.ProtocPlugin.autoImport.PB 5 | 6 | object Commons { 7 | 8 | // import Environment.{buildEnv, BuildEnv} 9 | import sbtassembly.AssemblyKeys.assembly 10 | import sbtassembly.AssemblyKeys.assemblyMergeStrategy 11 | import sbtassembly.MergeStrategy 12 | import sbtassembly.PathList 13 | 14 | def basicSettings = 15 | Seq( 16 | organization := "helloscala", 17 | organizationName := "helloscala fusion", 18 | organizationHomepage := Some(url("https://akka-fusion.helloscala.com/")), 19 | homepage := Some(url("https://helloscala.com/")), 20 | startYear := Some(2019), 21 | scalacOptions ++= { 22 | var list = Seq( 23 | "-encoding", 24 | "UTF-8", // yes, this is 2 args 25 | "-feature", 26 | "-deprecation", 27 | "-unchecked", 28 | "-Xlint", 29 | "-Ywarn-dead-code") 30 | if (scalaVersion.value.startsWith("2.12")) { 31 | list ++= Seq("-opt:l:inline", "-opt-inline-from") 32 | } 33 | // if (buildEnv.value != BuildEnv.Developement) { 34 | // list ++= Seq("-Xelide-below", "2001") 35 | // } 36 | list 37 | }, 38 | javacOptions in Compile ++= Seq("-Xlint:unchecked", "-Xlint:deprecation"), 39 | javaOptions in run ++= Seq("-Xms128m", "-Xmx1024m", "-Djava.library.path=./target/native"), 40 | shellPrompt := { s => 41 | Project.extract(s).currentProject.id + " > " 42 | }, 43 | test in assembly := {}, 44 | assemblyMergeStrategy in assembly := { 45 | case PathList("javax", "servlet", xs @ _*) => MergeStrategy.first 46 | case PathList("io", "netty", xs @ _*) => MergeStrategy.first 47 | case PathList("jnr", xs @ _*) => MergeStrategy.first 48 | case PathList("com", "datastax", 
xs @ _*) => MergeStrategy.first 49 | case PathList("com", "kenai", xs @ _*) => MergeStrategy.first 50 | case PathList("org", "objectweb", xs @ _*) => MergeStrategy.first 51 | case PathList("com", "google", "protobuf", xs @ _*) => MergeStrategy.first 52 | case PathList("scalapb", "options", xs @ _*) => MergeStrategy.first 53 | case PathList(ps @ _*) if ps.last.endsWith(".html") => MergeStrategy.first 54 | case "application.conf" => MergeStrategy.concat 55 | case "module-info.class" => MergeStrategy.concat 56 | case "META-INF/io.netty.versions.properties" => MergeStrategy.first 57 | case PathList("org", "slf4j", xs @ _*) => MergeStrategy.first 58 | case "META-INF/native/libnetty-transport-native-epoll.so" => MergeStrategy.first 59 | case x => 60 | val oldStrategy = (assemblyMergeStrategy in assembly).value 61 | oldStrategy(x) 62 | }, 63 | fork in run := true, 64 | fork in Test := true, 65 | parallelExecution in Test := false) // ++ Environment.settings 66 | } 67 | 68 | object Publishing { 69 | 70 | lazy val publishing = Seq( 71 | publishTo := (if (version.value.endsWith("-SNAPSHOT")) { 72 | Some("Helloscala_sbt-public_snapshot".at( 73 | "https://artifactory.hongkazhijia.com/artifactory/sbt-release;build.timestamp=" + new java.util.Date().getTime)) 74 | } else { 75 | Some( 76 | "Helloscala_sbt-public_release".at( 77 | "https://artifactory.hongkazhijia.com/artifactory/libs-release")) 78 | }), 79 | credentials += Credentials(Path.userHome / ".ivy2" / ".credentials_ihongka")) 80 | 81 | lazy val noPublish = 82 | Seq(publish := ((): Unit), publishLocal := ((): Unit), publishTo := None) 83 | } 84 | 85 | object Environment { 86 | 87 | object BuildEnv extends Enumeration { 88 | val Production, Stage, Test, Developement = Value 89 | } 90 | 91 | lazy val buildEnv = settingKey[BuildEnv.Value]("The current build environment") 92 | 93 | lazy val settings = Seq(onLoadMessage := { 94 | // old message as well 95 | val defaultMessage = onLoadMessage.value 96 | val env = buildEnv.value 
97 | s"""|$defaultMessage 98 | |Working in build environment: $env""".stripMargin 99 | }) 100 | 101 | } 102 | 103 | object Packaging { 104 | // Good example https://github.com/typesafehub/activator/blob/master/project/Packaging.scala 105 | import Environment.buildEnv 106 | import Environment.BuildEnv 107 | import com.typesafe.sbt.SbtNativePackager._ 108 | import com.typesafe.sbt.packager.Keys._ 109 | 110 | // This is dirty, but play has stolen our keys, and we must mimc them here. 111 | val stage = TaskKey[File]("stage") 112 | val dist = TaskKey[File]("dist") 113 | 114 | val settings = Seq( 115 | name in Universal := s"${name.value}", 116 | dist := (packageBin in Universal).value, 117 | mappings in Universal += { 118 | val confFile = buildEnv.value match { 119 | case BuildEnv.Developement => "dev.conf" 120 | case BuildEnv.Test => "test.conf" 121 | case BuildEnv.Stage => "stage.conf" 122 | case BuildEnv.Production => "prod.conf" 123 | } 124 | (sourceDirectory(_ / "universal" / "conf").value / confFile) -> "conf/application.conf" 125 | }, 126 | bashScriptExtraDefines ++= Seq( 127 | """addJava "-Dconfig.file=${app_home}/../conf/application.conf"""", 128 | """addJava "-Dpidfile.path=${app_home}/../run/%s.pid"""".format(name.value), 129 | """addJava "-Dlogback.configurationFile=${app_home}/../conf/logback.xml""""), 130 | bashScriptConfigLocation := Some("${app_home}/../conf/jvmopts"), 131 | scriptClasspath := Seq("*"), 132 | mappings in (Compile, packageDoc) := Seq()) 133 | 134 | // Create a new MergeStrategy for aop.xml files 135 | val aopMerge: MergeStrategy = new MergeStrategy { 136 | val name = "aopMerge" 137 | import scala.xml._ 138 | import scala.xml.dtd._ 139 | 140 | def apply(tempDir: File, path: String, files: Seq[File]): Either[String, Seq[(File, String)]] = { 141 | val dt = 142 | DocType("aspectj", PublicID("-//AspectJ//DTD//EN", "http://www.eclipse.org/aspectj/dtd/aspectj.dtd"), Nil) 143 | val file = MergeStrategy.createMergeTarget(tempDir, path) 144 | val 
xmls: Seq[Elem] = files.map(XML.loadFile) 145 | val aspectsChildren: Seq[Node] = xmls.flatMap(_ \\ "aspectj" \ "aspects" \ "_") 146 | val weaverChildren: Seq[Node] = xmls.flatMap(_ \\ "aspectj" \ "weaver" \ "_") 147 | val options: String = xmls.map(x => (x \\ "aspectj" \ "weaver" \ "@options").text).mkString(" ").trim 148 | val weaverAttr = if (options.isEmpty) Null else new UnprefixedAttribute("options", options, Null) 149 | val aspects = new Elem(null, "aspects", Null, TopScope, false, aspectsChildren: _*) 150 | val weaver = new Elem(null, "weaver", weaverAttr, TopScope, false, weaverChildren: _*) 151 | val aspectj = new Elem(null, "aspectj", Null, TopScope, false, aspects, weaver) 152 | XML.save(file.toString, aspectj, "UTF-8", xmlDecl = false, dt) 153 | IO.append(file, IO.Newline.getBytes(IO.defaultCharset)) 154 | Right(Seq(file -> path)) 155 | } 156 | } 157 | 158 | } 159 | 160 | object Protobufs { 161 | def protocVersion = PB.protocVersion := "-v371" 162 | 163 | def protobufSettings = 164 | Seq( 165 | protocVersion, 166 | PB.targets in Compile := Seq(scalapb.gen(flatPackage = true) -> (sourceManaged in Compile).value), 167 | libraryDependencies ++= Seq( 168 | "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf")) 169 | } 170 | -------------------------------------------------------------------------------- /project/Dependencies.scala: -------------------------------------------------------------------------------- 1 | import sbt._ 2 | 3 | object Dependencies { 4 | val versionScala = "2.13.1" 5 | val versionScalaLib = "2.13" 6 | val versionAkka = "2.6.1" 7 | val versionAkkaFusion = "2.0.1" 8 | val versionAkkaHttp = "10.1.11" 9 | val versionJwtCore = "2.1.0" 10 | val versionMySQL = "8.0.16" 11 | val versionPostgres = "42.2.9" 12 | val versionCaffeine = "2.7.0" 13 | val versionSlickPg = "0.18.0" 14 | val versionJettyAlpnAgent = "2.0.9" 15 | 16 | val _akkaHttpTestkit = ("com.typesafe.akka" %% "akka-http-testkit" % 
versionAkkaHttp) 17 | .excludeAll(ExclusionRule("com.typesafe.akka")) 18 | .cross(CrossVersion.binary) 19 | val _fusionHttp = "com.akka-fusion" %% "fusion-http" % versionAkkaFusion 20 | val _fusionHttpClient = "com.akka-fusion" %% "fusion-http-client" % versionAkkaFusion 21 | val _fusionHttpGateway = "com.akka-fusion" %% "fusion-http-gateway" % versionAkkaFusion 22 | val _fusionActuator = "com.akka-fusion" %% "fusion-actuator" % versionAkkaFusion 23 | val _fusionDiscoveryClient = "com.akka-fusion" %% "fusion-discovery-client" % versionAkkaFusion 24 | val _fusionSlick = "com.akka-fusion" %% "fusion-slick" % versionAkkaFusion 25 | val _fusionMongodb = "com.akka-fusion" %% "fusion-mongodb" % versionAkkaFusion 26 | val _fusionElasticsearch = "com.akka-fusion" %% "fusion-elasticsearch" % versionAkkaFusion 27 | val _fusionCassandra = "com.akka-fusion" %% "fusion-cassandra" % versionAkkaFusion 28 | val _fusionKafka = "com.akka-fusion" %% "fusion-kafka" % versionAkkaFusion 29 | val _fusionJdbc = "com.akka-fusion" %% "fusion-jdbc" % versionAkkaFusion 30 | val _fusionMail = "com.akka-fusion" %% "fusion-mail" % versionAkkaFusion 31 | val _fusionJob = "com.akka-fusion" %% "fusion-job" % versionAkkaFusion 32 | val _fusionJson = "com.akka-fusion" %% "fusion-json" % versionAkkaFusion 33 | val _fusionLog = "com.akka-fusion" %% "fusion-log" % versionAkkaFusion 34 | val _fusionCore = "com.akka-fusion" %% "fusion-core" % versionAkkaFusion 35 | val _fusionCommon = "com.akka-fusion" %% "fusion-common" % versionAkkaFusion 36 | val _fusionSecurity = "com.akka-fusion" %% "fusion-security" % versionAkkaFusion 37 | val _fusionTest = "com.akka-fusion" %% "fusion-testkit" % versionAkkaFusion 38 | val _jwtCore = "com.pauldijou" %% "jwt-core" % versionJwtCore 39 | val _postgresql = "org.postgresql" % "postgresql" % versionPostgres 40 | val _h2Database = "com.h2database" % "h2" % "1.4.199" 41 | val _akkaDiscovery = "com.typesafe.akka" %% "akka-discovery" % versionAkka 42 | 43 | val _akkaClusters 
= 44 | Seq( 45 | "com.typesafe.akka" %% "akka-cluster-typed" % versionAkka, 46 | "com.typesafe.akka" %% "akka-cluster-tools" % versionAkka, 47 | _akkaDiscovery) 48 | } 49 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.3.5 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | logLevel := Level.Info 2 | 3 | resolvers += Resolver.bintrayIvyRepo("2m", "sbt-plugins") 4 | 5 | addSbtPlugin("com.github.mwz" % "sbt-sonar" % "1.6.0") 6 | addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.2.0") 7 | addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.4.0") 8 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9") 9 | addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.0") 10 | addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.5.2") 11 | addSbtPlugin("io.github.jonas" % "sbt-paradox-material-theme" % "0.6.0") 12 | addSbtPlugin("com.lightbend.akka" % "sbt-paradox-akka" % "0.27") 13 | addSbtPlugin("com.dwijnand" % "sbt-dynver" % "4.0.0") 14 | addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0") 15 | addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0") 16 | addSbtPlugin("com.lightbend.sbt" % "sbt-javaagent" % "0.1.5") 17 | addSbtPlugin("com.lightbend.akka.grpc" % "sbt-akka-grpc" % "0.7.3") 18 | addSbtPlugin("com.lightbend.sbt" % "sbt-java-formatter" % "0.4.4") 19 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.2.1") 20 | addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.7-1") 21 | addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.5") 22 | //addSbtPlugin("io.kamon" % "sbt-kanela-runner" % "2.0.2") 23 | -------------------------------------------------------------------------------- /sample-common/src/main/protobuf/sample/define.proto: 
-------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sample; 4 | 5 | message ResultBO { 6 | int32 status = 1; 7 | string msg = 2; 8 | map data = 3; 9 | } 10 | -------------------------------------------------------------------------------- /sample-common/src/main/resources/sample-common.conf: -------------------------------------------------------------------------------- 1 | akka { 2 | loglevel = INFO 3 | stdout-loglevel = INFO 4 | loggers = ["akka.event.slf4j.Slf4jLogger"] 5 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter" 6 | 7 | actor { 8 | enable-additional-serialization-bindings = on 9 | } 10 | } 11 | akka.http { 12 | host-connection-pool { 13 | idle-timeout = 90.seconds 14 | max-connections = 32 15 | max-open-requests = 128 16 | max-retries = 0 17 | } 18 | server { 19 | preview.enable-http2 = on 20 | idle-timeout = 90.seconds 21 | request-timeout = 20.seconds 22 | } 23 | client { 24 | connecting-timeout = 10.seconds 25 | log-unencrypted-network-bytes = 1000 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /sample-discovery/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | fusion.discovery { 3 | enable = true 4 | nacos { 5 | #serverAddr = 6 | #namespace = 7 | #dataId = 8 | group = "DEFAULT_GROUP" 9 | timeoutMs = 3000 10 | #serviceName = 11 | #autoRegisterInstance = true 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /sample-discovery/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 
-------------------------------------------------------------------------------- /sample-discovery/src/test/scala/sample/discovery/SampleDiscoveryApplicationTest.scala: -------------------------------------------------------------------------------- 1 | package sample.discovery 2 | 3 | import akka.actor.typed.scaladsl.adapter._ 4 | import akka.http.scaladsl.testkit.ScalatestRouteTest 5 | import fusion.core.extension.FusionCore 6 | import fusion.test.FusionTestWordSpec 7 | import org.scalatest.Matchers 8 | 9 | /** 10 | * https://nacos.io/zh-cn/docs/quick-start.html 下载并安装 Nacos 11 | */ 12 | class SampleDiscoveryApplicationTest extends FusionTestWordSpec with ScalatestRouteTest with Matchers { 13 | private val serverAddr = "" 14 | private val namespace = "" 15 | private val dataId = "" 16 | private val serviceName = "" 17 | 18 | "SampleDiscoveryApplicationTest" should { 19 | "serviceName" in { 20 | val configuration = FusionCore(system.toTyped).configuration 21 | configuration.getBoolean("fusion.discovery.nacos.serviceName") shouldBe serviceName 22 | } 23 | } 24 | 25 | override def testConfigSource: String = s"""fusion.discovery { 26 | | enable = true 27 | | nacos { 28 | | serverAddr = $serverAddr 29 | | namespace = $namespace 30 | | dataId = $dataId 31 | | group = "DEFAULT_GROUP" 32 | | timeoutMs = 3000 33 | | serviceName = $serviceName 34 | | autoRegisterInstance = true 35 | | } 36 | |}""".stripMargin 37 | } 38 | -------------------------------------------------------------------------------- /sample-elasticsearch/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | es01: 4 | image: docker.elastic.co/elasticsearch/elasticsearch:7.3.2 5 | container_name: es01 6 | environment: 7 | - node.name=es01 8 | - discovery.seed_hosts=es02 9 | - cluster.name=docker-cluster 10 | - bootstrap.memory_lock=true 11 | - "ES_JAVA_OPTS=-Xms512m -Xmx512m" 12 | ulimits: 13 | memlock: 14 | soft: -1 15 | hard: 
-1 16 | nofile: 17 | soft: 65536 18 | hard: 65536 19 | volumes: 20 | - esdata01:/usr/share/elasticsearch/data 21 | ports: 22 | - 9200:9200 23 | networks: 24 | - esnet 25 | kibana: 26 | image: docker.elastic.co/kibana/kibana:7.3.2 27 | volumes: 28 | - ./kibana.yml:/usr/share/kibana/config/kibana.yml 29 | ports: 30 | - 5601:5601 31 | networks: 32 | - esnet 33 | 34 | 35 | volumes: 36 | esdata01: 37 | driver: local 38 | 39 | networks: 40 | esnet: 41 | 42 | 43 | -------------------------------------------------------------------------------- /sample-elasticsearch/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | -------------------------------------------------------------------------------- /sample-elasticsearch/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-grpc/src/main/protobuf/sample/model.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package sample; 3 | 4 | message HelloDTO { 5 | string name = 1; 6 | } 7 | 8 | message HelloBO { 9 | string name = 1; 10 | string result = 2; 11 | } 12 | -------------------------------------------------------------------------------- /sample-grpc/src/main/protobuf/sample/service.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package sample; 3 | 4 | import "sample/model.proto"; 5 | 6 | service HelloService { 7 | rpc SayHello (HelloDTO) returns (HelloBO) { 8 | } 9 | 10 | rpc KeepsHello (stream HelloDTO) returns (HelloBO) { 
11 | } 12 | 13 | rpc KeepsResult (HelloDTO) returns (stream HelloBO) { 14 | } 15 | 16 | rpc StreamHellos (stream HelloDTO) returns (stream HelloBO) { 17 | } 18 | } -------------------------------------------------------------------------------- /sample-grpc/src/main/scala/sample/grpc/Routes.scala: -------------------------------------------------------------------------------- 1 | package sample.grpc 2 | 3 | import akka.actor.typed.ActorSystem 4 | import akka.http.scaladsl.server.Route 5 | import com.typesafe.scalalogging.StrictLogging 6 | import fusion.http.server.AbstractRoute 7 | import fusion.http.server.GrpcUtils 8 | 9 | class Routes()(implicit system: ActorSystem[_]) extends AbstractRoute with StrictLogging { 10 | private val grpcHandlers = GrpcUtils.contactToRoute(SampleGrpcAggregate(system).grpcHandlers: _*) 11 | 12 | override def route: Route = grpcRoute 13 | 14 | def grpcRoute: Route = extractRequest { request => 15 | grpcHandlers(request) 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /sample-grpc/src/main/scala/sample/grpc/SampleGrpcAggregate.scala: -------------------------------------------------------------------------------- 1 | package sample.grpc 2 | 3 | import akka.actor.typed.ActorSystem 4 | import akka.stream.Materializer 5 | import fusion.common.extension.{ FusionExtension, FusionExtensionId } 6 | import sample.HelloServicePowerApi 7 | import sample.HelloServicePowerApiHandler 8 | import sample.grpc.service.HelloServiceImpl 9 | 10 | class SampleGrpcAggregate(override val system: ActorSystem[_]) extends FusionExtension { 11 | implicit val mat: Materializer = Materializer(system) 12 | implicit val st = classicSystem 13 | 14 | val helloService: HelloServicePowerApi = new HelloServiceImpl()(system, mat) 15 | 16 | val grpcHandlers = List(HelloServicePowerApiHandler.partial(helloService)) 17 | } 18 | 19 | object SampleGrpcAggregate extends FusionExtensionId[SampleGrpcAggregate] { 20 | override def 
createExtension(system: ActorSystem[_]): SampleGrpcAggregate = new SampleGrpcAggregate(system) 21 | } 22 | -------------------------------------------------------------------------------- /sample-grpc/src/main/scala/sample/grpc/SampleGrpcApplication.scala: -------------------------------------------------------------------------------- 1 | package sample.grpc 2 | 3 | import com.typesafe.config.ConfigFactory 4 | import fusion.core.util.FusionUtils 5 | import fusion.http.FusionHttpServer 6 | 7 | object SampleGrpcApplication { 8 | def main(args: Array[String]): Unit = { 9 | implicit val system = FusionUtils.createActorSystem(ConfigFactory.load()) 10 | FusionHttpServer(system).component.startAbstractRouteSync(new Routes()) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /sample-grpc/src/main/scala/sample/grpc/service/HelloServiceImpl.scala: -------------------------------------------------------------------------------- 1 | package sample.grpc.service 2 | 3 | import java.time.OffsetDateTime 4 | 5 | import akka.NotUsed 6 | import akka.actor.typed.ActorSystem 7 | import akka.grpc.scaladsl.Metadata 8 | import akka.stream.Materializer 9 | import akka.stream.scaladsl.Sink 10 | import akka.stream.scaladsl.Source 11 | import sample.HelloBO 12 | import sample.HelloDTO 13 | import sample.HelloServicePowerApi 14 | 15 | import scala.concurrent.Future 16 | 17 | class HelloServiceImpl()(implicit system: ActorSystem[_], mat: Materializer) extends HelloServicePowerApi { 18 | import system.executionContext 19 | 20 | override def sayHello(in: HelloDTO, metadata: Metadata): Future[HelloBO] = Future { 21 | HelloBO(in.name, "result") 22 | } 23 | 24 | override def keepsHello(in: Source[HelloDTO, NotUsed], metadata: Metadata): Future[HelloBO] = { 25 | in.runWith(Sink.seq).map(hellos => HelloBO(s"Hello, ${hellos.map(_.name).mkString(", ")}", "result")) 26 | } 27 | 28 | override def keepsResult(in: HelloDTO, metadata: Metadata): 
Source[HelloBO, NotUsed] = { 29 | Source(s"Hello, ${in.name}".toList).map(c => HelloBO(in.name, c.toString)) 30 | } 31 | 32 | override def streamHellos(in: Source[HelloDTO, NotUsed], metadata: Metadata): Source[HelloBO, NotUsed] = { 33 | in.map(hello => HelloBO(hello.name, OffsetDateTime.now().toString)) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /sample-grpc/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | akka.grpc.client { 3 | "sample.HelloService" { 4 | service-discovery { 5 | mechanism = "static" 6 | } 7 | host = ${fusion.http.default.server.host} 8 | port = ${fusion.http.default.server.port} 9 | use-tls = false 10 | } 11 | } -------------------------------------------------------------------------------- /sample-grpc/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-grpc/src/test/scala/sample/grpc/SampleGrpcApplicationTest.scala: -------------------------------------------------------------------------------- 1 | package sample.grpc 2 | 3 | import akka.actor.testkit.typed.scaladsl.ScalaTestWithActorTestKit 4 | import akka.grpc.GrpcClientSettings 5 | import akka.stream.Materializer 6 | import fusion.core.extension.FusionCore 7 | import fusion.http.FusionHttpServer 8 | import fusion.test.FusionTestWordSpec 9 | import sample.HelloDTO 10 | import sample.HelloService 11 | import sample.HelloServiceClient 12 | 13 | class SampleGrpcApplicationTest extends ScalaTestWithActorTestKit with FusionTestWordSpec { 14 | implicit private val 
ec = system.executionContext 15 | implicit private val mat: Materializer = Materializer(system) 16 | 17 | "SampleGrpcApplication" should { 18 | "sayHello" in { 19 | val dto = HelloDTO("Akka Fusion") 20 | val helloBO = HelloServiceClient(grpcClientSettings).sayHello(dto).futureValue 21 | helloBO.name shouldBe dto.name 22 | helloBO.result should not be empty 23 | } 24 | } 25 | 26 | private def grpcClientSettings = { 27 | GrpcClientSettings.fromConfig(HelloService.name)(FusionCore(system).classicSystem) 28 | } 29 | 30 | override protected def beforeAll(): Unit = { 31 | super.beforeAll() 32 | FusionHttpServer(system).component.startAbstractRouteSync(new Routes()) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /sample-http-client/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | fusion.http.default { 3 | server { 4 | //port = 8000 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /sample-http-client/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-http-client/src/test/scala/sample/http/client/SampleHttpClientTest.scala: -------------------------------------------------------------------------------- 1 | package sample.http.client 2 | 3 | import akka.actor.testkit.typed.scaladsl.ScalaTestWithActorTestKit 4 | import akka.actor.typed.scaladsl.adapter._ 5 | import akka.http.scaladsl.model.HttpMethods 6 | import akka.http.scaladsl.model.StatusCodes 7 | import 
akka.http.scaladsl.unmarshalling.Unmarshal 8 | import akka.stream.Materializer 9 | import fusion.http.FusionHttpServer 10 | import fusion.http.client.DefaultHttpClient 11 | import fusion.json.jackson.Jackson 12 | import fusion.test.FusionTestWordSpec 13 | import org.json4s.JsonAST.JObject 14 | import org.json4s.JsonAST.JString 15 | import org.json4s.JsonAST.JValue 16 | import org.scalatest.BeforeAndAfterAll 17 | 18 | class SampleHttpClientTest extends ScalaTestWithActorTestKit with FusionTestWordSpec with BeforeAndAfterAll { 19 | implicit private val mat = Materializer(system) 20 | implicit private val ec = system.executionContext 21 | 22 | "HttpClient echo" should { 23 | "Ok" in { 24 | import fusion.json.json4s.http.Json4sSupport._ 25 | val socketAddress = FusionHttpServer(system).component.socketAddress 26 | val uri = s"http://${socketAddress.getAddress.getHostAddress}:${socketAddress.getPort}/api/echo" 27 | val entity = JObject("hello" -> JString("world")) 28 | val result = DefaultHttpClient(system.toClassic) 29 | .singleRequest(HttpMethods.POST, uri, entity = entity) 30 | .flatMap(resp => Unmarshal(resp.entity).to[JValue]) 31 | .futureValue 32 | result shouldBe entity 33 | } 34 | 35 | "NotFound" in { 36 | import fusion.json.jackson.http.JacksonSupport._ 37 | val socketAddress = FusionHttpServer(system).component.socketAddress 38 | val uri = s"http://${socketAddress.getAddress.getHostAddress}:${socketAddress.getPort}/api/not-found" 39 | val entity = Jackson.createObjectNode 40 | entity.put("hello", "world") 41 | val response = 42 | DefaultHttpClient(system.toClassic).singleRequest(HttpMethods.POST, uri, entity = entity).futureValue 43 | response.status shouldBe StatusCodes.NotFound 44 | } 45 | } 46 | 47 | override protected def beforeAll(): Unit = { 48 | import akka.http.scaladsl.server.Directives._ 49 | import fusion.json.json4s.http.Json4sSupport._ 50 | super.beforeAll() 51 | val route = pathPrefix("api") { 52 | (path("echo") & post) { 53 | entity(as[JValue]) 
{ payload => 54 | complete(payload) 55 | } 56 | } 57 | } 58 | FusionHttpServer(system).component.startRouteSync(route) 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /sample-http-gateway/README.md: -------------------------------------------------------------------------------- 1 | # HTTP Gateway Sample 2 | 3 | *基于 [akka-fusion](https://github.com/ihongka/akka-fusion/tree/master/fusion-http-gateway) 实现,更多资料请参阅:[HTTP网关代理 · Akka Fusion](https://ihongka.github.io/akka-fusion/http-gateway/index.html) 。* 4 | 5 | Akka HTTP是一很强大的实现 6 | -------------------------------------------------------------------------------- /sample-http-gateway/src/main/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | fusion.http.default { 3 | server { 4 | port = 8097 5 | } 6 | gateway { 7 | timeout = 10.seconds 8 | upstreams { 9 | statistics { 10 | #serviceName = hongka-server-datawarehouse 11 | targets = ["10.0.0.13:8102"] 12 | } 13 | } 14 | locations { 15 | "/api/v4/statistics" { 16 | upstream = statistics 17 | } 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /sample-http-gateway/src/main/scala/sample/http/gateway/Routes.scala: -------------------------------------------------------------------------------- 1 | package sample.http.gateway 2 | 3 | import akka.actor.typed.ActorSystem 4 | import akka.http.scaladsl.server.Route 5 | import fusion.http.gateway.server.FusionHttpGateway 6 | import fusion.http.server.AbstractRoute 7 | 8 | class Routes(system: ActorSystem[_]) extends AbstractRoute { 9 | private val gatewayRoute = FusionHttpGateway(system).component.route 10 | def route: Route = gatewayRoute 11 | } 12 | -------------------------------------------------------------------------------- /sample-http-gateway/src/main/scala/sample/http/gateway/SampleHttpGatewayApplication.scala: 
-------------------------------------------------------------------------------- 1 | package sample.http.gateway 2 | 3 | import fusion.core.util.FusionUtils 4 | import fusion.http.FusionHttpServer 5 | import helloscala.common.Configuration 6 | 7 | object SampleHttpGatewayApplication { 8 | def main(args: Array[String]): Unit = { 9 | val configuration = Configuration.fromDiscovery() 10 | val system = FusionUtils.createActorSystem(configuration) 11 | val route = new Routes(system).route 12 | FusionHttpServer(system).component.startRouteSync(route) 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /sample-http-gateway/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | fusion.http.default { 3 | server { 4 | port = 8097 5 | } 6 | gateway { 7 | timeout = 10.seconds 8 | upstreams { 9 | api { 10 | #serviceName = service-name 11 | targets = ["127.0.0.1:18081", "127.0.0.1:18082"] 12 | } 13 | not-exists { 14 | targets = ["127.0.0.1:8832"] 15 | } 16 | } 17 | locations { 18 | "/api" { 19 | upstream = api 20 | } 21 | "/not-exists" { 22 | upstream = not-exists 23 | } 24 | } 25 | } 26 | } 27 | sample.http1 { 28 | server { 29 | port = 18081 30 | } 31 | } 32 | sample.http2 { 33 | server { 34 | port = 18082 35 | } 36 | } -------------------------------------------------------------------------------- /sample-http-gateway/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-http-gateway/src/test/scala/sample/http/gateway/SampleHttpGatewayApplicationTest.scala: 
-------------------------------------------------------------------------------- 1 | package sample.http.gateway 2 | 3 | import akka.actor.testkit.typed.scaladsl.ScalaTestWithActorTestKit 4 | import akka.actor.typed.scaladsl.adapter._ 5 | import akka.http.scaladsl.model.HttpMethods 6 | import akka.http.scaladsl.model.HttpResponse 7 | import akka.http.scaladsl.model.StatusCodes 8 | import akka.stream.Materializer 9 | import fusion.http.FusionHttpServer 10 | import fusion.http.client.DefaultHttpClient 11 | import fusion.test.FusionTestFunSuite 12 | 13 | class SampleHttpGatewayApplicationTest extends ScalaTestWithActorTestKit with FusionTestFunSuite { 14 | implicit private val mat = Materializer(system) 15 | 16 | test("proxy success") { 17 | val uri = FusionHttpServer(system).component.buildUri("/api/hello") 18 | val response = DefaultHttpClient(system.toClassic).singleRequest(HttpMethods.POST, uri).futureValue 19 | response.status shouldBe StatusCodes.OK 20 | } 21 | 22 | test("proxy failure") { 23 | val uri = FusionHttpServer(system).component.buildUri("/not-exists/path") 24 | val response = DefaultHttpClient(system.toClassic).singleRequest(HttpMethods.POST, uri).futureValue 25 | response.status shouldBe StatusCodes.ServiceUnavailable 26 | } 27 | 28 | override protected def beforeAll(): Unit = { 29 | import akka.http.scaladsl.server.Directives._ 30 | super.beforeAll() 31 | val route = pathPrefix("api") { 32 | extractRequest { request => 33 | complete(HttpResponse(entity = request.entity)) 34 | } 35 | } 36 | FusionHttpServer(system).component.startAbstractRouteSync(new Routes(system)) 37 | FusionHttpServer(system).components.lookup("sample.http1").startRouteSync(route) 38 | FusionHttpServer(system).components.lookup("sample.http2").startRouteSync(route) 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /sample-http-server/src/main/scala/sample/http/server/Routes.scala: 
-------------------------------------------------------------------------------- 1 | package sample.http.server 2 | 3 | import akka.http.scaladsl.server.Route 4 | import fusion.http.server.AbstractRoute 5 | 6 | class Routes() extends AbstractRoute { 7 | override def route: Route = pathPrefix("sample") { 8 | path("hello") { 9 | complete("hello world!") 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /sample-http-server/src/main/scala/sample/http/server/SampleHttpServerApplication.scala: -------------------------------------------------------------------------------- 1 | package sample.http.server 2 | 3 | import fusion.core.util.FusionUtils 4 | import fusion.http.FusionHttpServer 5 | 6 | object SampleHttpServerApplication { 7 | def main(args: Array[String]): Unit = { 8 | val system = FusionUtils.createFromDiscovery() 9 | FusionHttpServer(system).component.startAbstractRouteSync(new Routes()) 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /sample-http-server/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | fusion.http.default { 3 | server { 4 | //port = 8000 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /sample-http-server/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-http-server/src/test/scala/sample/http/server/SampleHttpServerApplicationTest.scala: 
-------------------------------------------------------------------------------- 1 | package sample.http.server 2 | 3 | import akka.actor.testkit.typed.scaladsl.ScalaTestWithActorTestKit 4 | import fusion.http.FusionHttpServer 5 | import fusion.test.FusionTestFunSuite 6 | 7 | class SampleHttpServerApplicationTest extends ScalaTestWithActorTestKit with FusionTestFunSuite { 8 | test("testSocketAddress") { 9 | val socketAddress = FusionHttpServer(system).component.socketAddress 10 | socketAddress.getPort shouldBe 8000 11 | } 12 | 13 | test("testMain") {} 14 | 15 | override protected def beforeAll(): Unit = { 16 | super.beforeAll() 17 | FusionHttpServer(system).component.startAbstractRouteSync(new Routes()) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /sample-jdbc/src/main/scala/sample/jdbc/model/User.scala: -------------------------------------------------------------------------------- 1 | package sample.jdbc.model 2 | 3 | import java.time.LocalDateTime 4 | 5 | case class User( 6 | id: Long, 7 | name: String, 8 | age: Int, 9 | sex: Option[Int], 10 | description: Option[String], 11 | createdAt: LocalDateTime) 12 | -------------------------------------------------------------------------------- /sample-jdbc/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | fusion.jdbc.default { 3 | poolName = "sample" 4 | jdbcUrl = "jdbc:h2:mem:sample;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE" 5 | username = "devuser" 6 | password = "Devpass.2019" 7 | connectionTestQuery = "select 1;" 8 | maximumPoolSize = 2 9 | } 10 | -------------------------------------------------------------------------------- /sample-jdbc/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line 
- %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-jdbc/src/test/scala/sample/jdbc/SampleJdbcTest.scala: -------------------------------------------------------------------------------- 1 | package sample.jdbc 2 | 3 | import akka.actor.testkit.typed.scaladsl.ScalaTestWithActorTestKit 4 | import fusion.jdbc.FusionJdbc 5 | import fusion.jdbc.JdbcTemplate 6 | import fusion.test.FusionTestWordSpec 7 | import sample.jdbc.model.User 8 | 9 | class SampleJdbcTest extends ScalaTestWithActorTestKit with FusionTestWordSpec { 10 | private val dataSource = FusionJdbc(system).component 11 | private val jdbcTemplate = JdbcTemplate(dataSource) 12 | 13 | "Sample Jdbc Test" should { 14 | "init" in { 15 | jdbcTemplate.update("""create table t_user( 16 | | id bigserial primary key, 17 | | name varchar(128) not null, 18 | | age int, 19 | | sex int, 20 | | description text, 21 | | created_at timestamp not null default now() 22 | |)""".stripMargin) 23 | jdbcTemplate.update("""insert into t_user(name, age, sex, description, created_at) values 24 | |('羊八井', 33, 1, '', now()), 25 | |('杨景', 33, 1, '', now())""".stripMargin) shouldBe 2 26 | } 27 | 28 | "count" in { 29 | jdbcTemplate.count("select count(*) from t_user") shouldBe 2 30 | } 31 | 32 | "list" in { 33 | val list = jdbcTemplate.listForMap("select * from t_user", Nil) 34 | list.size shouldBe 2 35 | val obj = list.head 36 | obj.get("age") shouldBe Some(33) 37 | } 38 | 39 | "query" in { 40 | val maybeUser = jdbcTemplate.namedFindForObject( 41 | """select id, name, age, sex, description, created_at from t_user 42 | |where name = ?name""".stripMargin, 43 | Map("name" -> "羊八井"), 44 | rs => 45 | User( 46 | rs.getLong("id"), 47 | rs.getString("name"), 48 | rs.getInt("age"), 49 | Option(rs.getInt("sex")), 50 | Option(rs.getString("description")), 51 | 
rs.getTimestamp("created_at").toLocalDateTime)) 52 | maybeUser should not be empty 53 | val user = maybeUser.value 54 | user.age shouldBe 33 55 | user.sex shouldBe Some(1) 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /sample-kafka/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | zookeeper: 4 | image: wurstmeister/zookeeper 5 | ports: 6 | - "2181:2181" 7 | kafka: 8 | build: . 9 | ports: 10 | - "9092:9092" 11 | environment: 12 | KAFKA_ADVERTISED_HOST_NAME: 192.168.99.100 13 | KAFKA_CREATE_TOPICS: "test:1:1" 14 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 15 | volumes: 16 | - /var/run/docker.sock:/var/run/docker.sock -------------------------------------------------------------------------------- /sample-kafka/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | -------------------------------------------------------------------------------- /sample-kafka/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-log/src/main/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | -------------------------------------------------------------------------------- /sample-log/src/main/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level 
[%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-log/src/main/scala/sample/log/SampleLogApplication.scala: -------------------------------------------------------------------------------- 1 | package sample.log 2 | 3 | import java.util.concurrent.TimeUnit 4 | 5 | import com.typesafe.scalalogging.StrictLogging 6 | 7 | object SampleLogApplication extends StrictLogging { 8 | def main(args: Array[String]): Unit = { 9 | logger.info("SampleLogApplication startup.") 10 | logger.debug("sleep 5 seconds") 11 | TimeUnit.SECONDS.sleep(5) 12 | logger.info("SampleLogApplication shutdown.") 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /sample-mongodb/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | -------------------------------------------------------------------------------- /sample-mongodb/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-scheduler-job/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM postgres:11 2 | 3 | RUN localedef -i zh_CN -c -f UTF-8 -A /usr/share/locale/locale.alias zh_CN.UTF-8 && ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime 4 | ENV POSTGRES_PASSWORD Postgres.2019 5 | ENV LANG="zh_CN.UTF_8" 6 | 7 | COPY docs/init.sql /docker-entrypoint-initdb.d/ 8 | 
-------------------------------------------------------------------------------- /sample-scheduler-job/README.md: -------------------------------------------------------------------------------- 1 | # sample-scheduler-job 2 | 3 | ## 特性 4 | 5 | - 使用Circe作为JSON工具 6 | - 基于Quartz实现作业调度 7 | - 基于Akka Cluster实现调度控制的集群化,高可用 8 | - 同时提供RESTful和Grpc两套API接口 9 | 10 | ## 使用 11 | 12 | ### Install PostgreSQL for Docker 13 | 14 | ```shell script 15 | docker build -t sample-postgres . 16 | docker run -p 55432:5432 --name sample-postgres -d sample-postgres 17 | ``` 18 | 19 | ### Init database tables 20 | 21 | ```sbtshell 22 | > sample-scheduler-job/testOnly sample.scheduler.InitTablesTest 23 | ``` 24 | 25 | ### Unit test 26 | 27 | ```sbtshell 28 | > sample-scheduler-job/testOnly sample.scheduler.InitTablesTest 29 | ``` 30 | -------------------------------------------------------------------------------- /sample-scheduler-job/docs/init.sql: -------------------------------------------------------------------------------- 1 | create user devuser nosuperuser encrypted password 'Devpass.2019'; 2 | create database sample owner devuser template=template1; 3 | create database sample_scheduler owner devuser template=template1; 4 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/protobuf/sample/scheduler/grpc/scheduler.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package sample.scheduler.grpc; 3 | 4 | import "sample/scheduler/model/scheduler.proto"; 5 | import "sample/define.proto"; 6 | 7 | service SchedulerService { 8 | // 创建作业并设置调度 9 | rpc CreateJob (sample.scheduler.model.JobDTO) returns (sample.scheduler.model.JobBO) { 10 | } 11 | 12 | // 取消作业调度 13 | rpc CancelJob (sample.scheduler.model.JobCancelDTO) returns (sample.ResultBO) { 14 | } 15 | 16 | // 暂停作业调度 17 | rpc PauseJob (sample.scheduler.model.JobPauseDTO) returns (sample.ResultBO) { 18 | } 19 | 20 | // 
恢复作业调度 21 | rpc ResumeJob (sample.scheduler.model.JobResumeDTO) returns (sample.ResultBO) { 22 | } 23 | 24 | // 获取作业和作业关联的所有调度 25 | rpc GetJob (sample.scheduler.model.Key) returns (sample.scheduler.model.JobBO) { 26 | } 27 | } -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/protobuf/sample/scheduler/model/scheduler.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package sample.scheduler.model; 3 | 4 | import "google/protobuf/wrappers.proto"; 5 | import "google/protobuf/any.proto"; 6 | import "scalapb/scalapb.proto"; 7 | 8 | enum ScheduleType { 9 | SIMPLE = 0; 10 | CRON = 1; 11 | } 12 | 13 | message Key { 14 | string group = 1; 15 | string name = 2; 16 | } 17 | 18 | message TriggerSchedule { 19 | ScheduleType type = 1; 20 | google.protobuf.StringValue interval = 2; 21 | // 大于0指定重复次数,小于等于0无限执行 22 | google.protobuf.Int32Value repeatCount = 3; 23 | google.protobuf.Int32Value misfireInstruction = 4; 24 | google.protobuf.StringValue cronExpression = 5; 25 | google.protobuf.StringValue timezone = 6; 26 | google.protobuf.StringValue startAt = 7; 27 | google.protobuf.Int32Value triggerPriority = 8; 28 | } 29 | 30 | message JobDTO { 31 | string group = 1; 32 | google.protobuf.StringValue description = 2; 33 | // $$callback$$ 为内置key,此参数设置任务执行时要操作的回调地址 34 | map data = 3; 35 | TriggerSchedule schedule = 4; 36 | // 是否持久化Job,默认为true。当Job为非持久化时,若没有任何trigger关联再Job将被自动删除。 37 | google.protobuf.BoolValue durable = 5; 38 | } 39 | 40 | message JobBO { 41 | string group = 1; 42 | string name = 2; 43 | string description = 3; 44 | map data = 4; 45 | repeated JobTriggerBO triggers = 5; 46 | int64 createdAt = 6; 47 | } 48 | 49 | message JobTriggerDTO { 50 | string group = 1; 51 | Key jobKey = 2; 52 | TriggerSchedule schedule = 3; 53 | google.protobuf.StringValue startAt = 4; 54 | google.protobuf.Int32Value triggerPriority = 7; 55 | } 56 | 57 | message 
JobTriggerBO { 58 | string group = 1; 59 | string name = 2; 60 | Key jobKey = 3; 61 | TriggerSchedule schedule = 4; 62 | int64 createdAt = 5; 63 | // 已被触发(执行)次数 64 | int64 timesTriggered = 6; 65 | // 下一次触发时间 66 | int64 nextFireTime = 7; 67 | // 上一次触发时间 68 | int64 previousFireTime = 8; 69 | int64 endTime = 9; 70 | } 71 | 72 | // 1. 停止triggerKey指定的调度 73 | // 2. 停止jobKey关联的所有调度 74 | message JobCancelDTO { 75 | Key triggerKey = 1; 76 | Key jobKey = 2; 77 | } 78 | 79 | message JobPauseDTO { 80 | Key triggerKey = 1; 81 | Key jobKey = 2; 82 | } 83 | 84 | message JobResumeDTO { 85 | Key triggerKey = 1; 86 | Key jobKey = 2; 87 | } 88 | 89 | message JobQueryDTO { 90 | Key triggerKey = 1; 91 | Key jobKey = 2; 92 | } 93 | 94 | message JobGetDTO { 95 | Key in = 1; 96 | } 97 | 98 | message ApiResult { 99 | int32 status = 1; 100 | string msg = 2; 101 | google.protobuf.Any data = 3; 102 | int64 ctime = 4; 103 | } 104 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/resources/sample-scheduler.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | fusion.job.default { 3 | org.quartz.scheduler.instanceId = ${fusion.name} 4 | org.quartz.jobStore.class: org.quartz.impl.jdbcjobstore.JobStoreTX 5 | //# FusionJdbc id 6 | org.quartz.jobStore.dataSource: fusion.jdbc.default 7 | org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.PostgreSQLDelegate 8 | org.quartz.jobStore.tablePrefix: "qrtz_" 9 | } 10 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/Routes.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler 2 | 3 | import akka.actor.typed.ActorSystem 4 | import akka.http.scaladsl.server.Route 5 | import akka.stream.Materializer 6 | import fusion.core.extension.FusionCore 7 | import 
fusion.http.server.AbstractRoute 8 | import sample.scheduler.grpc.SchedulerServiceHandler 9 | import sample.scheduler.route.SchedulerRoute 10 | 11 | class Routes(system: ActorSystem[_]) extends AbstractRoute { 12 | implicit private val mat = Materializer(system) 13 | implicit private val classicSystem = FusionCore(system).classicSystem 14 | private val aggregate = SchedulerAggregate(system) 15 | private val grpcHandler = SchedulerServiceHandler(aggregate.schedulerService) 16 | 17 | override def route: Route = { 18 | pathPrefix("api" / "v4") { 19 | new SchedulerRoute(system).route 20 | } ~ 21 | grpcRoute 22 | } 23 | 24 | def grpcRoute: Route = extractRequest { request => 25 | onSuccess(grpcHandler(request)) { response => 26 | complete(response) 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/SampleSchedulerApplication.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler 2 | 3 | object SampleSchedulerApplication {} 4 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/SchedulerAggregate.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler 2 | 3 | import akka.Done 4 | import akka.actor.typed.ActorSystem 5 | import akka.cluster.singleton.{ ClusterSingletonManager, ClusterSingletonManagerSettings } 6 | import akka.pattern._ 7 | import akka.util.Timeout 8 | import fusion.common.extension.{ FusionExtension, FusionExtensionId } 9 | import fusion.core.extension.FusionCore 10 | import sample.scheduler.grpc.SchedulerService 11 | import sample.scheduler.model.End 12 | import sample.scheduler.service.{ SchedulerActor, SchedulerServiceImpl } 13 | 14 | import scala.concurrent.duration._ 15 | 16 | class SchedulerAggregate private (override val system: 
ActorSystem[_]) extends FusionExtension { 17 | // 使用Akka Cluster Singleton保证调度服务Actor在集群中只启动并活跃一个 18 | private val schedulerActor = 19 | classicSystem.actorOf( 20 | ClusterSingletonManager.props(SchedulerActor.props(), End, ClusterSingletonManagerSettings(classicSystem)), 21 | "sample-scheduler") 22 | 23 | FusionCore(system).shutdowns.serviceRequestsDone("sample-scheduler") { () => 24 | implicit val timeout: Timeout = 60.seconds 25 | schedulerActor.ask(End).mapTo[Done].recover { case _ => Done }(system.executionContext) 26 | } 27 | 28 | val schedulerService: SchedulerService = new SchedulerServiceImpl(classicSystem) 29 | } 30 | 31 | object SchedulerAggregate extends FusionExtensionId[SchedulerAggregate] { 32 | override def createExtension(system: ActorSystem[_]): SchedulerAggregate = new SchedulerAggregate(system) 33 | } 34 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/constant/JobConstants.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler.constant 2 | 3 | object JobConstants { 4 | val CALLBACK = "$$callback$$" 5 | } 6 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/model/End.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler.model 2 | 3 | case object End 4 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/route/SchedulerRoute.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler.route 2 | 3 | import akka.actor.typed.ActorSystem 4 | import akka.http.scaladsl.server.Route 5 | import fusion.http.server.AbstractRoute 6 | import sample.scheduler.SchedulerAggregate 7 | import sample.scheduler.model._ 8 | 9 | class 
SchedulerRoute(system: ActorSystem[_]) extends AbstractRoute { 10 | private val schedulerService = SchedulerAggregate(system).schedulerService 11 | 12 | override def route: Route = pathPrefix("scheduler") { 13 | getJobRoute ~ 14 | createJobRoute ~ 15 | pauseJobRoute ~ 16 | resumeJobRoute ~ 17 | cancelJobRoute 18 | } 19 | 20 | import fusion.json.json4s.http.Json4sSupport._ 21 | 22 | def createJobRoute: Route = pathPost("create") { 23 | entity(as[JobDTO]) { dto => 24 | complete(schedulerService.createJob(dto)) 25 | } 26 | } 27 | 28 | def pauseJobRoute: Route = pathPost("pause") { 29 | entity(as[JobPauseDTO]) { dto => 30 | complete(schedulerService.pauseJob(dto)) 31 | } 32 | } 33 | 34 | def resumeJobRoute: Route = pathPost("resume") { 35 | entity(as[JobResumeDTO]) { dto => 36 | complete(schedulerService.resumeJob(dto)) 37 | } 38 | } 39 | 40 | def cancelJobRoute: Route = pathPost("cancel") { 41 | entity(as[JobCancelDTO]) { dto => 42 | complete(schedulerService.cancelJob(dto)) 43 | } 44 | } 45 | 46 | def getJobRoute: Route = pathGet("item") { 47 | parameters(('name, 'group)) { (name, group) => 48 | complete(schedulerService.getJob(Key(group, name))) 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/service/SchedulerActor.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler.service 2 | 3 | import akka.Done 4 | import akka.actor.{ Actor, ActorLogging, Props } 5 | import akka.actor.typed.scaladsl.adapter._ 6 | import fusion.job.{ FusionJob, FusionScheduler } 7 | import sample.scheduler.model._ 8 | 9 | /** 10 | * TODO 待重构,若参考 fusion-schedulerx 11 | */ 12 | class SchedulerActor extends Actor with SchedulerServiceComponent with ActorLogging { 13 | override def preStart(): Unit = { 14 | super.preStart() 15 | context.become(onMessage(FusionJob(context.system.toTyped).component)) 16 | log.info("Scheduler 
actor startup.") 17 | } 18 | 19 | override def postStop(): Unit = { 20 | super.postStop() 21 | log.info("Scheduler actor stopped.") 22 | } 23 | 24 | override def receive: Receive = { 25 | case End => 26 | sender() ! Done 27 | context.stop(self) 28 | case other => 29 | log.warning(s"Scheduler actor not startup, receive message is $other") 30 | sender() ! Done 31 | } 32 | 33 | private def onMessage(implicit scheduler: FusionScheduler): Receive = { 34 | case dto: JobCancelDTO => sender() ! cancelJob(dto) 35 | case dto: JobDTO => sender() ! createJob(dto) 36 | case dto: JobGetDTO => sender() ! getJob(dto) 37 | case dto: JobPauseDTO => sender() ! pauseJob(dto) 38 | case dto: JobResumeDTO => sender() ! resumeJob(dto) 39 | case End => 40 | scheduler.close() 41 | sender() ! Done 42 | context.stop(self) 43 | } 44 | } 45 | 46 | object SchedulerActor { 47 | def props(): Props = Props(new SchedulerActor) 48 | } 49 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/service/SchedulerServiceComponent.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler.service 2 | 3 | import java.util.UUID 4 | 5 | import fusion.job.FusionScheduler 6 | import helloscala.common.IntStatus 7 | import org.quartz.JobKey 8 | import org.quartz.TriggerKey 9 | import sample.ResultBO 10 | import sample.scheduler.model.JobBO 11 | import sample.scheduler.model.JobCancelDTO 12 | import sample.scheduler.model.JobDTO 13 | import sample.scheduler.model.JobGetDTO 14 | import sample.scheduler.model.JobPauseDTO 15 | import sample.scheduler.model.JobResumeDTO 16 | import sample.scheduler.model.JobTriggerBO 17 | import sample.scheduler.model.Key 18 | import sample.scheduler.service.job.HongkaDefaultJob 19 | import sample.scheduler.util.JobUtils 20 | 21 | import scala.jdk.CollectionConverters._ 22 | 23 | trait SchedulerServiceComponent { 24 | def cancelJob(dto: 
JobCancelDTO)(implicit scheduler: FusionScheduler): ResultBO = { 25 | var ret = false 26 | dto.triggerKey.foreach { key => 27 | ret = scheduler.unscheduleJob(TriggerKey.triggerKey(key.name, key.group)) 28 | } 29 | dto.jobKey.foreach { key => 30 | val triggerKeys = scheduler.getTriggersOfJob(JobKey.jobKey(key.name, key.group)).map(_.getKey) 31 | ret = scheduler.unscheduleJobs(triggerKeys) 32 | } 33 | ResultBO(if (ret) IntStatus.OK else IntStatus.BAD_REQUEST) 34 | } 35 | 36 | def createJob(dto: JobDTO)(implicit scheduler: FusionScheduler): JobBO = { 37 | val jobDetail = JobUtils.toJobBuilder(dto).ofType(classOf[HongkaDefaultJob]).build() 38 | val triggerKey = TriggerKey.triggerKey(jobDetail.getKey.getName, dto.group) 39 | val trigger = JobUtils.toTriggerBuilder(dto, Some(triggerKey)).build() 40 | val createdAt = scheduler.scheduleJob(jobDetail, trigger) 41 | val data = jobDetail.getJobDataMap.asScala.map { case (key, value) => key.toString -> value.toString }.toMap 42 | val triggerBO = JobTriggerBO( 43 | trigger.getKey.getGroup, 44 | trigger.getKey.getName, 45 | Some(Key(jobDetail.getKey.getGroup, jobDetail.getKey.getName)), 46 | dto.schedule, 47 | createdAt.toEpochMilli) 48 | JobBO(dto.group, jobDetail.getKey.getName, jobDetail.getDescription, data, List(triggerBO), createdAt.toEpochMilli) 49 | } 50 | 51 | def getJob(dto: JobGetDTO)(implicit scheduler: FusionScheduler): JobBO = { 52 | val in = dto.in.get 53 | val jobKey = JobKey.jobKey(in.name, in.group) 54 | Option(scheduler.getJobDetail(jobKey)) match { 55 | case Some(jobDetail) => 56 | val triggers = scheduler.getTriggersOfJob(jobKey).map { trigger => 57 | JobTriggerBO( 58 | trigger.getKey.getGroup, 59 | trigger.getKey.getName, 60 | Some(in), 61 | Some(JobUtils.toTriggerSchedule(trigger)), 62 | UUID.fromString(trigger.getKey.getName).timestamp(), 63 | JobUtils.getTimesTriggered(trigger), 64 | Option(trigger.getNextFireTime).map(_.getTime).getOrElse(0L), 65 | 
Option(trigger.getPreviousFireTime).map(_.getTime).getOrElse(0L), 66 | Option(trigger.getEndTime).map(_.getTime).getOrElse(0L)) 67 | } 68 | JobBO( 69 | jobKey.getGroup, 70 | jobKey.getName, 71 | Option(jobDetail.getDescription).getOrElse(""), 72 | jobDetail.getJobDataMap.asScala.mapValues(_.toString).toMap, 73 | triggers, 74 | UUID.fromString(jobKey.getName).timestamp()) 75 | case _ => JobBO(jobKey.getGroup, jobKey.getName) 76 | } 77 | } 78 | 79 | def pauseJob(in: JobPauseDTO)(implicit scheduler: FusionScheduler): ResultBO = { 80 | in.triggerKey.foreach(key => scheduler.pauseTrigger(TriggerKey.triggerKey(key.name, key.group))) 81 | in.jobKey.foreach(key => scheduler.pauseJob(JobKey.jobKey(key.name, key.group))) 82 | ResultBO(IntStatus.OK) 83 | } 84 | 85 | def resumeJob(in: JobResumeDTO)(implicit scheduler: FusionScheduler): ResultBO = { 86 | in.triggerKey.foreach(key => scheduler.resumeTrigger(TriggerKey.triggerKey(key.name, key.group))) 87 | in.jobKey.foreach(key => scheduler.resumeJob(JobKey.jobKey(key.name, key.group))) 88 | ResultBO(IntStatus.OK) 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/service/SchedulerServiceImpl.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler.service 2 | 3 | import akka.cluster.singleton.ClusterSingletonProxy 4 | import akka.cluster.singleton.ClusterSingletonProxySettings 5 | import akka.pattern._ 6 | import akka.util.Timeout 7 | import akka.{ actor => classic } 8 | import sample.ResultBO 9 | import sample.scheduler.grpc.SchedulerService 10 | import sample.scheduler.model._ 11 | 12 | import scala.concurrent.Future 13 | import scala.concurrent.duration._ 14 | 15 | class SchedulerServiceImpl(system: classic.ActorSystem) extends SchedulerService { 16 | implicit private val timeout = Timeout(10.seconds) 17 | 18 | private val proxy = 19 | 
system.actorOf(ClusterSingletonProxy.props("/user/sample-scheduler", ClusterSingletonProxySettings(system))) 20 | 21 | override def cancelJob(dto: JobCancelDTO): Future[ResultBO] = proxy.ask(dto).mapTo[ResultBO] 22 | 23 | override def createJob(dto: JobDTO): Future[JobBO] = proxy.ask(dto).mapTo[JobBO] 24 | 25 | override def getJob(in: Key): Future[JobBO] = proxy.ask(JobGetDTO(Some(in))).mapTo[JobBO] 26 | 27 | override def pauseJob(in: JobPauseDTO): Future[ResultBO] = proxy.ask(in).mapTo[ResultBO] 28 | 29 | override def resumeJob(in: JobResumeDTO): Future[ResultBO] = proxy.ask(in).mapTo[ResultBO] 30 | } 31 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/service/job/HongkaDefaultJob.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler.service.job 2 | 3 | import akka.actor.typed.scaladsl.adapter._ 4 | import akka.http.scaladsl.Http 5 | import akka.http.scaladsl.model.ContentTypes 6 | import akka.http.scaladsl.model.HttpEntity 7 | import akka.http.scaladsl.model.HttpMethods 8 | import akka.http.scaladsl.model.HttpRequest 9 | import com.typesafe.scalalogging.StrictLogging 10 | import fusion.core.util.FusionUtils 11 | import fusion.job.ScheduleJob 12 | import fusion.json.JsonUtils 13 | import helloscala.common.util.StringUtils 14 | import org.json4s.Extraction 15 | import org.json4s.JsonAST.JObject 16 | import org.quartz.JobExecutionContext 17 | import sample.scheduler.constant.JobConstants 18 | 19 | import scala.jdk.CollectionConverters._ 20 | import scala.util.Failure 21 | import scala.util.Success 22 | 23 | class HongkaDefaultJob extends ScheduleJob with StrictLogging { 24 | override def execute(context: JobExecutionContext): Unit = { 25 | performCallback(context) 26 | } 27 | 28 | private def performCallback(context: JobExecutionContext): Unit = { 29 | import JsonUtils.defaultFormats 30 | 31 | val dataMap = 
context.getMergedJobDataMap.asScala.mapValues(_.toString) 32 | val callback = dataMap.getOrElse(JobConstants.CALLBACK, "") 33 | 34 | if (StringUtils.isNoneBlank(callback) && callback.startsWith("http")) { 35 | implicit val system = FusionUtils.actorSystem().toClassic 36 | import system.dispatcher 37 | 38 | val data = JObject( 39 | "data" -> Extraction.decompose(dataMap), 40 | "jobKey" -> Extraction.decompose(context.getJobDetail.getKey), 41 | "triggerKey" -> Extraction.decompose(context.getTrigger.getKey)) 42 | 43 | val request = 44 | HttpRequest( 45 | HttpMethods.POST, 46 | callback, 47 | entity = HttpEntity(ContentTypes.`application/json`, JsonUtils.compact(data))) 48 | 49 | val responseF = Http().singleRequest(request) 50 | 51 | responseF.onComplete { 52 | case Success(response) => 53 | logger.debug(s"向远程服务发送回调错误完成,[${detailTrigger(context)}] callback地址:$callback。响应:$response") 54 | case Failure(e) => 55 | logger.error(s"向远程服务发送回调错误,[${detailTrigger(context)}] callback地址:$callback", e) 56 | } 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/main/scala/sample/scheduler/util/JobUtils.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler.util 2 | 3 | import java.util.TimeZone 4 | import java.util.concurrent.TimeUnit 5 | 6 | import helloscala.common.exception.HSBadRequestException 7 | import helloscala.common.util.TimeUtils 8 | import helloscala.common.util.Utils 9 | import org.quartz._ 10 | import sample.scheduler.model.JobDTO 11 | import sample.scheduler.model.ScheduleType 12 | import sample.scheduler.model.TriggerSchedule 13 | 14 | import scala.jdk.CollectionConverters._ 15 | import scala.concurrent.duration.Duration 16 | 17 | object JobUtils { 18 | def getTimesTriggered(trigger: Trigger): Long = { 19 | trigger match { 20 | case simple: SimpleTrigger => simple.getTimesTriggered 21 | case _ => 0 22 | } 23 | } 24 | 25 | 
def toTriggerSchedule(trigger: Trigger): TriggerSchedule = { 26 | val schedule = trigger match { 27 | case cron: CronTrigger => 28 | TriggerSchedule( 29 | ScheduleType.CRON, 30 | cronExpression = Option(cron.getCronExpression), 31 | timezone = Option(cron.getTimeZone.toZoneId.toString)) 32 | case simple: SimpleTrigger => 33 | TriggerSchedule( 34 | ScheduleType.SIMPLE, // fixed: was ScheduleType.CRON; toTriggerBuilder dispatches on this field, so a SimpleTrigger must round-trip as SIMPLE 35 | Option(Duration(simple.getRepeatInterval, TimeUnit.MILLISECONDS).toString()), 36 | Option(simple.getRepeatCount)) 37 | } 38 | schedule.copy( 39 | misfireInstruction = Option(trigger.getMisfireInstruction), 40 | startAt = Some(TimeUtils.toLocalDateTime(trigger.getStartTime).format(TimeUtils.formatterDateTime)), 41 | triggerPriority = Some(trigger.getPriority)) 42 | } 43 | 44 | def toJobBuilder(dto: JobDTO): JobBuilder = { 45 | JobBuilder 46 | .newJob() 47 | .storeDurably(dto.durable.getOrElse(true)) 48 | .withIdentity(JobKey.jobKey(Utils.timeBasedUuid().toString, dto.group)) 49 | .withDescription(dto.description.getOrElse("")) 50 | .setJobData(Option(dto.data).map(data => new JobDataMap(data.asJava)).getOrElse(new JobDataMap())) 51 | } 52 | 53 | def toTriggerBuilder(dto: JobDTO, triggerKey: Option[TriggerKey] = None): TriggerBuilder[Trigger] = { 54 | val b = TriggerBuilder 55 | .newTrigger() 56 | .withIdentity(triggerKey.getOrElse(TriggerKey.triggerKey(Utils.timeBasedUuid().toString, dto.group))) 57 | .withDescription(dto.description.getOrElse("")) 58 | 59 | val schedule = dto.schedule.getOrElse(throw HSBadRequestException("调度配置未设置:schedule")) 60 | val builder = schedule.`type` match { 61 | case ScheduleType.SIMPLE => generateSimpleSchedule(schedule) 62 | case ScheduleType.CRON => generateCronSchedule(schedule) 63 | case other => throw HSBadRequestException(s"无效的Schedule调度配置: $other") 64 | } 65 | b.withSchedule(builder) 66 | 67 | schedule.startAt match { 68 | case None => 69 | b.startNow() 70 | case Some(startAt) => 71 | val startTime = 
java.util.Date.from(TimeUtils.toLocalDateTime(startAt).toInstant(TimeUtils.ZONE_CHINA_OFFSET)) 72 | b.startAt(startTime) 73 | } 74 | 75 | schedule.triggerPriority.foreach(b.withPriority) 76 | 77 | b 78 | } 79 | 80 | private def generateCronSchedule(cron: TriggerSchedule): CronScheduleBuilder = { 81 | val ss = CronScheduleBuilder.cronSchedule( 82 | cron.cronExpression.getOrElse(throw HSBadRequestException("未设置日历调度表达示,schedule.cronExpression"))) 83 | cron.timezone.foreach { timezone => 84 | val tz = TimeZone.getTimeZone(timezone) 85 | ss.inTimeZone(tz) 86 | } 87 | cron.misfireInstruction.foreach { 88 | case Trigger.MISFIRE_INSTRUCTION_IGNORE_MISFIRE_POLICY => ss.withMisfireHandlingInstructionIgnoreMisfires() 89 | case CronTrigger.MISFIRE_INSTRUCTION_DO_NOTHING => ss.withMisfireHandlingInstructionDoNothing() 90 | case CronTrigger.MISFIRE_INSTRUCTION_FIRE_ONCE_NOW => ss.withMisfireHandlingInstructionFireAndProceed() 91 | case other => throw HSBadRequestException(s"CronSchedule 无效的MISFIRE值:$other") 92 | } 93 | ss 94 | } 95 | 96 | private def generateSimpleSchedule(simple: TriggerSchedule): SimpleScheduleBuilder = { 97 | val ss = SimpleScheduleBuilder.simpleSchedule() 98 | simple.interval match { 99 | case Some(interval) => ss.withIntervalInMilliseconds(Duration(interval).toMillis) 100 | case _ if simple.repeatCount.contains(0) => ss.withIntervalInMilliseconds(1L) 101 | case _ => throw HSBadRequestException("interval 未指定时必需设置 repeatCount 为 0") 102 | } 103 | if (simple.repeatCount.exists(_ > -1)) { 104 | ss.withRepeatCount(simple.repeatCount.get) 105 | } else { 106 | ss.repeatForever() 107 | } 108 | simple.misfireInstruction.foreach { 109 | case SimpleTrigger.MISFIRE_INSTRUCTION_FIRE_NOW => ss.withMisfireHandlingInstructionFireNow() 110 | case Trigger.MISFIRE_INSTRUCTION_IGNORE_MISFIRE_POLICY => ss.withMisfireHandlingInstructionIgnoreMisfires() 111 | case SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT => 112 | 
ss.withMisfireHandlingInstructionNextWithExistingCount() 113 | case SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_REMAINING_COUNT => 114 | ss.withMisfireHandlingInstructionNextWithRemainingCount() 115 | case SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NOW_WITH_EXISTING_REPEAT_COUNT => 116 | ss.withMisfireHandlingInstructionNowWithExistingCount() 117 | case SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NOW_WITH_REMAINING_REPEAT_COUNT => 118 | ss.withMisfireHandlingInstructionNowWithRemainingCount() 119 | case other => throw HSBadRequestException(s"SimpleSchedule 无效的MISFIRE值:$other") 120 | } 121 | ss 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-scheduler.conf" 2 | akka { 3 | actor { 4 | provider = "cluster" 5 | } 6 | remote { 7 | log-remote-lifecycle-events = off 8 | artery { 9 | enabled = on 10 | transport = tcp # See Selecting a transport below 11 | canonical.hostname = "127.0.0.1" 12 | canonical.port = 25520 13 | } 14 | } 15 | cluster { 16 | seed-nodes = [ 17 | "akka://sample-scheduler@127.0.0.1:25520" 18 | ] 19 | } 20 | } 21 | fusion.name = sample-scheduler 22 | fusion.jdbc.default { 23 | poolName = "sample" 24 | jdbcUrl = "jdbc:postgresql://localhost:55432/sample_scheduler?reWriteBatchedInserts=true" 25 | username = "devuser" 26 | password = "Devpass.2019" 27 | connectionTestQuery = "select 1;" 28 | maximumPoolSize = 2 29 | } 30 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 
22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/test/resources/sql/scheduler.sql: -------------------------------------------------------------------------------- 1 | create table qrtz_job_details 2 | ( 3 | sched_name varchar(120) not null, 4 | job_name varchar(200) not null, 5 | job_group varchar(200) not null, 6 | description varchar(250), 7 | job_class_name varchar(250) not null, 8 | is_durable boolean not null, 9 | is_nonconcurrent boolean not null, 10 | is_update_data boolean not null, 11 | requests_recovery boolean not null, 12 | job_data bytea, 13 | created_at timestamp default CURRENT_TIMESTAMP not null, 14 | constraint qrtz_job_details_pkey 15 | primary key (sched_name, job_name, job_group) 16 | ); 17 | 18 | create index idx_qrtz_j_req_recovery 19 | on qrtz_job_details (sched_name, requests_recovery); 20 | 21 | create index idx_qrtz_j_grp 22 | on qrtz_job_details (sched_name, job_group); 23 | 24 | create table qrtz_triggers 25 | ( 26 | sched_name varchar(120) not null, 27 | trigger_name varchar(200) not null, 28 | trigger_group varchar(200) not null, 29 | job_name varchar(200) not null, 30 | job_group varchar(200) not null, 31 | description varchar(250), 32 | next_fire_time bigint, 33 | prev_fire_time bigint, 34 | priority integer, 35 | trigger_state varchar(16) not null, 36 | trigger_type varchar(8) not null, 37 | start_time bigint not null, 38 | end_time bigint, 39 | calendar_name varchar(200), 40 | misfire_instr smallint, 41 | job_data bytea, 42 | created_at timestamp default CURRENT_TIMESTAMP not null, 43 | constraint qrtz_triggers_pkey 44 | primary key (sched_name, trigger_name, trigger_group), 45 | constraint qrtz_triggers_sched_name_fkey 46 | foreign key (sched_name, job_name, job_group) references qrtz_job_details 47 | ); 48 | 49 | create index idx_qrtz_t_j 50 | on qrtz_triggers (sched_name, job_name, job_group); 51 | 52 | create index idx_qrtz_t_jg 53 | on 
qrtz_triggers (sched_name, job_group); 54 | 55 | create index idx_qrtz_t_c 56 | on qrtz_triggers (sched_name, calendar_name); 57 | 58 | create index idx_qrtz_t_g 59 | on qrtz_triggers (sched_name, trigger_group); 60 | 61 | create index idx_qrtz_t_state 62 | on qrtz_triggers (sched_name, trigger_state); 63 | 64 | create index idx_qrtz_t_n_state 65 | on qrtz_triggers (sched_name, trigger_name, trigger_group, trigger_state); 66 | 67 | create index idx_qrtz_t_n_g_state 68 | on qrtz_triggers (sched_name, trigger_group, trigger_state); 69 | 70 | create index idx_qrtz_t_next_fire_time 71 | on qrtz_triggers (sched_name, next_fire_time); 72 | 73 | create index idx_qrtz_t_nft_st 74 | on qrtz_triggers (sched_name, trigger_state, next_fire_time); 75 | 76 | create index idx_qrtz_t_nft_misfire 77 | on qrtz_triggers (sched_name, misfire_instr, next_fire_time); 78 | 79 | create index idx_qrtz_t_nft_st_misfire 80 | on qrtz_triggers (sched_name, misfire_instr, next_fire_time, trigger_state); 81 | 82 | create index idx_qrtz_t_nft_st_misfire_grp 83 | on qrtz_triggers (sched_name, misfire_instr, next_fire_time, trigger_group, trigger_state); 84 | 85 | create table qrtz_simple_triggers 86 | ( 87 | sched_name varchar(120) not null, 88 | trigger_name varchar(200) not null, 89 | trigger_group varchar(200) not null, 90 | repeat_count bigint not null, 91 | repeat_interval bigint not null, 92 | times_triggered bigint not null, 93 | constraint qrtz_simple_triggers_pkey 94 | primary key (sched_name, trigger_name, trigger_group), 95 | constraint qrtz_simple_triggers_sched_name_fkey 96 | foreign key (sched_name, trigger_name, trigger_group) references qrtz_triggers 97 | ); 98 | 99 | create table qrtz_cron_triggers 100 | ( 101 | sched_name varchar(120) not null, 102 | trigger_name varchar(200) not null, 103 | trigger_group varchar(200) not null, 104 | cron_expression varchar(120) not null, 105 | time_zone_id varchar(80), 106 | constraint qrtz_cron_triggers_pkey 107 | primary key (sched_name, 
trigger_name, trigger_group), 108 | constraint qrtz_cron_triggers_sched_name_fkey 109 | foreign key (sched_name, trigger_name, trigger_group) references qrtz_triggers 110 | ); 111 | 112 | create table qrtz_simprop_triggers 113 | ( 114 | sched_name varchar(120) not null, 115 | trigger_name varchar(200) not null, 116 | trigger_group varchar(200) not null, 117 | str_prop_1 varchar(512), 118 | str_prop_2 varchar(512), 119 | str_prop_3 varchar(512), 120 | int_prop_1 integer, 121 | int_prop_2 integer, 122 | long_prop_1 bigint, 123 | long_prop_2 bigint, 124 | dec_prop_1 numeric(13,4), 125 | dec_prop_2 numeric(13,4), 126 | bool_prop_1 boolean, 127 | bool_prop_2 boolean, 128 | constraint qrtz_simprop_triggers_pkey 129 | primary key (sched_name, trigger_name, trigger_group), 130 | constraint qrtz_simprop_triggers_sched_name_fkey 131 | foreign key (sched_name, trigger_name, trigger_group) references qrtz_triggers 132 | ); 133 | 134 | create table qrtz_blob_triggers 135 | ( 136 | sched_name varchar(120) not null, 137 | trigger_name varchar(200) not null, 138 | trigger_group varchar(200) not null, 139 | blob_data bytea, 140 | constraint qrtz_blob_triggers_pkey 141 | primary key (sched_name, trigger_name, trigger_group), 142 | constraint qrtz_blob_triggers_sched_name_fkey 143 | foreign key (sched_name, trigger_name, trigger_group) references qrtz_triggers 144 | ); 145 | 146 | create table qrtz_calendars 147 | ( 148 | sched_name varchar(120) not null, 149 | calendar_name varchar(200) not null, 150 | calendar bytea not null, 151 | constraint qrtz_calendars_pkey 152 | primary key (sched_name, calendar_name) 153 | ); 154 | 155 | create table qrtz_paused_trigger_grps 156 | ( 157 | sched_name varchar(120) not null, 158 | trigger_group varchar(200) not null, 159 | constraint qrtz_paused_trigger_grps_pkey 160 | primary key (sched_name, trigger_group) 161 | ); 162 | 163 | create table qrtz_fired_triggers 164 | ( 165 | sched_name varchar(120) not null, 166 | entry_id varchar(95) not null, 
167 | trigger_name varchar(200) not null, 168 | trigger_group varchar(200) not null, 169 | instance_name varchar(200) not null, 170 | fired_time bigint not null, 171 | sched_time bigint not null, 172 | priority integer not null, 173 | state varchar(16) not null, 174 | job_name varchar(200), 175 | job_group varchar(200), 176 | is_nonconcurrent boolean, 177 | requests_recovery boolean, 178 | constraint qrtz_fired_triggers_pkey 179 | primary key (sched_name, entry_id) 180 | ); 181 | 182 | create index idx_qrtz_ft_trig_inst_name 183 | on qrtz_fired_triggers (sched_name, instance_name); 184 | 185 | create index idx_qrtz_ft_inst_job_req_rcvry 186 | on qrtz_fired_triggers (sched_name, instance_name, requests_recovery); 187 | 188 | create index idx_qrtz_ft_j_g 189 | on qrtz_fired_triggers (sched_name, job_name, job_group); 190 | 191 | create index idx_qrtz_ft_jg 192 | on qrtz_fired_triggers (sched_name, job_group); 193 | 194 | create index idx_qrtz_ft_t_g 195 | on qrtz_fired_triggers (sched_name, trigger_name, trigger_group); 196 | 197 | create index idx_qrtz_ft_tg 198 | on qrtz_fired_triggers (sched_name, trigger_group); 199 | 200 | create table qrtz_scheduler_state 201 | ( 202 | sched_name varchar(120) not null, 203 | instance_name varchar(200) not null, 204 | last_checkin_time bigint not null, 205 | checkin_interval bigint not null, 206 | constraint qrtz_scheduler_state_pkey 207 | primary key (sched_name, instance_name) 208 | ); 209 | 210 | create table qrtz_locks 211 | ( 212 | sched_name varchar(120) not null, 213 | lock_name varchar(40) not null, 214 | constraint qrtz_locks_pkey 215 | primary key (sched_name, lock_name) 216 | ); 217 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/test/scala/sample/scheduler/InitTablesTest.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler 2 | 3 | import 
akka.actor.testkit.typed.scaladsl.ScalaTestWithActorTestKit 4 | import fusion.jdbc.FusionJdbc 5 | import fusion.jdbc.JdbcTemplate 6 | import fusion.test.FusionTestFunSuite 7 | 8 | class InitTablesTest extends ScalaTestWithActorTestKit with FusionTestFunSuite { 9 | private val dataSource = FusionJdbc(system).component 10 | private val jdbcTemplate = JdbcTemplate(dataSource) 11 | 12 | test("create tables") { 13 | val sqlText = scala.io.Source.fromResource("sql/scheduler.sql").getLines().mkString("\n") 14 | jdbcTemplate.update(sqlText) 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /sample-scheduler-job/src/test/scala/sample/scheduler/SchedulerApplicationTest.scala: -------------------------------------------------------------------------------- 1 | package sample.scheduler 2 | 3 | import java.util.concurrent.TimeUnit 4 | 5 | import akka.actor.ActorSystem 6 | import akka.actor.typed.scaladsl.adapter._ 7 | import akka.http.scaladsl.model.StatusCodes 8 | import akka.http.scaladsl.model.Uri 9 | import akka.http.scaladsl.testkit.RouteTestTimeout 10 | import akka.http.scaladsl.testkit.ScalatestRouteTest 11 | import fusion.test.FusionTestSuite 12 | import org.scalatest.BeforeAndAfterAll 13 | import org.scalatest.FunSuite 14 | import org.scalatest.Matchers 15 | import sample.scheduler.model._ 16 | import sample.scheduler.route.SchedulerRoute 17 | 18 | import scala.concurrent.duration._ 19 | 20 | class SchedulerApplicationTest 21 | extends FunSuite 22 | with ScalatestRouteTest 23 | with FusionTestSuite 24 | with BeforeAndAfterAll 25 | with Matchers { 26 | import fusion.json.json4s.http.Json4sSupport._ 27 | private val route = new SchedulerRoute(system.toTyped).route 28 | implicit private val timeout = RouteTestTimeout(10.seconds) 29 | 30 | private var jobs: List[Key] = Nil 31 | private var triggerKeys: List[Key] = Nil 32 | 33 | override protected def createActorSystem(): ActorSystem = ActorSystem("sample-scheduler") 34 | 35 
| test("Create Simple Job") { 36 | val payload = JobDTO( 37 | "test", 38 | data = Map("schedule" -> "Simple"), 39 | schedule = Some(TriggerSchedule(ScheduleType.SIMPLE, Some("5.seconds")))) 40 | Post("/scheduler/create", payload) ~> route ~> check { 41 | val jobBO = responseAs[JobBO] 42 | println(jobBO) 43 | status shouldBe StatusCodes.OK 44 | jobs ::= Key(jobBO.group, jobBO.name) 45 | triggerKeys ++= jobBO.triggers.map(bo => Key(bo.group, bo.name)) 46 | } 47 | } 48 | 49 | test("Create Cron Job") { 50 | val payload = JobDTO( 51 | "test", 52 | data = Map("schedule" -> "Cron"), 53 | schedule = Some(TriggerSchedule(ScheduleType.CRON, cronExpression = Some("0/5 * * * * ?")))) 54 | Post("/scheduler/create", payload) ~> route ~> check { 55 | val jobBO = responseAs[JobBO] 56 | println(jobBO) 57 | status shouldBe StatusCodes.OK 58 | jobs ::= Key(jobBO.group, jobBO.name) 59 | triggerKeys ++= jobBO.triggers.map(bo => Key(bo.group, bo.name)) 60 | } 61 | } 62 | 63 | test("Get Jobs") { 64 | jobs.foreach { jobKey => 65 | val uri = Uri("/scheduler/item").withQuery(Uri.Query("group" -> jobKey.group, "name" -> jobKey.name)) 66 | Get(uri) ~> route ~> check { 67 | val jobBO = responseAs[JobBO] 68 | println(jobBO) 69 | status shouldBe StatusCodes.OK 70 | jobBO.group shouldBe jobKey.group 71 | jobBO.name shouldBe jobKey.name 72 | } 73 | } 74 | } 75 | 76 | override protected def afterAll(): Unit = { 77 | TimeUnit.SECONDS.sleep(15) 78 | pause() 79 | super.afterAll() 80 | } 81 | 82 | private def pause(): Unit = { 83 | triggerKeys.foreach { triggerKey => 84 | Post("/scheduler/cancel", JobCancelDTO(triggerKey = Some(triggerKey))) ~> route ~> check { 85 | status shouldBe StatusCodes.OK 86 | } 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /sample-slick/src/main/scala/sample/slick/model/Person.scala: -------------------------------------------------------------------------------- 1 | package sample.slick.model 2 | 3 | import 
java.sql.Timestamp 4 | import java.time.Instant 5 | 6 | case class Person( 7 | id: Long, 8 | name: String, 9 | age: Option[Int] = None, 10 | sex: Option[Int] = None, 11 | description: Option[String] = None, 12 | createdAt: Timestamp = Timestamp.from(Instant.now())) 13 | -------------------------------------------------------------------------------- /sample-slick/src/main/scala/sample/slick/schema/PersonTable.scala: -------------------------------------------------------------------------------- 1 | package sample.slick.schema 2 | 3 | import java.sql.Timestamp 4 | 5 | import sample.slick.model.Person 6 | import sample.slick.schema.CustomProfile.api._ 7 | 8 | trait PersonTable { 9 | class PersonTable(tag: Tag) extends Table[Person](tag, "t_person") { 10 | val id = column[Long]("id", O.AutoInc, O.PrimaryKey) 11 | val name = column[String]("name") 12 | val age = column[Option[Int]]("age") 13 | val sex = column[Option[Int]]("sex") 14 | val description = column[Option[String]]("description", O.SqlType("text")) 15 | val createdAt = column[Timestamp]("created_at") 16 | override def * = (id, name, age, sex, description, createdAt).mapTo[Person] 17 | } 18 | 19 | val personTable = TableQuery[PersonTable] 20 | } 21 | -------------------------------------------------------------------------------- /sample-slick/src/main/scala/sample/slick/schema/SlickSchema.scala: -------------------------------------------------------------------------------- 1 | package sample.slick.schema 2 | 3 | import akka.actor.typed.ActorSystem 4 | import fusion.jdbc.FusionJdbc 5 | import fusion.slick.FusionJdbcProfile 6 | import slick.jdbc.PostgresProfile 7 | 8 | trait CustomProfile extends PostgresProfile with FusionJdbcProfile { 9 | override val api = MyAPI 10 | object MyAPI extends API with FusionImplicits {} 11 | } 12 | 13 | object CustomProfile extends CustomProfile 14 | 15 | import sample.slick.schema.CustomProfile.api._ 16 | 17 | class SlickSchema(system: ActorSystem[_]) extends PersonTable { 18 
| val db = databaseForDataSource(FusionJdbc(system).component) 19 | 20 | val ddl = personTable.schema 21 | } 22 | -------------------------------------------------------------------------------- /sample-slick/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | include "sample-common.conf" 2 | fusion.jdbc.default { 3 | poolName = "sample" 4 | jdbcUrl = "jdbc:h2:mem:sample;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE" 5 | username = "devuser" 6 | password = "Devpass.2019" 7 | connectionTestQuery = "select 1;" 8 | maximumPoolSize = 2 9 | } 10 | -------------------------------------------------------------------------------- /sample-slick/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | ${fusion.log.logback.console-pattern:-%d %-5level [%thread] %logger{36} %line - %msg%n%exception} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /sample-slick/src/test/scala/sample/jdbc/SampleJdbcTest.scala: -------------------------------------------------------------------------------- 1 | package sample.jdbc 2 | 3 | import akka.actor.testkit.typed.scaladsl.ScalaTestWithActorTestKit 4 | import fusion.jdbc.FusionJdbc 5 | import fusion.jdbc.JdbcTemplate 6 | import fusion.test.FusionTestWordSpec 7 | import sample.jdbc.model.User 8 | 9 | class SampleJdbcTest extends ScalaTestWithActorTestKit with FusionTestWordSpec { 10 | private val dataSource = FusionJdbc(system).component 11 | private val jdbcTemplate = JdbcTemplate(dataSource) 12 | 13 | "Sample Jdbc Test" should { 14 | "init" in { 15 | jdbcTemplate.update("""create table t_user( 16 | | id bigserial primary key, 17 | | name varchar(128) not null, 18 | | age int, 19 | | sex int, 20 | | description text, 21 | | created_at timestamp not null default 
now() 22 | |)""".stripMargin) 23 | jdbcTemplate.update("""insert into t_user(name, age, sex, description, created_at) values 24 | |('羊八井', 33, 1, '', now()), 25 | |('杨景', 33, 1, '', now())""".stripMargin) shouldBe 2 26 | } 27 | 28 | "count" in { 29 | jdbcTemplate.count("select count(*) from t_user") shouldBe 2 30 | } 31 | 32 | "list" in { 33 | val list = jdbcTemplate.listForMap("select * from t_user", Nil) 34 | list.size shouldBe 2 35 | val obj = list.head 36 | obj.get("age") shouldBe Some(33) 37 | } 38 | 39 | "query" in { 40 | val maybeUser = jdbcTemplate.namedFindForObject( 41 | """select id, name, age, sex, description, created_at from t_user 42 | |where name = ?name""".stripMargin, 43 | Map("name" -> "羊八井"), 44 | rs => 45 | User( 46 | rs.getLong("id"), 47 | rs.getString("name"), 48 | rs.getInt("age"), 49 | Option(rs.getInt("sex")), 50 | Option(rs.getString("description")), 51 | rs.getTimestamp("created_at").toLocalDateTime)) 52 | maybeUser should not be empty 53 | val user = maybeUser.value 54 | user.age shouldBe 33 55 | user.sex shouldBe Some(1) 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /sample-slick/src/test/scala/sample/slick/schema/SlickSchemaTest.scala: -------------------------------------------------------------------------------- 1 | package sample.slick.schema 2 | 3 | import java.sql.Timestamp 4 | import java.time.Instant 5 | 6 | import akka.actor.testkit.typed.scaladsl.ScalaTestWithActorTestKit 7 | import fusion.test.FusionTestWordSpec 8 | import helloscala.common.exception.HSNotFoundException 9 | import sample.slick.model.Person 10 | 11 | class SlickSchemaTest extends ScalaTestWithActorTestKit with FusionTestWordSpec { 12 | private val schema = new SlickSchema(system) 13 | import sample.slick.schema.CustomProfile.api._ 14 | import schema._ 15 | 16 | "ddl" should { 17 | "print" in { 18 | val ddls = schema.ddl.createStatements.toList 19 | ddls should not be empty 20 | 
ddls.foreach(println) 21 | } 22 | "execute" in { 23 | db.run(schema.ddl.create.transactionally).futureValue 24 | } 25 | } 26 | 27 | "dml" should { 28 | val now = Timestamp.from(Instant.now()) 29 | 30 | "insert" in { 31 | val action = personTable += Person(0, "羊八井", createdAt = now) 32 | db.run(action.transactionally).futureValue shouldBe 1 33 | } 34 | 35 | "query" in { 36 | val query = personTable.filter(t => t.name === "羊八井").result.headOption 37 | val maybe = db.run(query).futureValue 38 | maybe should not be empty 39 | val person = maybe.value 40 | person.id shouldBe 1 41 | person.name shouldBe "羊八井" 42 | person.createdAt shouldBe now 43 | } 44 | 45 | "update" in { 46 | implicit val ec = system.executionContext 47 | val id = 1L 48 | val query = personTable.filter(_.id === id) 49 | val action = query.result.headOption.flatMap { 50 | case Some(u) => 51 | val payload = u.copy(name = "杨景", age = Some(33)) 52 | query.update(payload) 53 | case _ => DBIO.failed(HSNotFoundException(s"用户未找到,ID: $id")) 54 | } 55 | val ret = db.run(action.transactionally).futureValue 56 | ret shouldBe 1 57 | } 58 | 59 | "delete" in { 60 | val ret = db.run(personTable.filter(_.id === 1L).delete.transactionally).futureValue 61 | ret shouldBe 1 62 | } 63 | 64 | "count" in { 65 | db.run(personTable.size.result).futureValue shouldBe 0 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /sbt: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ./sbt-dist/bin/sbt "$@" -------------------------------------------------------------------------------- /sbt-dist/bin/java9-rt-export.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akka-fusion/akka-fusion-samples/27c3a2024b7a44896652eba4530e71e94b0ea0dd/sbt-dist/bin/java9-rt-export.jar -------------------------------------------------------------------------------- 
/sbt-dist/bin/sbt: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set +e 4 | declare -a residual_args 5 | declare -a java_args 6 | declare -a scalac_args 7 | declare -a sbt_commands 8 | declare -a sbt_options 9 | declare -a print_sbt_version 10 | declare java_cmd=java 11 | declare java_version 12 | declare init_sbt_version=_to_be_replaced 13 | declare sbt_default_mem=1024 14 | declare -r default_sbt_opts="" 15 | declare -r default_java_opts="-Dfile.encoding=UTF-8" 16 | 17 | ### ------------------------------- ### 18 | ### Helper methods for BASH scripts ### 19 | ### ------------------------------- ### 20 | 21 | # Bash reimplementation of realpath to return the absolute path 22 | realpathish () { 23 | ( 24 | TARGET_FILE="$1" 25 | FIX_CYGPATH="$2" 26 | 27 | cd "$(dirname "$TARGET_FILE")" 28 | TARGET_FILE=$(basename "$TARGET_FILE") 29 | 30 | COUNT=0 31 | while [ -L "$TARGET_FILE" -a $COUNT -lt 100 ] 32 | do 33 | TARGET_FILE=$(readlink "$TARGET_FILE") 34 | cd "$(dirname "$TARGET_FILE")" 35 | TARGET_FILE=$(basename "$TARGET_FILE") 36 | COUNT=$(($COUNT + 1)) 37 | done 38 | 39 | # make sure we grab the actual windows path, instead of cygwin's path. 40 | if [[ "x$FIX_CYGPATH" != "x" ]]; then 41 | echo "$(cygwinpath "$(pwd -P)/$TARGET_FILE")" 42 | else 43 | echo "$(pwd -P)/$TARGET_FILE" 44 | fi 45 | ) 46 | } 47 | 48 | # Uses uname to detect if we're in the odd cygwin environment. 49 | is_cygwin() { 50 | local os=$(uname -s) 51 | case "$os" in 52 | CYGWIN*) return 0 ;; 53 | MINGW*) return 0 ;; 54 | MSYS*) return 0 ;; 55 | *) return 1 ;; 56 | esac 57 | } 58 | 59 | # TODO - Use nicer bash-isms here. 60 | CYGWIN_FLAG=$(if is_cygwin; then echo true; else echo false; fi) 61 | 62 | # This can fix cygwin style /cygdrive paths so we get the 63 | # windows style paths. 
64 | cygwinpath() { 65 | local file="$1" 66 | if [[ "$CYGWIN_FLAG" == "true" ]]; then #" 67 | echo $(cygpath -w $file) 68 | else 69 | echo $file 70 | fi 71 | } 72 | 73 | 74 | declare -r sbt_bin_dir="$(dirname "$(realpathish "$0")")" 75 | declare -r sbt_home="$(dirname "$sbt_bin_dir")" 76 | 77 | echoerr () { 78 | echo 1>&2 "$@" 79 | } 80 | vlog () { 81 | [[ $verbose || $debug ]] && echoerr "$@" 82 | } 83 | dlog () { 84 | [[ $debug ]] && echoerr "$@" 85 | } 86 | 87 | jar_file () { 88 | echo "$(cygwinpath "${sbt_home}/bin/sbt-launch.jar")" 89 | } 90 | 91 | acquire_sbt_jar () { 92 | sbt_jar="$(jar_file)" 93 | 94 | if [[ ! -f "$sbt_jar" ]]; then 95 | echoerr "Could not find launcher jar: $sbt_jar" 96 | exit 2 97 | fi 98 | } 99 | 100 | rt_export_file () { 101 | echo "${sbt_bin_dir}/java9-rt-export.jar" 102 | } 103 | 104 | execRunner () { 105 | # print the arguments one to a line, quoting any containing spaces 106 | [[ $verbose || $debug ]] && echo "# Executing command line:" && { 107 | for arg; do 108 | if printf "%s\n" "$arg" | grep -q ' '; then 109 | printf "\"%s\"\n" "$arg" 110 | else 111 | printf "%s\n" "$arg" 112 | fi 113 | done 114 | echo "" 115 | } 116 | 117 | # This used to be exec, but we loose the ability to re-hook stty then 118 | # for cygwin... Maybe we should flag the feature here... 119 | "$@" 120 | } 121 | 122 | addJava () { 123 | dlog "[addJava] arg = '$1'" 124 | java_args=( "${java_args[@]}" "$1" ) 125 | } 126 | addSbt () { 127 | dlog "[addSbt] arg = '$1'" 128 | sbt_commands=( "${sbt_commands[@]}" "$1" ) 129 | } 130 | addResidual () { 131 | dlog "[residual] arg = '$1'" 132 | residual_args=( "${residual_args[@]}" "$1" ) 133 | } 134 | addDebugger () { 135 | addJava "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=$1" 136 | } 137 | 138 | addMemory () { 139 | dlog "[addMemory] arg = '$1'" 140 | # evict memory related options 141 | local xs=("${java_args[@]}") 142 | java_args=() 143 | for i in "${xs[@]}"; do 144 | if ! 
[[ "${i}" == *-Xmx* ]] && ! [[ "${i}" == *-Xms* ]] && ! [[ "${i}" == *-XX:MaxPermSize* ]] && ! [[ "${i}" == *-XX:MaxMetaspaceSize* ]] && ! [[ "${i}" == *-XX:ReservedCodeCacheSize* ]]; then 145 | java_args+=("${i}") 146 | fi 147 | done 148 | local ys=("${sbt_options[@]}") 149 | sbt_options=() 150 | for i in "${ys[@]}"; do 151 | if ! [[ "${i}" == *-Xmx* ]] && ! [[ "${i}" == *-Xms* ]] && ! [[ "${i}" == *-XX:MaxPermSize* ]] && ! [[ "${i}" == *-XX:MaxMetaspaceSize* ]] && ! [[ "${i}" == *-XX:ReservedCodeCacheSize* ]]; then 152 | sbt_options+=("${i}") 153 | fi 154 | done 155 | # a ham-fisted attempt to move some memory settings in concert 156 | local mem=$1 157 | local codecache=$(( $mem / 8 )) 158 | (( $codecache > 128 )) || codecache=128 159 | (( $codecache < 512 )) || codecache=512 160 | local class_metadata_size=$(( $codecache * 2 )) 161 | if [[ -z $java_version ]]; then 162 | java_version=$(jdk_version) 163 | fi 164 | local class_metadata_opt="MaxPermSize" 165 | 166 | addJava "-Xms${mem}m" 167 | addJava "-Xmx${mem}m" 168 | addJava "-Xss4M" 169 | addJava "-XX:ReservedCodeCacheSize=${codecache}m" 170 | if [[ (( $java_version < 8 )) ]]; then 171 | addJava "-XX:${class_metadata_opt}=${class_metadata_size}m" 172 | fi 173 | } 174 | 175 | addDefaultMemory() { 176 | # if we detect any of these settings in ${JAVA_OPTS} or ${JAVA_TOOL_OPTIONS} we need to NOT output our settings. 177 | # The reason is the Xms/Xmx, if they don't line up, cause errors. 
178 | if [[ "${java_args[@]}" == *-Xmx* ]] || [[ "${java_args[@]}" == *-Xms* ]]; then 179 | : 180 | elif [[ "${JAVA_TOOL_OPTIONS}" == *-Xmx* ]] || [[ "${JAVA_TOOL_OPTIONS}" == *-Xms* ]]; then 181 | : 182 | elif [[ "${sbt_options[@]}" == *-Xmx* ]] || [[ "${sbt_options[@]}" == *-Xms* ]]; then 183 | : 184 | else 185 | addMemory $sbt_default_mem 186 | fi 187 | } 188 | 189 | get_gc_opts () { 190 | local older_than_9=$(( $java_version < 9 )) 191 | 192 | if [[ "$older_than_9" == "1" ]]; then 193 | # don't need to worry about gc 194 | echo "" 195 | elif [[ "${JAVA_OPTS}" =~ Use.*GC ]] || [[ "${JAVA_TOOL_OPTIONS}" =~ Use.*GC ]] || [[ "${SBT_OPTS}" =~ Use.*GC ]] ; then 196 | # GC arg has been passed in - don't change 197 | echo "" 198 | else 199 | # Java 9+ so revert to old 200 | echo "-XX:+UseParallelGC" 201 | fi 202 | } 203 | 204 | require_arg () { 205 | local type="$1" 206 | local opt="$2" 207 | local arg="$3" 208 | if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then 209 | echo "$opt requires <$type> argument" 210 | exit 1 211 | fi 212 | } 213 | 214 | is_function_defined() { 215 | declare -f "$1" > /dev/null 216 | } 217 | 218 | # parses JDK version from the -version output line. 219 | # 8 for 1.8.0_nn, 9 for 9-ea etc, and "no_java" for undetected 220 | jdk_version() { 221 | local result 222 | local lines=$("$java_cmd" -Xms32M -Xmx32M -version 2>&1 | tr '\r' '\n') 223 | local IFS=$'\n' 224 | for line in $lines; do 225 | if [[ (-z $result) && ($line = *"version \""*) ]] 226 | then 227 | local ver=$(echo $line | sed -e 's/.*version "\(.*\)"\(.*\)/\1/; 1q') 228 | # on macOS sed doesn't support '?' 
229 | if [[ $ver = "1."* ]] 230 | then 231 | result=$(echo $ver | sed -e 's/1\.\([0-9]*\)\(.*\)/\1/; 1q') 232 | else 233 | result=$(echo $ver | sed -e 's/\([0-9]*\)\(.*\)/\1/; 1q') 234 | fi 235 | fi 236 | done 237 | if [[ -z $result ]] 238 | then 239 | result=no_java 240 | fi 241 | echo "$result" 242 | } 243 | 244 | # Extracts the preloaded directory from either -Dsbt.preloaded or -Dsbt.global.base 245 | # properties by looking at: 246 | # - _JAVA_OPTIONS environment variable, 247 | # - SBT_OPTS environment variable, 248 | # - JAVA_OPTS environment variable and 249 | # - properties set by command-line options 250 | # in that order. The last one will be chosen such that `sbt.preloaded` is 251 | # always preferred over `sbt.global.base`. 252 | getPreloaded() { 253 | local -a _java_options_array 254 | local -a sbt_opts_array 255 | local -a java_opts_array 256 | read -a _java_options_array <<< "$_JAVA_OPTIONS" 257 | read -a sbt_opts_array <<< "$SBT_OPTS" 258 | read -a java_opts_array <<< "$JAVA_OPTS" 259 | 260 | local args_to_check=( 261 | "${_java_options_array[@]}" 262 | "${sbt_opts_array[@]}" 263 | "${java_opts_array[@]}" 264 | "${java_args[@]}") 265 | local via_global_base="$HOME/.sbt/preloaded" 266 | local via_explicit="" 267 | 268 | for opt in "${args_to_check[@]}"; do 269 | if [[ "$opt" == -Dsbt.preloaded=* ]]; then 270 | via_explicit="${opt#-Dsbt.preloaded=}" 271 | elif [[ "$opt" == -Dsbt.global.base=* ]]; then 272 | via_global_base="${opt#-Dsbt.global.base=}/preloaded" 273 | fi 274 | done 275 | 276 | echo "${via_explicit:-${via_global_base}}" 277 | } 278 | 279 | syncPreloaded() { 280 | local source_preloaded="$sbt_home/lib/local-preloaded/" 281 | local target_preloaded="$(getPreloaded)" 282 | if [[ "$init_sbt_version" == "" ]]; then 283 | # FIXME: better $init_sbt_version detection 284 | init_sbt_version="$(ls -1 "$source_preloaded/org/scala-sbt/sbt/")" 285 | fi 286 | [[ -f "$target_preloaded/org.scala-sbt/sbt/$init_sbt_version/jars/sbt.jar" ]] || { 287 | # 
lib/local-preloaded exists (This is optional) 288 | [[ -d "$source_preloaded" ]] && { 289 | command -v rsync >/dev/null 2>&1 && { 290 | mkdir -p "$target_preloaded" 291 | rsync -a --ignore-existing "$source_preloaded" "$target_preloaded" 292 | } 293 | } 294 | } 295 | } 296 | 297 | # Detect that we have java installed. 298 | checkJava() { 299 | local required_version="$1" 300 | # Now check to see if it's a good enough version 301 | local good_enough="$(expr $java_version ">=" $required_version)" 302 | if [[ "$java_version" == "" ]]; then 303 | echo 304 | echo "No Java Development Kit (JDK) installation was detected." 305 | echo Please go to http://www.oracle.com/technetwork/java/javase/downloads/ and download. 306 | echo 307 | exit 1 308 | elif [[ "$good_enough" != "1" ]]; then 309 | echo 310 | echo "The Java Development Kit (JDK) installation you have is not up to date." 311 | echo $script_name requires at least version $required_version+, you have 312 | echo version $java_version 313 | echo 314 | echo Please go to http://www.oracle.com/technetwork/java/javase/downloads/ and download 315 | echo a valid JDK and install before running $script_name. 316 | echo 317 | exit 1 318 | fi 319 | } 320 | 321 | copyRt() { 322 | local at_least_9="$(expr $java_version ">=" 9)" 323 | if [[ "$at_least_9" == "1" ]]; then 324 | rtexport=$(rt_export_file) 325 | # The grep for java9-rt-ext- matches the filename prefix printed in Export.java 326 | java9_ext=$("$java_cmd" ${sbt_options[@]} ${java_args[@]} \ 327 | -jar "$rtexport" --rt-ext-dir | grep java9-rt-ext-) 328 | java9_rt=$(echo "$java9_ext/rt.jar") 329 | vlog "[copyRt] java9_rt = '$java9_rt'" 330 | if [[ ! -f "$java9_rt" ]]; then 331 | echo Copying runtime jar. 
332 | mkdir -p "$java9_ext" 333 | execRunner "$java_cmd" \ 334 | ${sbt_options[@]} \ 335 | ${java_args[@]} \ 336 | -jar "$rtexport" \ 337 | "${java9_rt}" 338 | fi 339 | addJava "-Dscala.ext.dirs=${java9_ext}" 340 | fi 341 | } 342 | 343 | run() { 344 | local retarr=() 345 | java_args=($JAVA_OPTS) 346 | sbt_options0=(${SBT_OPTS:-$default_sbt_opts}) 347 | miniscript=$(map_args "${sbt_options0[@]}") && eval ${miniscript/retarr/sbt_options} 348 | miniscript=$(map_args "$@") && eval ${miniscript/retarr/args1} 349 | # process the combined args, then reset "$@" to the residuals 350 | process_args "${args1[@]}" 351 | vlog "[sbt_options] $(declare -p sbt_options)" 352 | addDefaultMemory 353 | set -- "${residual_args[@]}" 354 | argumentCount=$# 355 | 356 | # Copy preloaded repo to user's preloaded directory 357 | syncPreloaded 358 | 359 | # no jar? download it. 360 | [[ -f "$sbt_jar" ]] || acquire_sbt_jar "$sbt_version" || { 361 | # still no jar? uh-oh. 362 | echo "Download failed. Obtain the sbt-launch.jar manually and place it at $sbt_jar" 363 | exit 1 364 | } 365 | 366 | # TODO - java check should be configurable... 367 | checkJava "6" 368 | 369 | # Java 9 support 370 | copyRt 371 | 372 | #If we're in cygwin, we should use the windows config, and terminal hacks 373 | if [[ "$CYGWIN_FLAG" == "true" ]]; then #" 374 | stty -icanon min 1 -echo > /dev/null 2>&1 375 | addJava "-Djline.terminal=jline.UnixTerminal" 376 | addJava "-Dsbt.cygwin=true" 377 | fi 378 | 379 | if [[ $print_sbt_version ]]; then 380 | # print sbtVersion 381 | execRunner "$java_cmd" -jar "$sbt_jar" "sbtVersion" | tail -1 | sed -e 's/\[info\]/sbt version in this project:/g' 382 | else 383 | # run sbt 384 | execRunner "$java_cmd" \ 385 | $(get_gc_opts) \ 386 | ${java_args[@]} \ 387 | ${sbt_options[@]} \ 388 | -jar "$sbt_jar" \ 389 | "${sbt_commands[@]}" \ 390 | "${residual_args[@]}" 391 | fi 392 | 393 | exit_code=$? 394 | 395 | # Clean up the terminal from cygwin hacks. 
396 | if [[ "$CYGWIN_FLAG" == "true" ]]; then #" 397 | stty icanon echo > /dev/null 2>&1 398 | fi 399 | exit $exit_code 400 | } 401 | 402 | declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy" 403 | declare -r sbt_opts_file=".sbtopts" 404 | declare -r etc_sbt_opts_file="/etc/sbt/sbtopts" 405 | declare -r dist_sbt_opts_file="${sbt_home}/conf/sbtopts" 406 | declare -r win_sbt_opts_file="${sbt_home}/conf/sbtconfig.txt" 407 | 408 | usage() { 409 | cat < path to global settings/plugins directory (default: ~/.sbt) 425 | --sbt-boot path to shared boot directory (default: ~/.sbt/boot in 0.11 series) 426 | --ivy path to local Ivy repository (default: ~/.ivy2) 427 | --mem set memory options (default: $sbt_default_mem) 428 | --no-share use all local caches; no sharing 429 | --no-global uses global caches, but does not use global ~/.sbt directory. 430 | --jvm-debug Turn on JVM debugging, open at the given port. 431 | --batch disable interactive mode 432 | 433 | # sbt version (default: from project/build.properties if present, else latest release) 434 | --sbt-version use the specified version of sbt 435 | --sbt-jar use the specified jar as the sbt launcher 436 | --sbt-rc use an RC version of sbt 437 | --sbt-snapshot use a snapshot version of sbt 438 | 439 | # java version (default: java from PATH, currently $(java -version 2>&1 | grep version)) 440 | --java-home alternate JAVA_HOME 441 | 442 | # jvm options and output control 443 | JAVA_OPTS environment variable, if unset uses "$default_java_opts" 444 | .jvmopts if this file exists in the current directory, its contents 445 | are appended to JAVA_OPTS 446 | SBT_OPTS environment variable, if unset uses "$default_sbt_opts" 447 | .sbtopts if this file exists in the current directory, its contents 448 | are prepended to the runner args 449 | /etc/sbt/sbtopts if this file exists, it is prepended to the runner args 450 | -Dkey=val pass -Dkey=val directly to the 
java runtime 451 | -J-X pass option -X directly to the java runtime 452 | (-J is stripped) 453 | -S-X add -X to sbt's scalacOptions (-S is stripped) 454 | 455 | In the case of duplicated or conflicting options, the order above 456 | shows precedence: JAVA_OPTS lowest, command line options highest. 457 | EOM 458 | } 459 | 460 | process_my_args () { 461 | while [[ $# -gt 0 ]]; do 462 | case "$1" in 463 | -batch|--batch) exec 464 | 465 | -sbt-create|--sbt-create) sbt_create=true && shift ;; 466 | 467 | new) sbt_new=true && addResidual "$1" && shift ;; 468 | 469 | *) addResidual "$1" && shift ;; 470 | esac 471 | done 472 | 473 | # Now, ensure sbt version is used. 474 | [[ "${sbt_version}XXX" != "XXX" ]] && addJava "-Dsbt.version=$sbt_version" 475 | 476 | # Confirm a user's intent if the current directory does not look like an sbt 477 | # top-level directory and neither the -sbt-create option nor the "new" 478 | # command was given. 479 | [[ -f ./build.sbt || -d ./project || -n "$sbt_create" || -n "$sbt_new" ]] || { 480 | echo "[warn] Neither build.sbt nor a 'project' directory in the current directory: $(pwd)" 481 | while true; do 482 | echo 'c) continue' 483 | echo 'q) quit' 484 | 485 | read -p '? ' || exit 1 486 | case "$REPLY" in 487 | c|C) break ;; 488 | q|Q) exit 1 ;; 489 | esac 490 | done 491 | } 492 | } 493 | 494 | ## map over argument array. 
this is used to process both command line arguments and SBT_OPTS 495 | map_args () { 496 | local retarr=() 497 | while [[ $# -gt 0 ]]; do 498 | case "$1" in 499 | -no-colors|--no-colors) retarr=( "${retarr[@]}" "-Dsbt.log.noformat=true" ) && shift ;; 500 | -timings|--timings) retarr=( "${retarr[@]}" "-Dsbt.task.timings=true" "-Dsbt.task.timings.on.shutdown=true" ) && shift ;; 501 | -traces|--traces) retarr=( "${retarr[@]}" "-Dsbt.traces=true" ) && shift ;; 502 | --supershell=*) retarr=( "${retarr[@]}" "-Dsbt.supershell=${1:13}" ) && shift ;; 503 | -supershell=*) retarr=( "${retarr[@]}" "-Dsbt.supershell=${1:12}" ) && shift ;; 504 | --color=*) retarr=( "${retarr[@]}" "-Dsbt.color=${1:8}" ) && shift ;; 505 | -color=*) retarr=( "${retarr[@]}" "-Dsbt.color=${1:7}" ) && shift ;; 506 | -no-share|--no-share) retarr=( "${retarr[@]}" "$noshare_opts" ) && shift ;; 507 | -no-global|--no-global) retarr=( "${retarr[@]}" "-Dsbt.global.base=$(pwd)/project/.sbtboot" ) && shift ;; 508 | -sbt-boot|--sbt-boot) require_arg path "$1" "$2" && retarr=( "${retarr[@]}" "-Dsbt.boot.directory=$2" ) && shift 2 ;; 509 | -sbt-dir|--sbt-dir) require_arg path "$1" "$2" && retarr=( "${retarr[@]}" "-Dsbt.global.base=$2" ) && shift 2 ;; 510 | -debug-inc|--debug-inc) retarr=( "${retarr[@]}" "-Dxsbt.inc.debug=true" ) && shift ;; 511 | *) retarr=( "${retarr[@]}" "$1" ) && shift ;; 512 | esac 513 | done 514 | declare -p retarr 515 | } 516 | 517 | process_args () { 518 | while [[ $# -gt 0 ]]; do 519 | case "$1" in 520 | -h|-help) usage; exit 1 ;; 521 | -v|-verbose) verbose=1 && shift ;; 522 | -V|-version) print_sbt_version=1 && shift ;; 523 | -d|-debug) debug=1 && addSbt "-debug" && shift ;; 524 | 525 | -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;; 526 | -mem) require_arg integer "$1" "$2" && addMemory "$2" && shift 2 ;; 527 | -jvm-debug) require_arg port "$1" "$2" && addDebugger $2 && shift 2 ;; 528 | -batch) exec &2 "$@" 20 | } 21 | vlog () { 22 | [[ $verbose || $debug 
]] && echoerr "$@" 23 | } 24 | dlog () { 25 | [[ $debug ]] && echoerr "$@" 26 | } 27 | 28 | jar_file () { 29 | echo "$(cygwinpath "${sbt_home}/bin/sbt-launch.jar")" 30 | } 31 | 32 | acquire_sbt_jar () { 33 | sbt_jar="$(jar_file)" 34 | 35 | if [[ ! -f "$sbt_jar" ]]; then 36 | echoerr "Could not find launcher jar: $sbt_jar" 37 | exit 2 38 | fi 39 | } 40 | 41 | execRunner () { 42 | # print the arguments one to a line, quoting any containing spaces 43 | [[ $verbose || $debug ]] && echo "# Executing command line:" && { 44 | for arg; do 45 | if printf "%s\n" "$arg" | grep -q ' '; then 46 | printf "\"%s\"\n" "$arg" 47 | else 48 | printf "%s\n" "$arg" 49 | fi 50 | done 51 | echo "" 52 | } 53 | 54 | # THis used to be exec, but we loose the ability to re-hook stty then 55 | # for cygwin... Maybe we should flag the feature here... 56 | "$@" 57 | } 58 | 59 | addJava () { 60 | dlog "[addJava] arg = '$1'" 61 | java_args=( "${java_args[@]}" "$1" ) 62 | } 63 | addSbt () { 64 | dlog "[addSbt] arg = '$1'" 65 | sbt_commands=( "${sbt_commands[@]}" "$1" ) 66 | } 67 | addResidual () { 68 | dlog "[residual] arg = '$1'" 69 | residual_args=( "${residual_args[@]}" "$1" ) 70 | } 71 | addDebugger () { 72 | addJava "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=$1" 73 | } 74 | 75 | get_mem_opts () { 76 | # if we detect any of these settings in ${JAVA_OPTS} or ${JAVA_TOOL_OPTIONS} we need to NOT output our settings. 77 | # The reason is the Xms/Xmx, if they don't line up, cause errors. 
78 | if [[ "${JAVA_OPTS}" == *-Xmx* ]] || [[ "${JAVA_OPTS}" == *-Xms* ]] || [[ "${JAVA_OPTS}" == *-XX:MaxPermSize* ]] || [[ "${JAVA_OPTS}" == *-XX:MaxMetaspaceSize* ]] || [[ "${JAVA_OPTS}" == *-XX:ReservedCodeCacheSize* ]]; then 79 | echo "" 80 | elif [[ "${JAVA_TOOL_OPTIONS}" == *-Xmx* ]] || [[ "${JAVA_TOOL_OPTIONS}" == *-Xms* ]] || [[ "${JAVA_TOOL_OPTIONS}" == *-XX:MaxPermSize* ]] || [[ "${JAVA_TOOL_OPTIONS}" == *-XX:MaxMetaspaceSize* ]] || [[ "${JAVA_TOOL_OPTIONS}" == *-XX:ReservedCodeCacheSize* ]]; then 81 | echo "" 82 | elif [[ "${SBT_OPTS}" == *-Xmx* ]] || [[ "${SBT_OPTS}" == *-Xms* ]] || [[ "${SBT_OPTS}" == *-XX:MaxPermSize* ]] || [[ "${SBT_OPTS}" == *-XX:MaxMetaspaceSize* ]] || [[ "${SBT_OPTS}" == *-XX:ReservedCodeCacheSize* ]]; then 83 | echo "" 84 | else 85 | # a ham-fisted attempt to move some memory settings in concert 86 | # so they need not be messed around with individually. 87 | local mem=${1:-1024} 88 | local codecache=$(( $mem / 8 )) 89 | (( $codecache > 128 )) || codecache=128 90 | (( $codecache < 512 )) || codecache=512 91 | local class_metadata_size=$(( $codecache * 2 )) 92 | local class_metadata_opt=$([[ "$java_version" < "1.8" ]] && echo "MaxPermSize" || echo "MaxMetaspaceSize") 93 | 94 | local arg_xms=$([[ "${java_args[@]}" == *-Xms* ]] && echo "" || echo "-Xms${mem}m") 95 | local arg_xmx=$([[ "${java_args[@]}" == *-Xmx* ]] && echo "" || echo "-Xmx${mem}m") 96 | local arg_rccs=$([[ "${java_args[@]}" == *-XX:ReservedCodeCacheSize* ]] && echo "" || echo "-XX:ReservedCodeCacheSize=${codecache}m") 97 | local arg_meta=$([[ "${java_args[@]}" == *-XX:${class_metadata_opt}* ]] && echo "" || echo "-XX:${class_metadata_opt}=${class_metadata_size}m") 98 | 99 | echo "${arg_xms} ${arg_xmx} ${arg_rccs} ${arg_meta}" 100 | fi 101 | } 102 | 103 | require_arg () { 104 | local type="$1" 105 | local opt="$2" 106 | local arg="$3" 107 | if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then 108 | echo "$opt requires <$type> argument" 109 | exit 1 110 | fi 111 | } 
112 | 113 | is_function_defined() { 114 | declare -f "$1" > /dev/null 115 | } 116 | 117 | process_args () { 118 | while [[ $# -gt 0 ]]; do 119 | case "$1" in 120 | -h|-help) usage; exit 1 ;; 121 | -v|-verbose) verbose=1 && shift ;; 122 | -d|-debug) debug=1 && shift ;; 123 | 124 | -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;; 125 | -mem) require_arg integer "$1" "$2" && sbt_mem="$2" && shift 2 ;; 126 | -jvm-debug) require_arg port "$1" "$2" && addDebugger $2 && shift 2 ;; 127 | -batch) exec &1 | awk -F '"' '/version/ {print $2}') 146 | vlog "[process_args] java_version = '$java_version'" 147 | } 148 | 149 | # Detect that we have java installed. 150 | checkJava() { 151 | local required_version="$1" 152 | # Now check to see if it's a good enough version 153 | if [[ "$java_version" == "" ]]; then 154 | echo 155 | echo No java installations was detected. 156 | echo Please go to http://www.java.com/getjava/ and download 157 | echo 158 | exit 1 159 | elif [[ ! "$java_version" > "$required_version" ]]; then 160 | echo 161 | echo The java installation you have is not up to date 162 | echo $script_name requires at least version $required_version+, you have 163 | echo version $java_version 164 | echo 165 | echo Please go to http://www.java.com/getjava/ and download 166 | echo a valid Java Runtime and install before running $script_name. 167 | echo 168 | exit 1 169 | fi 170 | } 171 | 172 | 173 | run() { 174 | # no jar? download it. 175 | [[ -f "$sbt_jar" ]] || acquire_sbt_jar "$sbt_version" || { 176 | # still no jar? uh-oh. 177 | echo "Download failed. Obtain the sbt-launch.jar manually and place it at $sbt_jar" 178 | exit 1 179 | } 180 | 181 | # process the combined args, then reset "$@" to the residuals 182 | process_args "$@" 183 | set -- "${residual_args[@]}" 184 | argumentCount=$# 185 | 186 | # TODO - java check should be configurable... 
187 | checkJava "1.6" 188 | 189 | #If we're in cygwin, we should use the windows config, and terminal hacks 190 | if [[ "$CYGWIN_FLAG" == "true" ]]; then 191 | stty -icanon min 1 -echo > /dev/null 2>&1 192 | addJava "-Djline.terminal=jline.UnixTerminal" 193 | addJava "-Dsbt.cygwin=true" 194 | fi 195 | 196 | # run sbt 197 | execRunner "$java_cmd" \ 198 | $(get_mem_opts $sbt_mem) \ 199 | ${JAVA_OPTS} \ 200 | ${SBT_OPTS:-$default_sbt_opts} \ 201 | ${java_args[@]} \ 202 | -jar "$sbt_jar" \ 203 | "${sbt_commands[@]}" \ 204 | "${residual_args[@]}" 205 | 206 | exit_code=$? 207 | 208 | # Clean up the terminal from cygwin hacks. 209 | if [[ "$CYGWIN_FLAG" == "true" ]]; then 210 | stty icanon echo > /dev/null 2>&1 211 | fi 212 | exit $exit_code 213 | } 214 | -------------------------------------------------------------------------------- /sbt-dist/bin/sbt-launch.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akka-fusion/akka-fusion-samples/27c3a2024b7a44896652eba4530e71e94b0ea0dd/sbt-dist/bin/sbt-launch.jar -------------------------------------------------------------------------------- /sbt-dist/bin/sbt.bat: -------------------------------------------------------------------------------- 1 | @REM SBT launcher script 2 | @REM 3 | @REM Environment: 4 | @REM JAVA_HOME - location of a JDK home dir (mandatory) 5 | @REM SBT_OPTS - JVM options (optional) 6 | @REM Configuration: 7 | @REM sbtconfig.txt found in the SBT_HOME. 8 | 9 | @REM ZOMG! We need delayed expansion to build up CFG_OPTS later 10 | @setlocal enabledelayedexpansion 11 | 12 | @echo off 13 | set SBT_HOME=%~dp0 14 | set SBT_ARGS= 15 | 16 | set DEFAULT_JAVA_OPTS=-Dfile.encoding=UTF-8 17 | 18 | rem FIRST we load the config file of extra options. 19 | set FN=%SBT_HOME%\..\conf\sbtconfig.txt 20 | set CFG_OPTS= 21 | FOR /F "tokens=* eol=# usebackq delims=" %%i IN ("%FN%") DO ( 22 | set DO_NOT_REUSE_ME=%%i 23 | rem ZOMG (Part #2) WE use !! 
here to delay the expansion of 24 | rem CFG_OPTS, otherwise it remains "" for this loop. 25 | set CFG_OPTS=!CFG_OPTS! !DO_NOT_REUSE_ME! 26 | ) 27 | 28 | rem poor man's jenv (which is not available on Windows) 29 | IF DEFINED JAVA_HOMES ( 30 | IF EXIST .java-version FOR /F %%A IN (.java-version) DO ( 31 | SET JAVA_HOME=%JAVA_HOMES%\%%A 32 | SET JDK_HOME=%JAVA_HOMES%\%%A 33 | ) 34 | ) 35 | rem must set PATH or wrong javac is used for java projects 36 | IF DEFINED JAVA_HOME SET "PATH=%JAVA_HOME%\bin;%PATH%" 37 | 38 | rem users can set JAVA_OPTS via .jvmopts (sbt-extras style) 39 | IF EXIST .jvmopts FOR /F %%A IN (.jvmopts) DO ( 40 | SET _jvmopts_line=%%A 41 | IF NOT "!_jvmopts_line:~0,1!"=="#" ( 42 | SET JAVA_OPTS=%%A !JAVA_OPTS! 43 | ) 44 | ) 45 | rem We use the value of the JAVACMD environment variable if defined 46 | set _JAVACMD=%JAVACMD% 47 | 48 | if "%_JAVACMD%"=="" ( 49 | if not "%JAVA_HOME%"=="" ( 50 | if exist "%JAVA_HOME%\bin\java.exe" set "_JAVACMD=%JAVA_HOME%\bin\java.exe" 51 | ) 52 | ) 53 | 54 | if "%_JAVACMD%"=="" set _JAVACMD=java 55 | 56 | rem We use the value of the JAVA_OPTS environment variable if defined, rather than the config. 57 | set _JAVA_OPTS=%JAVA_OPTS% 58 | if "%_JAVA_OPTS%"=="" set _JAVA_OPTS=%CFG_OPTS% 59 | 60 | if "%_JAVA_OPTS%"=="" set _JAVA_OPTS=%DEFAULT_JAVA_OPTS% 61 | 62 | set INIT_SBT_VERSION=1.3.0 63 | 64 | :args_loop 65 | if "%~1" == "" goto args_end 66 | 67 | if "%~1" == "-jvm-debug" set JVM_DEBUG=true 68 | if "%~1" == "--jvm-debug" set JVM_DEBUG=true 69 | 70 | if "%~1" == "-java-home" set SET_JAVA_HOME=true 71 | if "%~1" == "--java-home" set SET_JAVA_HOME=true 72 | 73 | if "%JVM_DEBUG%" == "true" ( 74 | set /a JVM_DEBUG_PORT=5005 2>nul >nul 75 | ) else if "!JVM_DEBUG!" == "true" ( 76 | set /a JVM_DEBUG_PORT=%1 2>nul >nul 77 | if not "%~1" == "!JVM_DEBUG_PORT!" ( 78 | set SBT_ARGS=!SBT_ARGS! %1 79 | ) 80 | ) else if /I "%~1" == "new" ( 81 | set sbt_new=true 82 | set SBT_ARGS=!SBT_ARGS! 
%1 83 | ) else ( 84 | set SBT_ARGS=!SBT_ARGS! %1 85 | ) 86 | 87 | if "%SET_JAVA_HOME%" == "true" ( 88 | set SET_JAVA_HOME= 89 | if NOT "%~2" == "" ( 90 | if exist "%~2\bin\java.exe" ( 91 | set _JAVACMD="%~2\bin\java.exe" 92 | set JAVA_HOME="%~2" 93 | set JDK_HOME="%~2" 94 | shift 95 | ) else ( 96 | echo Directory "%~2" for JAVA_HOME is not valid 97 | goto error 98 | ) 99 | ) else ( 100 | echo Second argument for --java-home missing 101 | goto error 102 | ) 103 | ) 104 | 105 | shift 106 | goto args_loop 107 | :args_end 108 | 109 | rem Confirm a user's intent if the current directory does not look like an sbt 110 | rem top-level directory and the "new" command was not given. 111 | if not exist build.sbt ( 112 | if not exist project\ ( 113 | if not defined sbt_new ( 114 | echo [warn] Neither build.sbt nor a 'project' directory in the current directory: %CD% 115 | setlocal 116 | :confirm 117 | echo c^) continue 118 | echo q^) quit 119 | 120 | set /P reply=?^ 121 | if /I "!reply!" == "c" ( 122 | goto confirm_end 123 | ) else if /I "!reply!" == "q" ( 124 | exit /B 1 125 | ) 126 | 127 | goto confirm 128 | :confirm_end 129 | endlocal 130 | ) 131 | ) 132 | ) 133 | 134 | call :process 135 | 136 | call :checkjava 137 | 138 | call :copyrt 139 | 140 | if defined JVM_DEBUG_PORT ( 141 | set _JAVA_OPTS=!_JAVA_OPTS! -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=!JVM_DEBUG_PORT! 
142 | ) 143 | 144 | call :sync_preloaded 145 | 146 | call :run %SBT_ARGS% 147 | 148 | if ERRORLEVEL 1 goto error 149 | goto end 150 | 151 | :run 152 | 153 | "%_JAVACMD%" %_JAVA_OPTS% %SBT_OPTS% -cp "%SBT_HOME%sbt-launch.jar" xsbt.boot.Boot %* 154 | goto :eof 155 | 156 | :process 157 | rem Parses x out of 1.x; for example 8 out of java version 1.8.0_xx 158 | rem Otherwise, parses the major version; 9 out of java version 9-ea 159 | set JAVA_VERSION=0 160 | for /f "tokens=3" %%g in ('"%_JAVACMD%" -Xms32M -Xmx32M -version 2^>^&1 ^| findstr /i version') do ( 161 | set JAVA_VERSION=%%g 162 | ) 163 | set JAVA_VERSION=%JAVA_VERSION:"=% 164 | for /f "delims=.-_ tokens=1-2" %%v in ("%JAVA_VERSION%") do ( 165 | if /I "%%v" EQU "1" ( 166 | set JAVA_VERSION=%%w 167 | ) else ( 168 | set JAVA_VERSION=%%v 169 | ) 170 | ) 171 | exit /B 0 172 | 173 | :checkjava 174 | set required_version=6 175 | if /I %JAVA_VERSION% GEQ %required_version% ( 176 | exit /B 0 177 | ) 178 | echo. 179 | echo The Java Development Kit (JDK) installation you have is not up to date. 180 | echo sbt requires at least version %required_version%+, you have 181 | echo version %JAVA_VERSION% 182 | echo. 183 | echo Please go to http://www.oracle.com/technetwork/java/javase/downloads/ and download 184 | echo a valid JDK and install before running sbt. 185 | echo. 186 | exit /B 1 187 | 188 | :copyrt 189 | if /I %JAVA_VERSION% GEQ 9 ( 190 | set rtexport=!SBT_HOME!java9-rt-export.jar 191 | 192 | "%_JAVACMD%" %_JAVA_OPTS% %SBT_OPTS% -jar "!rtexport!" --rt-ext-dir > "%TEMP%.\rtext.txt" 193 | set /p java9_ext= < "%TEMP%.\rtext.txt" 194 | set java9_rt=!java9_ext!\rt.jar 195 | 196 | if not exist "!java9_rt!" ( 197 | mkdir "!java9_ext!" 198 | "%_JAVACMD%" %_JAVA_OPTS% %SBT_OPTS% -jar "!rtexport!" "!java9_rt!" 199 | ) 200 | set _JAVA_OPTS=!_JAVA_OPTS! -Dscala.ext.dirs="!java9_ext!" 201 | 202 | rem check to see if a GC has been set in the opts 203 | echo !_JAVA_OPTS! 
| findstr /r "Use.*GC" >nul 204 | if ERRORLEVEL 1 ( 205 | rem don't have a GC set - revert to old GC 206 | set _JAVA_OPTS=!_JAVA_OPTS! -XX:+UseParallelGC 207 | ) 208 | ) 209 | exit /B 0 210 | 211 | :sync_preloaded 212 | if "%INIT_SBT_VERSION%"=="" ( 213 | rem FIXME: better %INIT_SBT_VERSION% detection 214 | FOR /F "tokens=* USEBACKQ" %%F IN (`dir /b "%SBT_HOME%\..\lib\local-preloaded\org\scala-sbt\sbt" /B`) DO ( 215 | SET INIT_SBT_VERSION=%%F 216 | ) 217 | ) 218 | set PRELOAD_SBT_JAR="%UserProfile%\.sbt\preloaded\org\scala-sbt\sbt\%INIT_SBT_VERSION%\" 219 | if /I %JAVA_VERSION% GEQ 8 ( 220 | where robocopy >nul 2>nul 221 | if %ERRORLEVEL% equ 0 ( 222 | REM echo %PRELOAD_SBT_JAR% 223 | if not exist %PRELOAD_SBT_JAR% ( 224 | if exist "%SBT_HOME%\..\lib\local-preloaded\" ( 225 | echo "about to robocopy" 226 | robocopy "%SBT_HOME%\..\lib\local-preloaded" "%UserProfile%\.sbt\preloaded" /E 227 | ) 228 | ) 229 | ) 230 | ) 231 | exit /B 0 232 | 233 | :error 234 | @endlocal 235 | exit /B 1 236 | 237 | :end 238 | @endlocal 239 | exit /B 0 240 | -------------------------------------------------------------------------------- /sbt-dist/conf/sbtconfig.txt: -------------------------------------------------------------------------------- 1 | # Set the java args to high 2 | 3 | -Xmx512M 4 | 5 | -XX:MaxPermSize=256m 6 | 7 | -XX:ReservedCodeCacheSize=128m 8 | 9 | 10 | 11 | # Set the extra SBT options 12 | 13 | -Dsbt.log.format=true 14 | 15 | -------------------------------------------------------------------------------- /sbt-dist/conf/sbtopts: -------------------------------------------------------------------------------- 1 | # ------------------------------------------------ # 2 | # The SBT Configuration file. # 3 | # ------------------------------------------------ # 4 | 5 | 6 | # Disable ANSI color codes 7 | # 8 | #-no-colors 9 | 10 | # Starts sbt even if the current directory contains no sbt project. 
11 | # 12 | -sbt-create 13 | 14 | # Path to global settings/plugins directory (default: ~/.sbt) 15 | # 16 | #-sbt-dir /etc/sbt 17 | 18 | # Path to shared boot directory (default: ~/.sbt/boot in 0.11 series) 19 | # 20 | #-sbt-boot ~/.sbt/boot 21 | 22 | # Path to local Ivy repository (default: ~/.ivy2) 23 | # 24 | #-ivy ~/.ivy2 25 | 26 | # set memory options 27 | # 28 | #-mem 29 | 30 | # Use local caches for projects, no sharing. 31 | # 32 | #-no-share 33 | 34 | # Put SBT in offline mode. 35 | # 36 | #-offline 37 | 38 | # Sets the SBT version to use. 39 | #-sbt-version 0.11.3 40 | 41 | # Scala version (default: latest release) 42 | # 43 | #-scala-home 44 | #-scala-version 45 | 46 | # java version (default: java from PATH, currently $(java -version |& grep version)) 47 | # 48 | #-java-home 49 | 50 | -------------------------------------------------------------------------------- /sbt.bat: -------------------------------------------------------------------------------- 1 | @REM SBT launcher script 2 | 3 | .\sbt-dist\bin\sbt.bat %* 4 | -------------------------------------------------------------------------------- /version.sbt: -------------------------------------------------------------------------------- 1 | version in ThisBuild := "1.0.0-SNAPSHOT" 2 | --------------------------------------------------------------------------------