├── .editorconfig ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .scalafmt.conf ├── LICENSE ├── README.md ├── admin ├── app │ ├── Application.scala │ └── com │ │ └── lucidchart │ │ └── piezo │ │ ├── GeneratorClassLoader.scala │ │ └── admin │ │ ├── RequestStatCollector.scala │ │ ├── controllers │ │ ├── ApplicationController.scala │ │ ├── ErrorLogging.scala │ │ ├── HealthCheck.scala │ │ ├── JobDataHelper.scala │ │ ├── JobFormHelper.scala │ │ ├── Jobs.scala │ │ ├── TriggerFormHelper.scala │ │ ├── TriggerHelper.scala │ │ └── Triggers.scala │ │ ├── models │ │ ├── ModelComponents.scala │ │ ├── MonitoringTeams.scala │ │ └── TriggerType.scala │ │ ├── utils │ │ ├── CronHelper.scala │ │ ├── JobDetailHelper.scala │ │ └── JobUtils.scala │ │ └── views │ │ ├── FormHelpers.scala │ │ ├── editJob.scala.html │ │ ├── editTrigger.scala.html │ │ ├── errors │ │ ├── error.scala.html │ │ └── notfound.scala.html │ │ ├── helpers │ │ └── fieldConstructor.scala.html │ │ ├── index.scala.html │ │ ├── job.scala.html │ │ ├── jobs.scala.html │ │ ├── jobsLayout.scala.html │ │ ├── main.scala.html │ │ ├── trigger.scala.html │ │ ├── triggers.scala.html │ │ └── triggersLayout.scala.html ├── build.sbt ├── conf │ ├── application.conf │ ├── logger.xml │ ├── quartz.properties │ └── routes ├── public │ ├── bootstrap-3.3.6 │ │ ├── css │ │ │ ├── bootstrap-theme.css │ │ │ ├── bootstrap-theme.css.map │ │ │ ├── bootstrap-theme.min.css │ │ │ ├── bootstrap-theme.min.css.map │ │ │ ├── bootstrap.css │ │ │ ├── bootstrap.css.map │ │ │ ├── bootstrap.min.css │ │ │ └── bootstrap.min.css.map │ │ ├── fonts │ │ │ ├── glyphicons-halflings-regular.eot │ │ │ ├── glyphicons-halflings-regular.svg │ │ │ ├── glyphicons-halflings-regular.ttf │ │ │ ├── glyphicons-halflings-regular.woff │ │ │ └── glyphicons-halflings-regular.woff2 │ │ └── js │ │ │ ├── bootstrap.js │ │ │ ├── bootstrap.min.js │ │ │ └── npm.js │ ├── img │ │ ├── LucidLogo.png │ │ ├── PiezoLogo.png │ │ └── favicon.ico │ ├── js │ │ ├── jobData.js │ │ ├── 
jquery-2.0.3.js │ │ ├── jquery-2.0.3.min.js │ │ ├── jquery-2.0.3.min.map │ │ ├── jquery.tokeninput.js │ │ ├── triggerMonitoring.js │ │ └── typeAhead.js │ └── stylesheets │ │ └── main.css └── test │ ├── IntegrationSpec.scala │ ├── com │ └── lucidchart │ │ └── piezo │ │ └── admin │ │ ├── controllers │ │ ├── HealthCheckTest.scala │ │ ├── JobsControllerTest.scala │ │ ├── JobsService.scala │ │ ├── TestUtil.scala │ │ └── TriggersService.scala │ │ └── util │ │ ├── CronHelperTest.scala │ │ └── DummyClassGeneratorTest.scala │ └── resources │ └── quartz_test.properties ├── build.sbt ├── documentation └── piezo_project_architecture.png ├── project ├── build.properties └── build.sbt └── worker ├── build.sbt └── src ├── main ├── resources │ ├── blank.xml │ ├── piezo_mysql_0.sql │ ├── piezo_mysql_1.sql │ ├── piezo_mysql_2.sql │ ├── piezo_mysql_3.sql │ ├── piezo_mysql_4.sql │ ├── piezo_mysql_5.sql │ ├── piezo_mysql_6.sql │ ├── piezo_mysql_7.sql │ ├── piezo_mysql_8.sql │ ├── quartz_mysql_0.sql │ ├── run-sql.sh │ └── run_me_first.sql └── scala │ └── com │ └── lucidchart │ └── piezo │ ├── BeanConnectionProvider.scala │ ├── JobHistoryModel.scala │ ├── PiezoConnectionProvider.scala │ ├── TriggerHistoryModel.scala │ ├── TriggerMonitoringModel.scala │ ├── Worker.scala │ ├── WorkerJobListener.scala │ ├── WorkerSchedulerFactory.scala │ ├── WorkerTriggerListener.scala │ ├── jobs │ ├── cleanup │ │ └── JobHistoryCleanup.scala │ ├── exec │ │ └── RunExec.scala │ └── monitoring │ │ └── HeartBeat.scala │ └── util │ ├── DummyClassGenerator.scala │ └── SourceFromString.scala ├── run ├── quartz.properties └── resources │ └── logback.xml └── test ├── resources ├── quartz_test.properties └── quartz_test_mysql.properties └── scala └── com └── lucidchart └── piezo ├── ModelTest.scala └── WorkerTest.scala /.editorconfig: -------------------------------------------------------------------------------- 1 | # top-most EditorConfig file 2 | root = true 3 | 4 | # defaults for all files 5 | [*] 6 | charset = 
utf-8 7 | indent_style = space 8 | indent_size = 4 9 | end_of_line = lf 10 | trim_trailing_whitespace = true 11 | insert_final_newline = true 12 | 13 | [*.rb] 14 | indent_size = 2 15 | indent_style = space 16 | 17 | [*.php] 18 | insert_final_newline = false 19 | 20 | [*.scala] 21 | indent_style = space 22 | indent_size = 2 23 | 24 | [*.scala.html] 25 | indent_style = space 26 | indent_size = 4 27 | 28 | [*.scala.txt] 29 | indent_style = space 30 | 31 | [Makefile] 32 | indent_style = tab 33 | 34 | [*.makefile] 35 | indent_style = tab 36 | 37 | [*.yml] 38 | indent_size = 2 39 | 40 | [makefile] 41 | indent_style = tab 42 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - master 7 | tags: 8 | - "*.*.*" 9 | 10 | jobs: 11 | build: 12 | runs-on: ubuntu-24.04 13 | steps: 14 | - name: Startup MySQL service 15 | run: sudo /etc/init.d/mysql start 16 | - uses: actions/checkout@v4 17 | - uses: actions/setup-java@v4 18 | with: 19 | java-version: '21' 20 | distribution: 'corretto' 21 | - uses: sbt/setup-sbt@v1 22 | - name: Set Env 23 | if: ${{ github.repository == 'lucidsoftware/piezo' }} 24 | run: | 25 | if [[ $GITHUB_REF == refs/tags/* ]]; then 26 | version="${GITHUB_REF#refs/tags/}" 27 | else 28 | version="${GITHUB_REF##*/}-SNAPSHOT" 29 | fi 30 | echo "VERSION=$version" >> $GITHUB_ENV 31 | echo "SBT_OPTS=-Dbuild.version=$version" >> $GITHUB_ENV 32 | - name: Test 33 | run: sbt compile test scalafmtCheck doc Debian/packageBin 34 | - name: Publish to Sonatype 35 | if: ${{ github.repository == 'lucidsoftware/piezo' && github.event_name != 'pull_request' }} 36 | env: 37 | PGP_SECRET: ${{ secrets.PGP_SECRET }} 38 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} 39 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} 40 | run: | 41 | echo "$PGP_SECRET" | base64 --decode | 
gpg --import 42 | if [[ $GITHUB_REF == refs/tags/* ]]; then 43 | sbt '; publishSigned; sonatypeBundleRelease' 44 | else 45 | sbt publishSigned 46 | fi 47 | # TODO: publish deb somewhere? 48 | - name: Upload assets to Github 49 | if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }} 50 | env: 51 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 52 | run: | 53 | set -x 54 | sbt Universal/packageBin 55 | gh release create -t "Release $VERSION" --generate-notes $VERSION {admin,worker}/target/**/*.jar admin/target/*.deb admin/target/universal/piezo-admin*.zip 56 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | *.log 3 | 4 | # Created when running admin service 5 | HeartbeatTestFile 6 | 7 | # sbt specific 8 | dist/* 9 | target/ 10 | target-*/ 11 | lib_managed/ 12 | src_managed/ 13 | project/boot/ 14 | project/plugins/project/ 15 | project/project/ 16 | project/metals.sbt 17 | 18 | # Scala-IDE specific 19 | .scala_dependencies 20 | .idea 21 | .idea_modules 22 | .bsp 23 | .metals 24 | .bloop 25 | .vscode 26 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = "3.8.2" 2 | runner.dialect = scala3 3 | align.preset = none 4 | align.openParenCallSite = false 5 | align.openParenDefnSite = false 6 | indent.defnSite = 2 7 | danglingParentheses.preset = true 8 | docstrings.style = Asterisk 9 | importSelectors = singleLine 10 | maxColumn = 120 11 | newlines.implicitParamListModifierPrefer = after 12 | rewrite.redundantBraces.stringInterpolation = true 13 | rewrite.rules = [ 14 | # AvoidInfix, 15 | # RedundantBraces, 16 | RedundantParens, 17 | PreferCurlyFors, 18 | SortImports 19 | ] 20 | runner.fatalWarnings = true 21 | trailingCommas = always 22 | newlines.afterCurlyLambdaParams = 
keep 23 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, and 10 | distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by the copyright 13 | owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all other entities 16 | that control, are controlled by, or are under common control with that entity. 17 | For the purposes of this definition, "control" means (i) the power, direct or 18 | indirect, to cause the direction or management of such entity, whether by 19 | contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the 20 | outstanding shares, or (iii) beneficial ownership of such entity. 21 | 22 | "You" (or "Your") shall mean an individual or Legal Entity exercising 23 | permissions granted by this License. 24 | 25 | "Source" form shall mean the preferred form for making modifications, including 26 | but not limited to software source code, documentation source, and configuration 27 | files. 28 | 29 | "Object" form shall mean any form resulting from mechanical transformation or 30 | translation of a Source form, including but not limited to compiled object code, 31 | generated documentation, and conversions to other media types. 32 | 33 | "Work" shall mean the work of authorship, whether in Source or Object form, made 34 | available under the License, as indicated by a copyright notice that is included 35 | in or attached to the work (an example is provided in the Appendix below). 
36 | 37 | "Derivative Works" shall mean any work, whether in Source or Object form, that 38 | is based on (or derived from) the Work and for which the editorial revisions, 39 | annotations, elaborations, or other modifications represent, as a whole, an 40 | original work of authorship. For the purposes of this License, Derivative Works 41 | shall not include works that remain separable from, or merely link (or bind by 42 | name) to the interfaces of, the Work and Derivative Works thereof. 43 | 44 | "Contribution" shall mean any work of authorship, including the original version 45 | of the Work and any modifications or additions to that Work or Derivative Works 46 | thereof, that is intentionally submitted to Licensor for inclusion in the Work 47 | by the copyright owner or by an individual or Legal Entity authorized to submit 48 | on behalf of the copyright owner. For the purposes of this definition, 49 | "submitted" means any form of electronic, verbal, or written communication sent 50 | to the Licensor or its representatives, including but not limited to 51 | communication on electronic mailing lists, source code control systems, and 52 | issue tracking systems that are managed by, or on behalf of, the Licensor for 53 | the purpose of discussing and improving the Work, but excluding communication 54 | that is conspicuously marked or otherwise designated in writing by the copyright 55 | owner as "Not a Contribution." 56 | 57 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf 58 | of whom a Contribution has been received by Licensor and subsequently 59 | incorporated within the Work. 60 | 61 | 2. Grant of Copyright License. 
62 | 63 | Subject to the terms and conditions of this License, each Contributor hereby 64 | grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, 65 | irrevocable copyright license to reproduce, prepare Derivative Works of, 66 | publicly display, publicly perform, sublicense, and distribute the Work and such 67 | Derivative Works in Source or Object form. 68 | 69 | 3. Grant of Patent License. 70 | 71 | Subject to the terms and conditions of this License, each Contributor hereby 72 | grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, 73 | irrevocable (except as stated in this section) patent license to make, have 74 | made, use, offer to sell, sell, import, and otherwise transfer the Work, where 75 | such license applies only to those patent claims licensable by such Contributor 76 | that are necessarily infringed by their Contribution(s) alone or by combination 77 | of their Contribution(s) with the Work to which such Contribution(s) was 78 | submitted. If You institute patent litigation against any entity (including a 79 | cross-claim or counterclaim in a lawsuit) alleging that the Work or a 80 | Contribution incorporated within the Work constitutes direct or contributory 81 | patent infringement, then any patent licenses granted to You under this License 82 | for that Work shall terminate as of the date such litigation is filed. 83 | 84 | 4. Redistribution. 
85 | 86 | You may reproduce and distribute copies of the Work or Derivative Works thereof 87 | in any medium, with or without modifications, and in Source or Object form, 88 | provided that You meet the following conditions: 89 | 90 | You must give any other recipients of the Work or Derivative Works a copy of 91 | this License; and 92 | You must cause any modified files to carry prominent notices stating that You 93 | changed the files; and 94 | You must retain, in the Source form of any Derivative Works that You distribute, 95 | all copyright, patent, trademark, and attribution notices from the Source form 96 | of the Work, excluding those notices that do not pertain to any part of the 97 | Derivative Works; and 98 | If the Work includes a "NOTICE" text file as part of its distribution, then any 99 | Derivative Works that You distribute must include a readable copy of the 100 | attribution notices contained within such NOTICE file, excluding those notices 101 | that do not pertain to any part of the Derivative Works, in at least one of the 102 | following places: within a NOTICE text file distributed as part of the 103 | Derivative Works; within the Source form or documentation, if provided along 104 | with the Derivative Works; or, within a display generated by the Derivative 105 | Works, if and wherever such third-party notices normally appear. The contents of 106 | the NOTICE file are for informational purposes only and do not modify the 107 | License. You may add Your own attribution notices within Derivative Works that 108 | You distribute, alongside or as an addendum to the NOTICE text from the Work, 109 | provided that such additional attribution notices cannot be construed as 110 | modifying the License. 
111 | You may add Your own copyright statement to Your modifications and may provide 112 | additional or different license terms and conditions for use, reproduction, or 113 | distribution of Your modifications, or for any such Derivative Works as a whole, 114 | provided Your use, reproduction, and distribution of the Work otherwise complies 115 | with the conditions stated in this License. 116 | 117 | 5. Submission of Contributions. 118 | 119 | Unless You explicitly state otherwise, any Contribution intentionally submitted 120 | for inclusion in the Work by You to the Licensor shall be under the terms and 121 | conditions of this License, without any additional terms or conditions. 122 | Notwithstanding the above, nothing herein shall supersede or modify the terms of 123 | any separate license agreement you may have executed with Licensor regarding 124 | such Contributions. 125 | 126 | 6. Trademarks. 127 | 128 | This License does not grant permission to use the trade names, trademarks, 129 | service marks, or product names of the Licensor, except as required for 130 | reasonable and customary use in describing the origin of the Work and 131 | reproducing the content of the NOTICE file. 132 | 133 | 7. Disclaimer of Warranty. 134 | 135 | Unless required by applicable law or agreed to in writing, Licensor provides the 136 | Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, 137 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, 138 | including, without limitation, any warranties or conditions of TITLE, 139 | NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are 140 | solely responsible for determining the appropriateness of using or 141 | redistributing the Work and assume any risks associated with Your exercise of 142 | permissions under this License. 143 | 144 | 8. Limitation of Liability. 
145 | 146 | In no event and under no legal theory, whether in tort (including negligence), 147 | contract, or otherwise, unless required by applicable law (such as deliberate 148 | and grossly negligent acts) or agreed to in writing, shall any Contributor be 149 | liable to You for damages, including any direct, indirect, special, incidental, 150 | or consequential damages of any character arising as a result of this License or 151 | out of the use or inability to use the Work (including but not limited to 152 | damages for loss of goodwill, work stoppage, computer failure or malfunction, or 153 | any and all other commercial damages or losses), even if such Contributor has 154 | been advised of the possibility of such damages. 155 | 156 | 9. Accepting Warranty or Additional Liability. 157 | 158 | While redistributing the Work or Derivative Works thereof, You may choose to 159 | offer, and charge a fee for, acceptance of support, warranty, indemnity, or 160 | other liability obligations and/or rights consistent with this License. However, 161 | in accepting such obligations, You may act only on Your own behalf and on Your 162 | sole responsibility, not on behalf of any other Contributor, and only if You 163 | agree to indemnify, defend, and hold each Contributor harmless for any liability 164 | incurred by, or claims asserted against, such Contributor by reason of your 165 | accepting any such warranty or additional liability. 166 | 167 | END OF TERMS AND CONDITIONS 168 | 169 | APPENDIX: How to apply the Apache License to your work 170 | 171 | To apply the Apache License to your work, attach the following boilerplate 172 | notice, with the fields enclosed by brackets "[]" replaced with your own 173 | identifying information. (Don't include the brackets!) The text should be 174 | enclosed in the appropriate comment syntax for the file format. 
We also 175 | recommend that a file or class name and description of purpose be included on 176 | the same "printed page" as the copyright notice for easier identification within 177 | third-party archives. 178 | 179 | Copyright [yyyy] [name of copyright owner] 180 | 181 | Licensed under the Apache License, Version 2.0 (the "License"); 182 | you may not use this file except in compliance with the License. 183 | You may obtain a copy of the License at 184 | 185 | http://www.apache.org/licenses/LICENSE-2.0 186 | 187 | Unless required by applicable law or agreed to in writing, software 188 | distributed under the License is distributed on an "AS IS" BASIS, 189 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 190 | See the License for the specific language governing permissions and 191 | limitations under the License. 192 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Piezo 2 | ===== 3 | 4 | Piezo is a system for operating and managing a [Quartz Scheduler](http://quartz-scheduler.org/documentation/quartz-2.2.x/quick-start) cluster. The first component is the Worker, which is a driver or main class for running a Quartz instance. The second is the Admin, which is a web interface for administrating a Quartz cluster, including managing which jobs run, and viewing a history of what processing the cluster has completed. The third project in the diagram below is your library containing the actual jobs to run. 5 | 6 | ![Piezo project architecture](documentation/piezo_project_architecture.png "Project Architecture") 7 | 8 | ## Worker 9 | Worker is a process that runs a [Quartz Scheduler](http://quartz-scheduler.org/documentation/quartz-2.2.x/quick-start) instance. 10 | 11 | Worker provides a java main() function for running a quartz scheduler as a daemon. It writes a PID file for help with start and stop (e.g. 
init.d) scripts. It handles the runtime shutdown event and graceful exits when it receives a shutdown signal (`ctrl-c/SIGINT`). 12 | 13 | Worker also expands the set of tables that quartz uses with additional tables to track historical job execution data. 14 | 15 | ### Setup 16 | 17 | #### Fast way 18 | 19 | 1. Run the [sql setup script](worker/src/main/resources/run-sql.sh) 20 | 2. Modify the included [sample quartz.properties](/worker/src/main/resources/quartz.properties) to point to your database (see [Quartz scheduler library config file](http://quartz-scheduler.org/documentation/quartz-2.2.x/configuration/)). 21 | 3. Run Piezo as specified in [Running](#running). 22 | 23 | #### Full setup 24 | 1. Create a database. Piezo includes a [sample database creation script](worker/src/main/resources/run_me_first.sql) 25 | 2. Create the standard [job store](http://quartz-scheduler.org/documentation/quartz-2.2.x/tutorials/tutorial-lesson-09) using ONE of the following methods: 26 | 1. Use the sample scripts included in [worker/src/main/resources](worker/src/main/resources) that start with quartz (easiest method). 27 | 2. See the [quartz job store documentation](http://quartz-scheduler.org/documentation/quartz-2.2.x/tutorials/tutorial-lesson-09) for the complete set of options. 28 | 3. From the documentation: 29 | "JDBCJobStore works with nearly any database, it has been used widely with Oracle, PostgreSQL, MySQL, MS SQLServer, HSQLDB, and DB2. To use JDBCJobStore, you must first create a set of database tables for Quartz to use. You can find table-creation SQL scripts in the 'docs/dbTables' directory of the Quartz distribution. If there is not already a script for your database type, just look at one of the existing ones, and modify it in any way necessary for your DB." 30 | 3. Create the Piezo job history tables. Use SQL scripts beginning with "piezo" in [worker/src/main/resources](/worker/src/main/resources). 31 | 4. 
Modify the included [sample quartz.properties](/worker/src/test/resources/quartz_test.properties) to point to your database (see [Quartz scheduler library config file](http://quartz-scheduler.org/documentation/quartz-2.2.x/configuration/)). 32 | 5. Run Piezo as specified in [Running](#running). 33 | 34 | ### Building 35 | You must have [sbt](http://www.scala-sbt.org/) 0.13. 36 | 37 | `sbt worker/compile` compiles sources. 38 | 39 | `sbt worker/packageBin` creates a JAR. 40 | 41 | ### Configuration 42 | #### JVM properties 43 | * `org.quartz.properties` - [Quartz scheduler library config file](http://quartz-scheduler.org/documentation/quartz-2.2.x/configuration/) 44 | * `pidfile.path` - path to file where PID should be written on startup 45 | 46 | ### Running 47 | 48 | When developing 49 | 50 | ```sh 51 | sbt worker/run 52 | ``` 53 | 54 | This uses the quartz.properties file at the repo root and runs a heatbeat job. 55 | 56 | To use to run jobs add `com.lucidchart:piezo-worker:` as a dependency to your project, and then run the 57 | com.lucidchart.piezo.Worker class. For example, 58 | 59 | ``` 60 | java -Dorg.quartz.properties= -Dpidfile.path= -cp com.lucidchart.piezo.Worker 61 | ``` 62 | 63 | ### Stats 64 | Worker reports statistics to a [StatsD](https://github.com/etsy/statsd/) server if available. 65 | 66 | It also stores historical job execution data in a pair of database tables defined in [create_history_tables.sql](worker/src/main/resources/create_history_tables.sql). These tables should be added to the same datasource as the standard quartz tables. 67 | 68 | ## Admin 69 | 70 | Admin is a web interface for viewing and managing the scheduled jobs. 71 | 72 | ### Setup 73 | 1. Follow the steps for the Worker [Setup](#setup) above. 74 | 75 | ### Building 76 | You must have [sbt](http://www.scala-sbt.org/) 0.13. 
77 | 78 | `sbt admin/debian:packageBin` creates a .deb that includes all library dependencies, and installs piezo-admin as an Upstart service running as `piezo-admin`. 79 | 80 | ### Configuration 81 | #### JVM properties 82 | * `org.quartz.properties` - [Quartz scheduler library config file](http://quartz-scheduler.org/documentation/quartz-2.2.x/configuration/) 83 | * `logback.configurationFile` - [Logback config file](http://logback.qos.ch/manual/configuration.html) 84 | * `pidfile.path` - path to file where PID should be written on startup 85 | * `http.port[s]` - [Play Framework production configuration](http://www.playframework.com/documentation/2.1.1/ProductionConfiguration) 86 | 87 | #### org.quartz.properties 88 | 89 | The properties file must have `org.quartz.scheduler.classLoadHelper.class: com.lucidchart.piezo.GeneratorClassLoader`. 90 | 91 | ### Running 92 | 93 | When developing, 94 | 95 | ```sh 96 | sbt admin/run 97 | ``` 98 | 99 | Then go to [http://localhost:8001/](http://localhost:8001/) to view the admin. 100 | 101 | ### Debian install 102 | 103 | Piezo admin can be installed as a service from a .deb (see [Building](#adminBuilding)). 104 | 105 | Starting with version 2.0.0, the deb is available on the release page. (See also [Issue #91](https://github.com/lucidsoftware/piezo/issues/91).) 106 | 107 | You can install by running 108 | 109 | ```sh 110 | sudo dpkg --install piezo-admin*.deb 111 | ``` 112 | 113 | By default, it will use /etc/piezo-admin/quartz.properties. Adjust runtime options using /etc/piezo-admin/application.ini: 114 | 115 | ``` 116 | # -J for for Java options 117 | -J-Xmx1g 118 | # -D for system propertis 119 | -Dorg.quartz.properties=path/to/quartz.properties 120 | # -jvm-debug to enable JVM debugging 121 | -jvm-debug 5005 122 | ``` 123 | 124 | 125 | ### Generic install 126 | 127 | For deploying to non-Debian platforms, you can use the zip file on the releases page. 
This is a zip of JARs and contains launcher 128 | scripts in the `bin` folder. 129 | 130 | 131 | You can also create this zip yourself by runing `sbt admin/universal:packageBin` which produces the zip at admin/target/universal. 132 | -------------------------------------------------------------------------------- /admin/app/Application.scala: -------------------------------------------------------------------------------- 1 | package com.lucidchart.piezo.admin 2 | 3 | import com.softwaremill.macwire.* 4 | import play.api.ApplicationLoader.Context 5 | import play.api.* 6 | import play.api.i18n.* 7 | import play.api.http.* 8 | import play.api.mvc.* 9 | import play.api.mvc.Results.* 10 | import play.api.Mode 11 | import play.api.routing.Router 12 | import router.Routes 13 | import scala.concurrent.Future 14 | import com.lucidchart.piezo.admin.models.* 15 | import com.lucidchart.piezo.admin.controllers.* 16 | import com.lucidchart.piezo.WorkerSchedulerFactory 17 | import org.quartz.Scheduler 18 | import _root_.controllers.AssetsComponents 19 | 20 | /** 21 | * Application loader that wires up the application dependencies using Macwire 22 | */ 23 | class PiezoAdminApplicationLoader extends ApplicationLoader { 24 | def load(context: Context): Application = new PiezoAdminComponents(context).application 25 | } 26 | 27 | class PiezoAdminComponents(context: Context) 28 | extends BuiltInComponentsFromContext(context) 29 | with I18nComponents 30 | with AssetsComponents { 31 | 32 | lazy val schedulerFactory: WorkerSchedulerFactory = new WorkerSchedulerFactory() 33 | private lazy val quartzScheduler: Scheduler = schedulerFactory.getScheduler() 34 | lazy val jobFormHelper: JobFormHelper = wire[JobFormHelper] 35 | lazy val monitoringTeams: MonitoringTeams = MonitoringTeams(configuration) 36 | 37 | private lazy val modelComponents: ModelComponents = { 38 | val props = schedulerFactory.props 39 | var source = props.getProperty("com.lucidchart.piezo.dataSource") 40 | if (source == 
null) { 41 | source = props.getProperty("org.quartz.jobStore.dataSource") 42 | } 43 | ModelComponents.forDataSource(source) 44 | } 45 | 46 | lazy val triggers: Triggers = wire[Triggers] 47 | lazy val jobs: Jobs = wire[Jobs] 48 | lazy val healthCheck: HealthCheck = wire[HealthCheck] 49 | lazy val applicationController: ApplicationController = wire[ApplicationController] 50 | 51 | lazy val jobView: views.html.job = wire[views.html.job] 52 | 53 | override val httpFilters: Seq[EssentialFilter] = Seq(new RequestStatCollector(controllerComponents.executionContext)) 54 | val logger: Logger = Logger("com.lucidchart.piezo.Global") 55 | 56 | override lazy val httpErrorHandler: HttpErrorHandler = 57 | new DefaultHttpErrorHandler(environment, configuration, devContext.map(_.sourceMapper), Some(router)) { 58 | 59 | /** 60 | * Invoked when a handler or resource is not found. 61 | * 62 | * @param request 63 | * The request that no handler was found to handle. 64 | * @param message 65 | * A message. 66 | */ 67 | override protected def onNotFound(request: RequestHeader, message: String): Future[Result] = { 68 | logger.error("Request handler not found for URL: " + request.uri) 69 | Future.successful(NotFound(com.lucidchart.piezo.admin.views.html.errors.notfound(None)(request))) 70 | } 71 | 72 | override def onServerError(request: RequestHeader, exception: Throwable): Future[Result] = { 73 | logger.error("Error handling request for URL: " + request.uri, exception) 74 | if (environment.mode == Mode.Dev) { 75 | super.onServerError(request, exception) 76 | } else { 77 | Future.successful( 78 | InternalServerError( 79 | com.lucidchart.piezo.admin.views.html.errors.error(Option(exception.getMessage))(request), 80 | ), 81 | ) 82 | } 83 | } 84 | } 85 | // set up logger 86 | LoggerConfigurator(context.environment.classLoader).foreach { 87 | _.configure(context.environment, context.initialConfiguration, Map.empty) 88 | } 89 | 90 | lazy val router: Router = wire[Routes] 91 | } 92 | 
package com.lucidchart.piezo

import org.objectweb.asm.{ClassWriter, Opcodes, Type}
import org.quartz.{Job, JobExecutionContext}
import org.quartz.spi.ClassLoadHelper
import org.slf4j.LoggerFactory
import org.slf4j.Logger

/**
 * Stand-in job implementation used as the superclass of dynamically generated job classes. It must never actually
 * be executed; it exists only so the admin can load and display jobs whose real class is not on its classpath.
 */
class DummyJob extends Job {
  def execute(context: JobExecutionContext): Unit = {
    throw new UnsupportedOperationException()
  }
}

/**
 * A [[ClassLoadHelper]] that never fails to load a class: when the requested class is missing from the classpath,
 * it generates a dummy subclass of [[DummyJob]] with the requested name instead.
 */
class GeneratorClassLoader extends ClassLoader(classOf[GeneratorClassLoader].getClassLoader) with ClassLoadHelper {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)

  /**
   * Generates bytecode for a class named `name` that extends [[DummyJob]] and has only a no-arg constructor.
   */
  private[this] def generate(name: String) = {
    val classWriter = new ClassWriter(ClassWriter.COMPUTE_MAXS)
    classWriter.visit(
      Opcodes.V1_8,
      Opcodes.ACC_PUBLIC,
      name.replace('.', '/'),
      null,
      Type.getInternalName(classOf[DummyJob]),
      null,
    )

    // Minimal constructor that just calls the super constructor and returns.
    val constructorWriter = classWriter.visitMethod(Opcodes.ACC_PUBLIC, "<init>", "()V", null, null)
    constructorWriter.visitVarInsn(Opcodes.ALOAD, 0)
    // The JVM verifier requires an <init> to delegate to an <init> of this class or of its *direct* superclass
    // (DummyJob here, per the superName passed to visit above). Delegating to java/lang/Object.<init> would make
    // every generated class fail verification with a VerifyError.
    constructorWriter.visitMethodInsn(
      Opcodes.INVOKESPECIAL,
      Type.getInternalName(classOf[DummyJob]),
      "<init>",
      "()V",
      false,
    )
    constructorWriter.visitInsn(Opcodes.RETURN)
    constructorWriter.visitMaxs(0, 0) // values ignored: COMPUTE_MAXS recomputes them
    constructorWriter.visitEnd()

    classWriter.visitEnd()

    classWriter.toByteArray
  }

  def getClassLoader: GeneratorClassLoader = this

  def loadClass[T](name: String, clazz: Class[T]): Class[? <: T] = loadClass(name).asInstanceOf[Class[? <: T]]

  def initialize() = ()

  /**
   * Loads the named class normally, generating and defining a [[DummyJob]] subclass in its place when the class
   * cannot be found.
   */
  override def loadClass(name: String): Class[?] = try {
    super.loadClass(name)
  } catch {
    case _: ClassNotFoundException =>
      logger.info(s"Dynamically generated dummy job for $name")
      val bytes = generate(name)
      defineClass(name, bytes, 0, bytes.length)
  }

}
package com.lucidchart.piezo.admin.controllers

import play.api.Logging

/**
 * Mixin for classes that want exceptions thrown during initialization logged before they propagate.
 */
trait ErrorLogging { self: Logging =>

  /** Evaluates `value`, logging and rethrowing any exception raised while doing so. */
  def logExceptions[T](value: => T): T = {
    try value
    catch {
      case t: Throwable =>
        logger.error("Caught exception initializing class", t)
        throw t
    }
  }
}
Defaulting to 5") 19 | 5 20 | } 21 | 22 | def main: Action[AnyContent] = cc.actionBuilder { requests => 23 | val workerHealth = areWorkersHealthy() 24 | val responseBody = Json.toJson(Map("HeartbeatTime" -> Json.toJson(workerHealth._2))) 25 | if (workerHealth._1) { 26 | Ok(responseBody) 27 | } else { 28 | ServiceUnavailable(responseBody) 29 | } 30 | } 31 | 32 | def areWorkersHealthy(): (Boolean, String) = { 33 | val heartbeatFile = Source.fromFile(heartbeatFilename) 34 | try { 35 | val heartbeatFileLines = heartbeatFile.getLines().toList 36 | val heartbeatTimestamp = heartbeatFileLines(0) 37 | val formatter = ISODateTimeFormat.dateTimeNoMillis().withZoneUTC() 38 | val heartbeatTime = formatter.parseDateTime(heartbeatTimestamp) 39 | val currentTime = new DateTime 40 | val isTimestampRecent = Minutes.minutesBetween(heartbeatTime, currentTime).getMinutes < minutesBetweenBeats 41 | (isTimestampRecent, formatter.print(heartbeatTime)) 42 | } finally { 43 | heartbeatFile.close() 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/controllers/JobDataHelper.scala: -------------------------------------------------------------------------------- 1 | package com.lucidchart.piezo.admin.controllers 2 | 3 | import org.quartz.JobDataMap 4 | import play.api.libs.json.* 5 | import play.api.data.Forms.* 6 | import play.api.data.Mapping 7 | 8 | case class DataMap(key: String, value: String) 9 | 10 | object DataMap { 11 | implicit val writes: Writes[DataMap] = Writes { dataMap => 12 | Json.obj( 13 | "key" -> dataMap.key, 14 | "value" -> dataMap.value, 15 | ) 16 | } 17 | } 18 | 19 | trait JobDataHelper { 20 | 21 | private def mapToJobData(dataMap: List[DataMap]): JobDataMap = { 22 | dataMap.foldLeft(new JobDataMap()) { (sofar, next) => 23 | sofar.put(next.key, next.value) 24 | sofar 25 | } 26 | } 27 | 28 | protected def jobDataToMap(jobData: JobDataMap): List[DataMap] = { 29 | 
package com.lucidchart.piezo.admin.controllers

import org.quartz.*
import play.api.data.Form
import play.api.data.Forms.*
import com.lucidchart.piezo.GeneratorClassLoader

/**
 * Builds the Play form used for creating and editing Quartz jobs, converting between bound form values and
 * [[org.quartz.JobDetail]] instances.
 */
class JobFormHelper extends JobDataHelper {

  /**
   * Assembles a [[JobDetail]] from bound form fields.
   *
   * @param jobClass fully qualified class name; a dummy stand-in class is generated when it is not on the classpath
   * @param jobData optional job data map; an empty map is used when absent
   */
  def jobFormApply(
    name: String,
    group: String,
    jobClass: String,
    description: String,
    durable: Boolean,
    requestRecovery: Boolean,
    jobData: Option[JobDataMap],
  ): JobDetail = {

    val classLoader = new GeneratorClassLoader()
    classLoader.initialize() // explicit () — auto-application of a ()-defined Scala method is rejected by Scala 3
    val jobClassObject = classLoader.loadClass(jobClass)

    JobBuilder
      .newJob(jobClassObject.asSubclass(classOf[Job]))
      .withIdentity(name, group)
      .withDescription(description)
      .requestRecovery(requestRecovery)
      .storeDurably(durable)
      .usingJobData(jobData.getOrElse(new JobDataMap()))
      .build()
  }

  /**
   * Deconstructs a [[JobDetail]] into the tuple of form field values, substituting "" for a null description.
   */
  def jobFormUnapply(job: JobDetail): Option[(String, String, String, String, Boolean, Boolean, Option[JobDataMap])] = {
    val description = Option(job.getDescription()).getOrElse("")

    Some(
      (
        job.getKey.getName(),
        job.getKey.getGroup(),
        // getName is the fully qualified name; equivalent to stripping the "class " prefix from Class#toString,
        // and consistent with JobDetailHelper's serialization
        job.getJobClass.getName,
        description,
        job.isDurable(),
        job.requestsRecovery(),
        Some(job.getJobDataMap),
      ),
    )
  }

  /** The Play form mapping for job create/edit pages. */
  def buildJobForm: Form[JobDetail] = Form[JobDetail](
    mapping(
      "name" -> nonEmptyText(),
      "group" -> nonEmptyText(),
      "class" -> nonEmptyText(),
      "description" -> text(),
      "durable" -> boolean,
      "requests-recovery" -> boolean,
      "job-data-map" -> jobDataMap,
    )(jobFormApply)(jobFormUnapply),
  )
}
Some((simpleTrigger.getRepeatCount, simpleTrigger.getRepeatInterval.toInt / 1000)) 34 | } 35 | 36 | private def cronScheduleFormApply(cronExpression: String): CronScheduleBuilder = { 37 | CronScheduleBuilder.cronSchedule(cronExpression) 38 | } 39 | 40 | private def cronScheduleFormUnapply(cron: CronScheduleBuilder) = { 41 | val cronTrigger = cron.build().asInstanceOf[CronTrigger] 42 | Some(cronTrigger.getCronExpression()) 43 | } 44 | 45 | private def triggerFormApply( 46 | triggerType: String, 47 | group: String, 48 | name: String, 49 | jobGroup: String, 50 | jobName: String, 51 | description: String, 52 | simple: Option[SimpleScheduleBuilder], 53 | cron: Option[CronScheduleBuilder], 54 | jobDataMap: Option[JobDataMap], 55 | triggerMonitoringPriority: String, 56 | triggerMaxErrorTime: Int, 57 | triggerMonitoringTeam: Option[String], 58 | ): TriggerFormValue = { 59 | val newTrigger: Trigger = TriggerBuilder 60 | .newTrigger() 61 | .withIdentity(name, group) 62 | .withDescription(description) 63 | .withSchedule((triggerType match { 64 | case "cron" => cron.get 65 | case "simple" => simple.get 66 | }): ScheduleBuilder[?]) 67 | .forJob(jobName, jobGroup) 68 | .usingJobData(jobDataMap.getOrElse(new JobDataMap())) 69 | .build() 70 | TriggerFormValue( 71 | newTrigger, 72 | TriggerMonitoringPriority.withName(triggerMonitoringPriority), 73 | triggerMaxErrorTime, 74 | triggerMonitoringTeam, 75 | ) 76 | } 77 | 78 | private def triggerFormUnapply(value: TriggerFormValue): Option[ 79 | ( 80 | String, 81 | String, 82 | String, 83 | String, 84 | String, 85 | String, 86 | Option[SimpleScheduleBuilder], 87 | Option[CronScheduleBuilder], 88 | Option[JobDataMap], 89 | String, 90 | Int, 91 | Option[String], 92 | ), 93 | ] = { 94 | val trigger = value.trigger 95 | val (triggerType: String, simple, cron) = trigger match { 96 | case cron: CronTrigger => ("cron", None, Some(cron.getScheduleBuilder)) 97 | case simple: SimpleTrigger => ("simple", Some(simple.getScheduleBuilder), None) 98 | 
case _ => throw new MatchError(trigger) 99 | } 100 | val description = if (trigger.getDescription() == null) "" else trigger.getDescription() 101 | Some( 102 | ( 103 | triggerType, 104 | trigger.getKey.getGroup(), 105 | trigger.getKey.getName(), 106 | trigger.getJobKey.getGroup(), 107 | trigger.getJobKey.getName(), 108 | description, 109 | simple.asInstanceOf[Option[SimpleScheduleBuilder]], 110 | cron.asInstanceOf[Option[CronScheduleBuilder]], 111 | Some(trigger.getJobDataMap), 112 | value.priority.toString, 113 | value.maxErrorTime, 114 | value.monitoringTeam, 115 | ), 116 | ) 117 | } 118 | 119 | private def getCronParseError(cronExpression: String): String = { 120 | try { 121 | new CronExpression(cronExpression).getCronExpression() 122 | } catch { 123 | case e: ParseException => e.getMessage() 124 | } 125 | } 126 | 127 | def isValidCronExpression(cronExpression: String): Boolean = { 128 | try { 129 | new CronExpression(cronExpression) 130 | true 131 | } catch { 132 | case e: ParseException => false 133 | } 134 | } 135 | 136 | def validCronExpression: Constraint[String] = Constraint[String]("Invalid cron expression") { cronExpression => 137 | if (!isValidCronExpression(cronExpression)) { 138 | Invalid(ValidationError(getCronParseError(cronExpression))) 139 | } else { 140 | Valid 141 | } 142 | } 143 | 144 | def buildTriggerForm: Form[TriggerFormValue] = Form( 145 | mapping( 146 | "triggerType" -> nonEmptyText(), 147 | "group" -> nonEmptyText(), 148 | "name" -> nonEmptyText(), 149 | "jobGroup" -> nonEmptyText(), 150 | "jobName" -> nonEmptyText(), 151 | "description" -> text(), 152 | "simple" -> optional( 153 | mapping( 154 | "repeatCount" -> number(), 155 | "repeatInterval" -> number(), 156 | )(simpleScheduleFormApply)(simpleScheduleFormUnapply), 157 | ), 158 | "cron" -> optional( 159 | mapping( 160 | "cronExpression" -> nonEmptyText().verifying(validCronExpression), 161 | )(cronScheduleFormApply)(cronScheduleFormUnapply), 162 | ), 163 | "job-data-map" -> 
/**
 * Binds the triggerMaxErrorTime form field, requiring it to be strictly greater than the trigger's widest firing
 * interval (derived from the cron expression for cron triggers, or the repeat interval for simple triggers).
 */
object MaxSecondsBetweenSuccessesFormatter extends Formatter[Int] {
  override val format: Option[(String, Seq[Any])] = Some(("format.triggerMaxErrorTime", Nil))

  override def bind(key: String, data: Map[String, String]): Either[Seq[FormError], Int] = {
    parsing(_.toInt, "Numeric value expected", Nil)(key, data).flatMap { maxSecondsBetweenSuccesses =>
      // Interval parse happens only after the value itself parsed, mirroring the short-circuiting bind order
      val parsedInterval =
        if (data.contains("cron.cronExpression")) {
          parsing(expr => CronHelper.getMaxInterval(expr), "try again.", Nil)(
            "cron.cronExpression",
            data,
          )
        } else {
          parsing(_.toLong, "try again.", Nil)("simple.repeatInterval", data)
        }
      parsedInterval.flatMap { maxIntervalTime =>
        if (maxSecondsBetweenSuccesses > maxIntervalTime) {
          Right(maxSecondsBetweenSuccesses)
        } else {
          Left(
            List(
              FormError(
                "triggerMaxErrorTime",
                s"Must be greater than the maximum trigger interval ($maxIntervalTime seconds)",
              ),
            ),
          )
        }
      }
    }
  }

  override def unbind(key: String, value: Int): Map[String, String] = Map(key -> value.toString)
}
object TriggerHelper {

  /**
   * Lists every trigger key known to the scheduler, grouped by trigger group and sorted by group name, with the
   * keys inside each group sorted by trigger name.
   */
  def getTriggersByGroup(scheduler: Scheduler): mutable.Buffer[(String, List[TriggerKey])] = {
    scheduler.getTriggerGroupNames.asScala
      .map { groupName =>
        val keys: List[TriggerKey] =
          scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals(groupName)).asScala.toList
        (groupName, keys.sortBy(_.getName))
      }
      .sortBy(_._1)
  }

  /**
   * JSON serializer for triggers, including schedule details and the trigger's monitoring configuration
   * (defaulting to Low priority / 300 seconds / no team when no monitoring record exists).
   */
  def writesTrigger(monitoringModel: TriggerMonitoringModel): Writes[Trigger] = Writes { trigger =>
    val triggerKey = trigger.getKey
    val triggerType = TriggerType(trigger)
    // Same dispatch order as TriggerType: cron first, then simple, otherwise no schedule object
    val schedule = trigger match {
      case cronTrigger: CronTrigger =>
        Json.obj(
          "cron" ->
            Json.obj(
              "cronExpression" -> cronTrigger.getCronExpression,
            ),
        )
      case simpleTrigger: SimpleTrigger =>
        Json.obj(
          "simple" -> Json.obj(
            "repeatInterval" -> simpleTrigger.getRepeatInterval,
            "repeatCount" -> simpleTrigger.getRepeatCount,
          ),
        )
      case _ => Json.obj()
    }

    val (monitoringPriority, maxSecondsInError, monitoringTeam) = monitoringModel
      .getTriggerMonitoringRecord(trigger.getKey)
      .map(record => (record.priority, record.maxSecondsInError, record.monitoringTeam))
      .getOrElse((TriggerMonitoringPriority.Low, 300, None))

    val jobKey = trigger.getJobKey
    val jobDataMap = trigger.getJobDataMap
    Json.obj(
      "triggerType" -> triggerType.toString,
      "jobGroup" -> jobKey.getGroup,
      "jobName" -> jobKey.getName,
      "group" -> triggerKey.getGroup,
      "name" -> triggerKey.getName,
      "description" -> trigger.getDescription,
      "job-data-map" -> JsObject(jobDataMap.getKeys.toSeq.map(key => key -> JsString(jobDataMap.getString(key)))),
      "triggerMonitoringPriority" -> monitoringPriority.name,
      "triggerMaxErrorTime" -> maxSecondsInError,
      "triggerMonitoringTeam" -> monitoringTeam,
    ) ++ schedule
  }

  /** Serializer for a sequence of triggers, delegating to [[writesTrigger]]. */
  def writesTriggerSeq(monitoringModel: TriggerMonitoringModel): Writes[Seq[Trigger]] =
    Writes.seq(writesTrigger(monitoringModel))
}
/** The set of team names that triggers may be assigned to for monitoring. */
case class MonitoringTeams(value: Seq[String]) {
  // True when at least one team is configured
  def teamsDefined: Boolean = value.nonEmpty
}
object MonitoringTeams extends Logging {

  /**
   * Loads team names from the JSON file named by `com.lucidchart.piezo.admin.monitoringTeams.path` — an array of
   * objects with a "name" field. Returns an empty team list when the path is unset or the file cannot be read.
   */
  def apply(configuration: Configuration): MonitoringTeams = {
    import scala.util.Using

    val path = configuration.getOptional[String]("com.lucidchart.piezo.admin.monitoringTeams.path")

    val value = path
      .flatMap { p =>
        // Using closes the stream in all cases and returns a Try — the original FileInputStream was never closed
        Using(new FileInputStream(p)) { stream =>
          Json
            .parse(stream)
            .as[JsArray]
            .value
            .map(entry => (entry \ "name").as[String])
            .toSeq
        }.recoverWith { case NonFatal(e) =>
          logger.error(s"Error reading monitoring teams from $p", e)
          Failure(e)
        }.toOption
      }
      .getOrElse(Seq.empty)

    MonitoringTeams(value)
  }

  def empty: MonitoringTeams = MonitoringTeams(Seq.empty)
}
(trigger.isInstanceOf[org.quartz.CronTrigger]) { 12 | TriggerType.Cron 13 | } else if (trigger.isInstanceOf[org.quartz.SimpleTrigger]) { 14 | TriggerType.Simple 15 | } else { 16 | TriggerType.Unknown 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/utils/CronHelper.scala: -------------------------------------------------------------------------------- 1 | package com.lucidchart.piezo.admin.utils 2 | 3 | import java.time.temporal.{ChronoUnit, TemporalUnit} 4 | import java.time.{Instant, LocalDate, Month, ZoneOffset} 5 | import java.util.{Date, TimeZone} 6 | import org.quartz.CronExpression 7 | import play.api.Logging 8 | import scala.annotation.tailrec 9 | import scala.util.control.NonFatal 10 | 11 | object CronHelper extends Logging { 12 | val IMPOSSIBLE_MAX_INTERVAL: Long = Long.MaxValue 13 | val DEFAULT_MAX_INTERVAL = 0 14 | val NON_EXISTENT: Int = -1 15 | 16 | /** 17 | * Approximates the largest interval between two trigger events for a given cron expression. This is a difficult 18 | * problem to solve perfectly, so this represents a "best effort approach" - the goal is to handle the most 19 | * expressions with the least amount of complexity. 20 | * 21 | * Known limitations: 22 | * 1. Daylight savings 23 | * 1. 
Complex year subexpressions 24 | * @param cronExpression 25 | */ 26 | def getMaxInterval(cronExpression: String): Long = { 27 | try { 28 | val (secondsMinutesHourStrings, dayStrings) = cronExpression.split("\\s+").splitAt(3) 29 | val subexpressions = getSubexpressions(secondsMinutesHourStrings :+ dayStrings.mkString(" ")).reverse 30 | 31 | // find the largest subexpression that is not continuously triggering (*) 32 | val outermostIndex = subexpressions.indexWhere(!_.isContinuouslyTriggering) 33 | if (outermostIndex == NON_EXISTENT) 1 34 | else { 35 | // get the max interval for this expression 36 | val outermost = subexpressions(outermostIndex) 37 | if (outermost.maxInterval == IMPOSSIBLE_MAX_INTERVAL) IMPOSSIBLE_MAX_INTERVAL 38 | else { 39 | // subtract the inner intervals of the smaller, nested subexpressions 40 | val nested = subexpressions.slice(outermostIndex + 1, subexpressions.size) 41 | val innerIntervalsOfNested = nested.collect { case expr: BoundSubexpression => expr.innerInterval }.sum 42 | outermost.maxInterval - innerIntervalsOfNested 43 | } 44 | } 45 | 46 | } catch { 47 | case NonFatal(e) => 48 | logger.error("Failed to validate cron expression", e) 49 | DEFAULT_MAX_INTERVAL 50 | } 51 | } 52 | 53 | private def getSubexpressions(parts: Array[String]): IndexedSeq[Subexpression] = { 54 | parts 55 | .zip(List(Seconds.apply, Minutes.apply, Hours.apply, Days.apply)) 56 | .map { case (str, cronType) => cronType(str) } 57 | .toIndexedSeq 58 | } 59 | } 60 | 61 | case class Seconds(str: String) extends BoundSubexpression(str, x => s"$x * * ? * *", ChronoUnit.SECONDS, 60) 62 | case class Minutes(str: String) extends BoundSubexpression(str, x => s"0 $x * ? * *", ChronoUnit.MINUTES, 60) 63 | case class Hours(str: String) extends BoundSubexpression(str, x => s"0 0 $x ? 
/**
 * Represents a subexpression in which the range over which the triggers occur is bound or fixed. For example,
 * seconds always occur within a minute, minutes within an hour, and hours within a day. Because the range is
 * fixed, all possibilities can be determined by sampling over the entire range once.
 */
abstract class BoundSubexpression(
  str: String,
  getSimplifiedCron: String => String,
  temporalUnit: TemporalUnit,
  val numUnitsInContainer: Long,
) extends Subexpression(str, getSimplifiedCron) {

  final override protected val startDate = new Date(BoundSubexpression.startInstant.toEpochMilli)
  // Exactly one full container (minute/hour/day) past the start
  final protected val endDate: Date = Date.from(
    BoundSubexpression.startInstant.plus(numUnitsInContainer, temporalUnit),
  )
  final override lazy val maxInterval: Long = getMaxInterval(cron, startDate, endDate, 0)
  final override lazy val isContinuouslyTriggering: Boolean = maxInterval == temporalUnit.getDuration.getSeconds

  /**
   * The interval between the first and last trigger within the range, or "everything but the ends". Should
   * encompass every trigger produced by the subexpression.
   */
  final lazy val innerInterval: Long = getInnerInterval(cron, startDate, endDate)

  // Walks every firing inside the range, tracking the widest gap in seconds. Iterates once past "end" so the gap
  // that wraps around the container boundary is also counted.
  @tailrec
  private def getMaxInterval(expr: CronExpression, prev: Date, end: Date, best: Long): Long = {
    Option(expr.getTimeAfter(prev)) match {
      case Some(next) if !prev.after(end) =>
        val gapSeconds = (next.getTime - prev.getTime) / 1000
        getMaxInterval(expr, next, end, Math.max(gapSeconds, best))
      case _ => best
    }
  }

  // Seconds between the first and last firing inside the range; Long.MaxValue when the expression never fires.
  private def getInnerInterval(expr: CronExpression, prev: Date, end: Date): Long = {
    Option(expr.getTimeAfter(prev)) match {
      case None => Long.MaxValue
      case Some(firstTriggerDate) =>
        getLastTriggerTime(expr, firstTriggerDate, end) - firstTriggerDate.getTime / 1000
    }
  }

  // Epoch seconds of the last firing that does not go past "end".
  @tailrec
  private def getLastTriggerTime(expr: CronExpression, prev: Date, end: Date): Long = {
    Option(expr.getTimeAfter(prev)) match {
      case Some(next) if !next.after(end) => getLastTriggerTime(expr, next, end)
      case _ => prev.getTime / 1000
    }
  }
}

object BoundSubexpression {
  // Fixed reference instant in UTC (no daylight savings); one second before midnight, presumably so a firing at
  // 00:00:00 is captured by getTimeAfter — TODO confirm
  final protected val startInstant: Instant = LocalDate
    .of(2010, Month.SEPTEMBER, 3)
    .atStartOfDay
    .toInstant(ZoneOffset.UTC)
    .minus(1, ChronoUnit.SECONDS)
}
/**
 * Represents a subexpression that is unbound, meaning that the range over which triggers occur is unknown or
 * variable. For example, days can occur within a week, month, or year, and each of these ranges can vary in size.
 * Because the range cannot be determined, the max interval is estimated by sampling a fixed number of firings; the
 * larger the number of samples, the more accurate the estimate.
 */
abstract class UnboundSubexpression(
  str: String,
  getSimplifiedCron: String => String,
  val maxNumSamples: Long,
) extends Subexpression(str, getSimplifiedCron)
  with Logging {

  final override protected val startDate = new Date
  final override lazy val maxInterval: Long = getSampledMaxInterval(startDate, maxNumSamples, cron)
  final override lazy val isContinuouslyTriggering: Boolean = str.split(" ").forall(expr => expr == "*" || expr == "?")

  // Samples successive firings starting now, keeping the widest gap seen. Returns IMPOSSIBLE_MAX_INTERVAL when the
  // expression never fires at all (no firing after startDate).
  @tailrec
  private def getSampledMaxInterval(prev: Date, numSamples: Long, expr: CronExpression, maxInterval: Long = 0): Long = {
    Option(expr.getTimeAfter(prev)) match {
      case Some(next) if numSamples > 0 =>
        val intervalInSeconds = (next.getTime - prev.getTime) / 1000
        if (intervalInSeconds > maxInterval) {
          // Only log when a new widest gap is found, to keep the output manageable
          val sampleId = maxNumSamples - numSamples
          logger.debug(s"Seconds:$intervalInSeconds Sample:$sampleId Interval:$prev -> $next")
        }
        getSampledMaxInterval(next, numSamples - 1, expr, Math.max(intervalInSeconds, maxInterval))
      case _ => if (prev.equals(startDate)) CronHelper.IMPOSSIBLE_MAX_INTERVAL else maxInterval
    }
  }
}
package com.lucidchart.piezo.admin.utils

import org.quartz.JobDetail

/** Helpers for normalizing job definitions submitted from the admin UI. */
object JobUtils {

  /**
   * Strips the empty-string key from the job's data map when present (blank form rows produce it). Mutates and
   * returns the same [[JobDetail]].
   */
  def cleanup(job: JobDetail): JobDetail = {
    val dataMap = job.getJobDataMap
    if (dataMap.containsKey("")) {
      dataMap.remove("")
    }
    job
  }

}
-------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/editJob.scala.html: -------------------------------------------------------------------------------- 1 | @( 2 | jobsByGroup: scala.collection.mutable.Buffer[(String, scala.collection.immutable.List[org.quartz.JobKey])], 3 | jobForm: Form[org.quartz.JobDetail], 4 | submitValue: String, 5 | formAction: play.api.mvc.Call, 6 | existing: Boolean, 7 | errorMessage: Option[String] = None, 8 | scripts: List[String] = List[String]("js/jobData.js", "js/typeAhead.js") 9 | )( 10 | implicit 11 | request: play.api.mvc.Request[AnyContent], 12 | messagesProvider: play.api.i18n.MessagesProvider 13 | ) 14 | 15 | @import com.lucidchart.piezo.admin.views.FormHelpers._ 16 | 17 | @com.lucidchart.piezo.admin.views.html.jobsLayout(jobsByGroup, None, scripts) { 18 | @if(!errorMessage.isEmpty) { 19 |

@errorMessage.get

20 | } 21 | 22 | @if(existing) { 23 |

Edit Job

24 | } else { 25 |

New Job

26 | } 27 | 28 |

@jobForm.errors.filter(_.key == "").map(_.message).mkString(", ")

29 |
30 |
31 | @defining(if(existing) {Symbol("readonly")} else {Symbol("none")}) { newEditOnly => 32 | 33 | @helper.inputText(jobForm("group"), Symbol("_label") -> "Group", Symbol("labelClass") -> "col-sm-3 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Group", newEditOnly -> None, Symbol("class") -> "job-group-type-ahead form-control form-inline-control") 34 | @helper.inputText(jobForm("name"), Symbol("_label") -> "Name", Symbol("labelClass") -> "col-sm-3 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Name", newEditOnly -> None, Symbol("class") -> "form-control form-inline-control") 35 | @helper.inputText(jobForm("class"), Symbol("_label") -> "Class", Symbol("labelClass") -> "col-sm-3 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Name", Symbol("class") -> "form-control form-inline-control") 36 | @helper.inputText(jobForm("description"), Symbol("_label") -> "Description", Symbol("labelClass") -> "col-sm-3 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Description", Symbol("class") -> "form-control form-inline-control") 37 | @helper.checkbox(jobForm("durable"), Symbol("_label") -> "Durable", Symbol("labelClass") -> "col-sm-3 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("readonly") -> None, Symbol("class") -> "form-inline-control", Symbol("checked") -> true, Symbol("disabled") -> true, Symbol("id") -> "durable-placeholder") 38 | 39 | 40 | 41 | @helper.checkbox(jobForm("requests-recovery"), Symbol("_label") -> "Requests recovery", Symbol("labelClass") -> "col-sm-3 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("class") -> "form-inline-control") 42 | 43 |

Job Data Map

44 | 45 |
46 | @helper.repeat(jobForm("job-data-map"), min = jobForm("job-data-map").indexes.length + 1) { dataMap => 47 | 48 | @dataMap("key").value.map { _ => 49 | @dataMap("value").value.map { _ => 50 | 51 | } 52 | } 53 | 54 | @helper.inputText(dataMap("key"), Symbol("_label") -> "Key", Symbol("labelClass") -> "col-sm-3 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Key", Symbol("class") -> "job-data-key form-control form-inline-control") 55 | @helper.inputText(dataMap("value"), Symbol("_label") -> "Value", Symbol("labelClass") -> "col-sm-3 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Value", Symbol("class") -> "job-data-value form-control form-inline-control") 56 | 57 | } 58 | 59 | 60 |
61 | } 62 | 63 |
64 | 65 | 66 | 67 |
68 | } 69 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/editTrigger.scala.html: -------------------------------------------------------------------------------- 1 | @( 2 | triggersByGroup: scala.collection.mutable.Buffer[(String, scala.collection.immutable.List[org.quartz.TriggerKey])], 3 | monitoringTeams: Seq[String], 4 | triggerForm: Form[com.lucidchart.piezo.admin.controllers.TriggerFormValue], 5 | formAction: play.api.mvc.Call, 6 | existing: Boolean, 7 | isTemplate: Boolean, 8 | errorMessage: Option[String] = None, 9 | scripts: List[String] = List[String]("js/jobData.js", "js/typeAhead.js", "js/triggerMonitoring.js") 10 | )( 11 | implicit 12 | request: play.api.mvc.Request[AnyContent], 13 | messagesProvider: play.api.i18n.MessagesProvider, 14 | ) 15 | 16 | @import com.lucidchart.piezo.TriggerMonitoringPriority 17 | @import com.lucidchart.piezo.admin.controllers.{routes=>piezoRoutes} 18 | @import com.lucidchart.piezo.admin.views.FormHelpers._ 19 | 20 | @com.lucidchart.piezo.admin.views.html.triggersLayout(triggersByGroup, None, scripts) { 21 | @if(!errorMessage.isEmpty) { 22 |

@errorMessage.get

23 | } 24 | 25 | @if(existing) { 26 |

Edit Trigger

27 | } else { 28 |

New Trigger

29 | } 30 | 31 |
32 |
33 |
34 | 38 |
39 |
40 | @if(triggerForm.data.get("triggerType").getOrElse("") == "simple") { 41 | 42 | } else { 43 | 44 | } 45 |
46 |
47 |
48 |
49 | 50 |

@triggerForm.errors.filter(_.key == "").map(_.message).mkString(", ")

51 |
52 |
53 | @defining(if(existing) {Symbol("readonly")} else {Symbol("none")}) { newEditOnly => 54 | @helper.input(triggerForm("group"), Symbol("_label") -> "Group", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Group", Symbol("value")-> triggerForm.data.get("group").getOrElse(""), newEditOnly -> None) { (id, name, value, args) => 55 | 56 | } 57 | @helper.input(triggerForm("name"), Symbol("_label") -> "Name", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Name", Symbol("value")-> triggerForm.data.get("name").getOrElse(""), newEditOnly -> None) { (id, name, value, args) => 58 | 59 | } 60 | @helper.input(triggerForm("jobGroup"), Symbol("_label") -> "Job group", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Job group", Symbol("value")-> triggerForm.data.get("jobGroup").getOrElse(""), newEditOnly -> None) { (id, name, value, args) => 61 | 62 | } 63 | @helper.input(triggerForm("jobName"), Symbol("_label") -> "Job name", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Job name", Symbol("value")-> triggerForm.data.get("jobName").getOrElse(""), newEditOnly -> None) { (id, name, value, args) => 64 | 65 | } 66 | } 67 | @helper.select(triggerForm("triggerMonitoringPriority"), TriggerMonitoringPriority.values.map(tp => tp.name -> tp.name), Symbol("_label") -> "Monitoring Priority", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("class") -> "form-control", Symbol("value") -> triggerForm.data.get("triggerMonitoringPriority").getOrElse(TriggerMonitoringPriority.Low), Symbol("placeholder") -> TriggerMonitoringPriority.Low) 68 |
69 | @helper.input(triggerForm("triggerMaxErrorTime"), Symbol("_label") -> "Monitoring - Max Seconds Between Successes", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "", Symbol("value") -> triggerForm.data.get("triggerMaxErrorTime").getOrElse(300)) { (id, name, value, args) => 70 | 71 | } 72 | @if(monitoringTeams.nonEmpty) { 73 | @helper.select(triggerForm("triggerMonitoringTeam"), monitoringTeams.map(mt => mt -> mt), Symbol("_default") -> "Select team", Symbol("_label") -> "Monitoring team", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("class") -> "form-control", Symbol("value") -> triggerForm.data.get("triggerMonitoringTeam").getOrElse("")) 74 | } else { 75 | @helper.input(triggerForm("triggerMonitoringTeam"), Symbol("_label") -> "Monitoring team", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "", Symbol("value") -> triggerForm.data.get("triggerMonitoringTeam").getOrElse(None)) { (id, name, value, args) => 76 | 77 | } 78 | } 79 |
80 | 81 | @helper.input(triggerForm("description"), Symbol("_label") -> "Description", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-10", Symbol("placeholder") -> "Description", Symbol("value")-> triggerForm.data.get("description").getOrElse("")) { (id, name, value, args) => 82 | 83 | } 84 | 85 |
86 |
87 |
88 | @if(triggerForm.data.get("triggerType").getOrElse("") == "simple") { 89 | @helper.input(triggerForm("simple.repeatCount"), Symbol("_label") -> "Repeat count", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("_class") -> "form-horizontal-inline", Symbol("inputDivClass") -> "col-sm-2", Symbol("placeholder") -> "Repeat count", Symbol("value")-> triggerForm.data.get("simple.repeatCount").getOrElse("")) { (id, name, value, args) => 90 | 91 | } 92 | @helper.input(triggerForm("simple.repeatInterval"), Symbol("_label") -> "Repeat interval (seconds)", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("_class") -> "form-horizontal-inline", Symbol("inputDivClass") -> "col-sm-2", Symbol("placeholder") -> "Repeat interval (seconds)", Symbol("value")-> triggerForm.data.get("simple.repeatInterval").getOrElse("")) { (id, name, value, args) => 93 | 94 | } 95 | } else { 96 | @helper.input(triggerForm("cron.cronExpression"), Symbol("_label") -> "Cron Expression", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("_class") -> "form-horizontal-inline", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Cron expression", Symbol("value")-> triggerForm.data.get("cron.cronExpression").getOrElse("")) { (id, name, value, args) => 97 | 98 | } 99 | } 100 | 101 |

Job Data Map

102 | 103 |
104 | @helper.repeat(triggerForm("job-data-map"), min = triggerForm("job-data-map").indexes.length + 1) { dataMap => 105 | 106 | @dataMap("key").value.map { _ => 107 | @dataMap("value").value.map { _ => 108 | 109 | } 110 | } 111 | 112 | @helper.inputText(dataMap("key"), Symbol("_label") -> "Key", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Key", Symbol("class") -> "job-data-key form-control form-inline-control") 113 | @helper.inputText(dataMap("value"), Symbol("_label") -> "Value", Symbol("labelClass") -> "col-sm-2 text-right", Symbol("inputDivClass") -> "col-sm-4", Symbol("placeholder") -> "Value", Symbol("class") -> "job-data-value form-control form-inline-control") 114 | 115 | } 116 | 117 | 118 |
119 | 120 |
121 |
122 | 123 | 124 |
125 | } 126 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/errors/error.scala.html: -------------------------------------------------------------------------------- 1 | @( 2 | errorMessage: Option[String] = None 3 | )( 4 | implicit 5 | request: RequestHeader 6 | ) 7 | 8 | @com.lucidchart.piezo.admin.views.html.main("Piezo Error") { 9 |
10 |

Error

11 |

@errorMessage.getOrElse("Unknown Error")

12 |
13 | } 14 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/errors/notfound.scala.html: -------------------------------------------------------------------------------- 1 | @( 2 | message: Option[String] = None 3 | )( 4 | implicit 5 | request: RequestHeader 6 | ) 7 | 8 | @com.lucidchart.piezo.admin.views.html.main("Piezo Error") { 9 |
10 |

Page Not Found

11 |

@message.getOrElse("The page you requested could not be located.")

12 |
13 | } 14 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/helpers/fieldConstructor.scala.html: -------------------------------------------------------------------------------- 1 | @(elements: helper.FieldElements) 2 | 3 |
4 | 5 |
6 | @elements.input 7 | @elements.errors.mkString(", ") 8 |
9 |
10 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/index.scala.html: -------------------------------------------------------------------------------- 1 | @( 2 | )( 3 | implicit 4 | request: play.api.mvc.Request[AnyContent] 5 | ) 6 | 7 | @com.lucidchart.piezo.admin.views.html.main("Piezo Admin Home") { 8 |
9 | 10 |
11 |
12 | Piezo 13 | was created by 14 | Lucid Software, Inc. 15 | to provide management tools for quartz scheduler clusters. 16 |
17 | } 18 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/jobs.scala.html: -------------------------------------------------------------------------------- 1 | @import com.lucidchart.piezo.admin.controllers.{routes=>piezoRoutes} 2 | @import org.joda.time.format.DateTimeFormat 3 | @import org.joda.time.DateTime 4 | @import org.quartz.{JobKey, SchedulerMetaData} 5 | @( 6 | jobsByGroup: scala.collection.mutable.Buffer[(String, scala.collection.immutable.List[org.quartz.JobKey])], 7 | currentJob: Option[org.quartz.JobDetail], 8 | jobsHistory: Option[List[com.lucidchart.piezo.JobRecord]], 9 | untriggeredJobs: List[JobKey], 10 | schedulerMetadata: SchedulerMetaData, 11 | errorMessage: Option[String] = None 12 | )( 13 | implicit request: play.api.mvc.Request[AnyContent] 14 | ) 15 | 16 | @com.lucidchart.piezo.admin.views.html.jobsLayout(jobsByGroup, currentJob) { 17 |

Select a job

18 | 19 | 20 | 21 |
22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 |
Scheduler name@schedulerMetadata.getSchedulerName()
Total jobs@{jobsByGroup.foldLeft(0)((a, b) => a + b._2.length)}
34 |

Import Jobs

35 |
36 |
37 | 40 | 41 |
42 | 43 | 44 |
45 | 46 |
47 |
48 | 49 | 50 | @if(jobsHistory.isDefined) { 51 |

Jobs History

52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | @defining(DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")) { dtf => 67 | @jobsHistory.get.map { record => 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | } 79 | } 80 | 81 |
Most Recent StartFinishJob GroupJob NameTrigger GroupTrigger NameSuccess
@dtf.print(new DateTime(record.start))@dtf.print(new DateTime(record.finish))@record.group@record.name@record.trigger_group@record.trigger_name@record.success
82 | } 83 | @if(!untriggeredJobs.isEmpty) { 84 |

Untriggered Jobs

85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | @untriggeredJobs.map { job => 94 | 95 | 96 | 97 | 98 | } 99 | 100 |
Job GroupJob Name
@job.getGroup@job.getName
101 | } 102 | 103 | 153 | } 154 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/jobsLayout.scala.html: -------------------------------------------------------------------------------- 1 | @( 2 | jobsByGroup: scala.collection.mutable.Buffer[(String, scala.collection.immutable.List[org.quartz.JobKey])], 3 | currentJob: Option[org.quartz.JobDetail], 4 | scripts: List[String] = List[String]() 5 | )( 6 | detailsContent: Html 7 | )( 8 | implicit 9 | request: play.api.mvc.Request[AnyContent] 10 | ) 11 | @import com.lucidchart.piezo.admin.controllers.{routes=>piezoRoutes} 12 | @import com.lucidchart.piezo.admin.views 13 | @import java.net.URLEncoder 14 | 15 | @com.lucidchart.piezo.admin.views.html.main("Piezo Jobs", scripts) { 16 |
17 |
18 |

Job groups

19 |
20 | @jobsByGroup.map { jobGroup => 21 |
22 |
23 |
24 | 25 | @jobGroup._1 26 | 27 |
28 |
29 |
32 | } else { 33 | class="panel-collapse collapse "> 34 | } 35 |
36 |
37 | @jobGroup._2.map { jobKey => 38 | 45 | @jobKey.getName() 46 | 47 | } 48 |
49 |
50 |
51 |
52 | } 53 |
54 |
55 |
56 | 57 | @detailsContent 58 |
59 |
60 | } 61 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/main.scala.html: -------------------------------------------------------------------------------- 1 | @( 2 | title: String, 3 | scripts: List[String] = List(), 4 | styles: List[String] = List() 5 | )( 6 | content: Html 7 | )( 8 | implicit request: RequestHeader 9 | ) 10 | @import com.lucidchart.piezo.admin.controllers.{routes=>piezoRoutes} 11 | 12 | 13 | 14 | 15 | @if(!title.isEmpty){ @title | } Piezo 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | @for(style <- styles) { 25 | 26 | } 27 | 28 | 29 | 30 | 31 | @for(script <- scripts) { 32 | 33 | } 34 | 35 | 36 | 37 | 38 | 58 | 59 | 69 | 70 |
71 |
72 | @if(!request.flash.isEmpty) { 73 |
74 | @request.flash("message") 75 |
76 | 88 | } 89 |
90 | 91 | @content 92 | 93 |
94 |
95 |

© Apache License Version 2.0

96 |
Created by   
97 |
98 |
99 | 100 | @for(script <- scripts) { 101 | 102 | } 103 | 104 | 105 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/trigger.scala.html: -------------------------------------------------------------------------------- 1 | @import com.lucidchart.piezo.TriggerMonitoringPriority 2 | @import com.lucidchart.piezo.admin.controllers.{routes=>piezoRoutes} 3 | @import com.lucidchart.piezo.admin.views 4 | @import java.net.URLEncoder 5 | @import org.quartz._ 6 | @import org.quartz.Trigger.TriggerState 7 | @( 8 | triggersByGroup: scala.collection.mutable.Buffer[(String, scala.collection.immutable.List[org.quartz.TriggerKey])], 9 | currentTrigger: Option[org.quartz.Trigger], 10 | triggerHistory: Option[List[com.lucidchart.piezo.TriggerRecord]], 11 | errorMessage: Option[String] = None, 12 | triggerMonitoringPriority: Option[com.lucidchart.piezo.TriggerMonitoringPriority.Value] = None, 13 | triggerMaxErrorTime: Integer = 300, 14 | triggerMonitoringTeam: Option[String] = None, 15 | triggerState: Option[TriggerState] = None 16 | )( 17 | implicit 18 | request: play.api.mvc.Request[AnyContent] 19 | ) 20 | 21 | @com.lucidchart.piezo.admin.views.html.triggersLayout(triggersByGroup, currentTrigger) { 22 | @if(!errorMessage.isEmpty) { 23 |

@errorMessage.get

24 | } 25 | @if(!currentTrigger.isEmpty) { 26 |

@currentTrigger.get.getKey.getGroup() » @currentTrigger.get.getKey.getName()

27 | "simple" 31 | case t => throw new MatchError(t) 32 | } 33 | )}?templateGroup=@currentTrigger.get.getKey.getGroup()&templateName=@currentTrigger.get.getKey.getName()"> 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | @if(triggerState.contains(TriggerState.PAUSED) || triggerState.contains(TriggerState.ERROR)) { 44 | 45 | 46 | 47 | } else { 48 | 49 | 50 | 51 | } 52 |
53 | 56 | 57 | 62 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | @if(currentTrigger.get.getJobKey() != null) { 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | } 132 | @currentTrigger.get match { 133 | case c: CronTrigger => { 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | } 147 | case s: SimpleTrigger => { 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | } 161 | case _ => { } 162 | } 163 | @triggerMonitoringPriority.map { triggerMonitoringPriority => 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | } 177 | 178 | 179 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 |
Class name:@currentTrigger.get.getClass()
Trigger group:@currentTrigger.get.getKey.getGroup()
Trigger name:@currentTrigger.get.getKey.getName
Job group:@currentTrigger.get.getJobKey.getGroup()
Job name:@currentTrigger.get.getJobKey.getName()
Cron expression:@c.getCronExpression()
Expression summary:@c.getExpressionSummary()
Time zone:@c.getTimeZone().getDisplayName()
Repeat count:@s.getRepeatCount()
Repeat interval (seconds):@{s.getRepeatInterval() / 1000}
Times triggered:@s.getTimesTriggered()
Monitoring priority:@triggerMonitoringPriority
Monitoring - max seconds between successes:@triggerMaxErrorTime seconds
Monitoring team:@triggerMonitoringTeam.getOrElse("")
Description: 180 | @if(currentTrigger.get.getDescription() != null) {@currentTrigger.get.getDescription()} else {} 181 |
State:@triggerState.map(_.toString)
End time:@currentTrigger.get.getEndTime()
Final fire time:@currentTrigger.get.getFinalFireTime()
Misfire instruction:@currentTrigger.get.getMisfireInstruction()
Next fire time:@currentTrigger.get.getNextFireTime()
Previous fire time:@currentTrigger.get.getPreviousFireTime()
Start time:@currentTrigger.get.getStartTime()
213 | 214 |
215 | 216 |

Trigger Data Map

217 | 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | @currentTrigger.get.getJobDataMap.getKeys.map { triggerDataKey => 226 | 227 | 228 | 229 | 230 | } 231 | 232 |
KeyValue
@triggerDataKey@currentTrigger.get.getJobDataMap.getString(triggerDataKey)
233 | 234 |
235 | 236 | @if(triggerHistory.isDefined) { 237 |

Trigger History

238 | 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | @triggerHistory.get.map { record => 249 | 250 | 251 | 252 | 253 | 254 | 255 | } 256 | 257 |
Scheduled StartActual StartFinishMisfire
@record.scheduled_start@record.actual_start@record.finish@record.misfire
258 | } 259 | } 260 | } 261 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/triggers.scala.html: -------------------------------------------------------------------------------- 1 | @( 2 | triggersByGroup: scala.collection.mutable.Buffer[(String, scala.collection.immutable.List[org.quartz.TriggerKey])], 3 | currentTrigger: Option[org.quartz.Trigger], 4 | upcomingTriggers: List[org.quartz.Trigger], 5 | schedulerMetadata: org.quartz.SchedulerMetaData, 6 | errorMessage: Option[String] = None 7 | )( 8 | implicit 9 | request: play.api.mvc.Request[AnyContent] 10 | ) 11 | @import com.lucidchart.piezo.admin.views 12 | @import java.util.Date 13 | @import org.joda.time.format.DateTimeFormat 14 | @import org.joda.time.DateTime 15 | 16 | @com.lucidchart.piezo.admin.views.html.triggersLayout(triggersByGroup, currentTrigger) { 17 |

Select a trigger

18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 |
Scheduler name@schedulerMetadata.getSchedulerName()
Total triggers@{triggersByGroup.foldLeft(0)((a, b) => a + b._2.length)}
30 | 31 |

Upcoming Triggers

32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | @defining(DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")) { dtf => 46 | @defining(new Date()) { now => 47 | @upcomingTriggers.map { trigger => 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | } 58 | } 59 | } 60 | 61 |
Next Fire TimeFollowing Fire TimeTrigger GroupTrigger NameJob GroupJob Name
@dtf.print(new DateTime(trigger.getFireTimeAfter(now)))@dtf.print(new DateTime(trigger.getFireTimeAfter(trigger.getFireTimeAfter(now))))@trigger.getKey.getGroup@trigger.getKey.getName@trigger.getJobKey.getGroup@trigger.getJobKey.getName
62 | } 63 | -------------------------------------------------------------------------------- /admin/app/com/lucidchart/piezo/admin/views/triggersLayout.scala.html: -------------------------------------------------------------------------------- 1 | @( 2 | triggersByGroup: scala.collection.mutable.Buffer[(String, scala.collection.immutable.List[org.quartz.TriggerKey])], 3 | currentTrigger: Option[org.quartz.Trigger], 4 | scripts: List[String] = List[String]() 5 | )( 6 | detailsContent: Html 7 | )( 8 | implicit 9 | request: play.api.mvc.Request[AnyContent] 10 | ) 11 | @import com.lucidchart.piezo.admin.controllers.{routes=>piezoRoutes} 12 | @import com.lucidchart.piezo.admin.views 13 | 14 | @com.lucidchart.piezo.admin.views.html.main("Piezo Triggers", scripts) { 15 |
16 |
17 |

Trigger groups

18 |
19 | @triggersByGroup.map { triggerGroup => 20 |
21 |
22 |
23 | 24 | @triggerGroup._1 25 | 26 |
27 |
28 |
31 | } else { 32 | class="panel-collapse collapse "> 33 | } 34 |
35 |
36 | @triggerGroup._2.map { triggerKey => 37 | 44 | @triggerKey.getName() 45 | 46 | } 47 |
48 |
49 |
50 |
51 | } 52 |
53 |
54 |
55 | 56 | @detailsContent 57 |
58 |
59 | } 60 | -------------------------------------------------------------------------------- /admin/build.sbt: -------------------------------------------------------------------------------- 1 | enablePlugins(PlayScala) 2 | enablePlugins(SystemdPlugin) 3 | 4 | import play.sbt.routes.RoutesKeys 5 | 6 | bashScriptExtraDefines ++= Seq( 7 | s"addJava -Dorg.quartz.properties=${defaultLinuxConfigLocation.value}/${(Linux / packageName).value}/quartz.properties", 8 | "addJava -Dpidfile.path=/run/piezo-admin/piezo-admin.pid", 9 | s"addJava -Dhttp.port=${PlayKeys.playDefaultPort.value}", 10 | ) 11 | 12 | // Workaround for https://github.com/playframework/playframework/issues/7382 13 | // so we don't get unused import warnings 14 | RoutesKeys.routesImport := Seq.empty 15 | // templateImports := Seq.empty 16 | 17 | javaOptions += s"-Dorg.quartz.properties=${(Compile / resourceDirectory).value / "quartz.properties"}" 18 | 19 | libraryDependencies ++= Seq( 20 | jdbc, 21 | "org.ow2.asm" % "asm" % "8.0.1", 22 | "ch.qos.logback" % "logback-classic" % "1.5.16", 23 | "org.quartz-scheduler" % "quartz" % "2.5.0", 24 | "org.quartz-scheduler" % "quartz-jobs" % "2.5.0", 25 | "com.softwaremill.macwire" %% "macros" % "2.6.6" % "provided", 26 | specs2 % Test, 27 | ) 28 | 29 | scalacOptions ++= Seq( 30 | "-Wconf:src=.*html&msg=unused import:s", 31 | ) 32 | 33 | Universal / doc / sources := Seq.empty 34 | Debian / doc / sources := Seq.empty 35 | 36 | Debian / version := { 37 | val noDashVersion = (Compile / version).value.replace("-", "~") 38 | if (noDashVersion.matches("^\\d.*")) { 39 | noDashVersion 40 | } else { 41 | "0~" + noDashVersion 42 | } 43 | } 44 | 45 | maintainer := "Lucid Software, Inc. 
" 46 | 47 | name := "piezo-admin" 48 | 49 | packageDescription := "Piezo web admin" 50 | 51 | PlayKeys.playDefaultPort := 8001 52 | 53 | Debian / defaultLinuxStartScriptLocation := "/lib/systemd/system" 54 | 55 | publishTo := sonatypePublishToBundle.value 56 | -------------------------------------------------------------------------------- /admin/conf/application.conf: -------------------------------------------------------------------------------- 1 | # This is the main configuration file for the application. 2 | # ~~~~~ 3 | 4 | # Secret key 5 | # ~~~~~ 6 | # The secret key is used to secure cryptographics functions. 7 | # If you deploy your application to several instances be sure to use the same key! 8 | play.http.secret.key="Z`ey`O4[5:83q8/3NupE>cSqiJRgAKCpruC/B4ns6/6p0?UF<@?6Ics0mnW>tCi4" 9 | 10 | # The application languages 11 | # ~~~~~ 12 | i18n.langs = ["en"] 13 | 14 | # Global object class 15 | # ~~~~~ 16 | # Define the Global object class for this application. 17 | # Default to Global in the root package. 18 | # application.global=Global 19 | 20 | # Router 21 | # ~~~~~ 22 | # Define the Router object to use for this application. 23 | # This router will be looked up first when the application is starting up, 24 | # so make sure this is the entry point. 25 | # Furthermore, it's assumed your route file is named properly. 26 | # So for an application router like `my.application.Router`, 27 | # you may need to define a router file `conf/my.application.routes`. 28 | # Default to Routes in the root package (and conf/routes) 29 | # application.router=my.application.Routes 30 | 31 | # Database configuration 32 | # ~~~~~ 33 | # You can declare as many datasources as you want. 
34 | # By convention, the default datasource is named `default` 35 | # 36 | # db.default.driver=org.h2.Driver 37 | # db.default.url="jdbc:h2:mem:play" 38 | # db.default.user=sa 39 | # db.default.password="" 40 | 41 | # Evolutions 42 | # ~~~~~ 43 | # You can disable evolutions if needed 44 | # evolutionplugin=disabled 45 | com.lucidchart.piezo.heartbeatFile="/tmp/piezo/workerHeartbeatFile" 46 | com.lucidchart.piezo.admin.production=false 47 | healthCheck.worker.minutesBetween=5 48 | play.application.loader=com.lucidchart.piezo.admin.PiezoAdminApplicationLoader 49 | 50 | # Monitoring teams 51 | # ~~~~~ 52 | # Path to a JSON file that fills the "Monitoring Team" dropdown on editTrigger 53 | # in the admin UI with a predefined set of team names. File format: 54 | # [ 55 | # {"name": "team1"}, 56 | # {"name": "team2"} 57 | # ] 58 | # If this is left blank, monitoring team will be a freeform input. 59 | # com.lucidchart.piezo.admin.monitoringTeams.path = "/etc/piezo/teams.json" 60 | -------------------------------------------------------------------------------- /admin/conf/logger.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | ${application.home}/logs/application.log 7 | 8 | %date - [%level] - from %logger{10} in %.15thread %message %xException%n 9 | 10 | 11 | 12 | 13 | 14 | %date - %coloredLevel - from %logger{10} in %.15thread %message %xException{8}%n 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /admin/conf/quartz.properties: -------------------------------------------------------------------------------- 1 | #============================================================================ 2 | # Configure Main Scheduler Properties 3 | #============================================================================ 4 | 5 | org.quartz.scheduler.instanceName: Lucid 6 | org.quartz.scheduler.instanceId: AUTO 7 | 
org.quartz.scheduler.skipUpdateCheck: true 8 | org.quartz.scheduler.classLoadHelper.class: com.lucidchart.piezo.GeneratorClassLoader 9 | 10 | #============================================================================ 11 | # Configure ThreadPool 12 | #============================================================================ 13 | 14 | org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool 15 | org.quartz.threadPool.threadCount: 2 16 | org.quartz.threadPool.threadPriority: 5 17 | 18 | #============================================================================ 19 | # Configure JobStore 20 | #============================================================================ 21 | 22 | org.quartz.jobStore.misfireThreshold: 120000 23 | 24 | org.quartz.jobStore.class=org.quartz.impl.jdbcjobstore.JobStoreTX 25 | org.quartz.jobStore.driverDelegateClass=org.quartz.impl.jdbcjobstore.StdJDBCDelegate 26 | org.quartz.jobStore.useProperties=false 27 | org.quartz.jobStore.dataSource=jobs 28 | org.quartz.jobStore.tablePrefix=QRTZ_ 29 | org.quartz.jobStore.isClustered=true 30 | 31 | #============================================================================ 32 | # Configure Datasources 33 | #============================================================================ 34 | 35 | org.quartz.dataSource.jobs.driver: com.mysql.cj.jdbc.Driver 36 | org.quartz.dataSource.jobs.URL: jdbc:mysql://localhost:3306/jobs 37 | org.quartz.dataSource.jobs.user: dev 38 | org.quartz.dataSource.jobs.password: dev 39 | org.quartz.dataSource.jobs.maxConnections: 10 40 | org.quartz.dataSource.jobs.validationQuery: select 0 41 | org.quartz.dataSource.jobs.connectionProvider.class = com.lucidchart.piezo.BeanConnectionProvider 42 | 43 | #============================================================================ 44 | # Configure Plugins 45 | #============================================================================ 46 | 47 | org.quartz.plugin.triggHistory.class: 
org.quartz.plugins.history.LoggingJobHistoryPlugin 48 | 49 | org.quartz.plugin.jobInitializer.class: org.quartz.plugins.xml.XMLSchedulingDataProcessorPlugin 50 | org.quartz.plugin.jobInitializer.fileNames: prod.xml 51 | org.quartz.plugin.jobInitializer.failOnFileNotFound: false 52 | org.quartz.plugin.jobInitializer.scanInterval: 120 53 | org.quartz.plugin.jobInitializer.wrapInUserTransaction: false 54 | -------------------------------------------------------------------------------- /admin/conf/routes: -------------------------------------------------------------------------------- 1 | # Routes 2 | # This file defines all application routes (Higher priority routes first) 3 | # ~~~~ 4 | 5 | # Home page 6 | GET / com.lucidchart.piezo.admin.controllers.ApplicationController.index 7 | GET /jobs com.lucidchart.piezo.admin.controllers.Jobs.getIndex 8 | GET /jobs/new com.lucidchart.piezo.admin.controllers.Jobs.getNewJobForm(templateGroup: Option[String] ?= None, templateName: Option[String] ?= None) 9 | GET /jobs/:group/:name com.lucidchart.piezo.admin.controllers.Jobs.getJob(group: String, name: String) 10 | POST /jobs/:group/:name com.lucidchart.piezo.admin.controllers.Jobs.putJob(group: String, name: String) 11 | DELETE /jobs/:group/:name com.lucidchart.piezo.admin.controllers.Jobs.deleteJob(group: String, name: String) 12 | GET /jobs/:group/:name/editor com.lucidchart.piezo.admin.controllers.Jobs.getEditJobAction(group: String, name: String) 13 | POST /jobs com.lucidchart.piezo.admin.controllers.Jobs.postJob 14 | 15 | POST /data/jobs com.lucidchart.piezo.admin.controllers.Jobs.postJobs 16 | GET /data/jobs com.lucidchart.piezo.admin.controllers.Jobs.getJobsDetail 17 | GET /data/jobs/:group/:name com.lucidchart.piezo.admin.controllers.Jobs.getJobDetail(group: String, name: String) 18 | 19 | GET /typeahead/jobs/:sofar com.lucidchart.piezo.admin.controllers.Jobs.jobGroupTypeAhead(sofar: String) 20 | GET /typeahead/jobs/:group/:sofar 
com.lucidchart.piezo.admin.controllers.Jobs.jobNameTypeAhead(group: String, sofar: String) 21 | 22 | GET /triggers com.lucidchart.piezo.admin.controllers.Triggers.getIndex 23 | GET /triggers/new/:triggerType com.lucidchart.piezo.admin.controllers.Triggers.getNewTriggerForm(triggerType, jobGroup: String ?= "", jobName: String ?= "", templateGroup: Option[String] ?= None, templateName: Option[String] ?= None) 24 | GET /triggers/:group/:name com.lucidchart.piezo.admin.controllers.Triggers.getTrigger(group: String, name: String) 25 | POST /triggers/:group/:name com.lucidchart.piezo.admin.controllers.Triggers.putTrigger(group: String, name: String) 26 | DELETE /triggers/:group/:name com.lucidchart.piezo.admin.controllers.Triggers.deleteTrigger(group: String, name: String) 27 | GET /triggers/:group/:name/editor com.lucidchart.piezo.admin.controllers.Triggers.getEditTriggerAction(group: String, name: String) 28 | POST /triggers/:group/:name/runner com.lucidchart.piezo.admin.controllers.Triggers.triggerJob(group: String, name: String) 29 | POST /triggers com.lucidchart.piezo.admin.controllers.Triggers.postTrigger() 30 | PATCH /triggers/:group/:name com.lucidchart.piezo.admin.controllers.Triggers.patchTrigger(group: String, name: String) 31 | 32 | GET /typeahead/triggers/:sofar com.lucidchart.piezo.admin.controllers.Triggers.triggerGroupTypeAhead(sofar: String) 33 | 34 | GET /favicon.ico controllers.Assets.at(path="/public/img", file="favicon.ico") 35 | 36 | # Worker Health Check 37 | GET /health com.lucidchart.piezo.admin.controllers.HealthCheck.main() 38 | 39 | # Map static resources from the /public folder to the /assets URL path 40 | 41 | GET /assets/*file controllers.Assets.at(path="/public", file) 42 | -------------------------------------------------------------------------------- /admin/public/bootstrap-3.3.6/css/bootstrap-theme.min.css.map: -------------------------------------------------------------------------------- 1 | 
{"version":3,"sources":["less/theme.less","less/mixins/vendor-prefixes.less","less/mixins/gradients.less","less/mixins/reset-filter.less"],"names":[],"mappings":";;;;AAmBA,YAAA,aAAA,UAAA,aAAA,aAAA,aAME,YAAA,EAAA,KAAA,EAAA,eC2CA,mBAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBDvCR,mBAAA,mBAAA,oBAAA,oBAAA,iBAAA,iBAAA,oBAAA,oBAAA,oBAAA,oBAAA,oBAAA,oBCsCA,mBAAA,MAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,IAAA,iBDlCR,qBAAA,sBAAA,sBAAA,uBAAA,mBAAA,oBAAA,sBAAA,uBAAA,sBAAA,uBAAA,sBAAA,uBAAA,+BAAA,gCAAA,6BAAA,gCAAA,gCAAA,gCCiCA,mBAAA,KACQ,WAAA,KDlDV,mBAAA,oBAAA,iBAAA,oBAAA,oBAAA,oBAuBI,YAAA,KAyCF,YAAA,YAEE,iBAAA,KAKJ,aErEI,YAAA,EAAA,IAAA,EAAA,KACA,iBAAA,iDACA,iBAAA,4CAAA,iBAAA,qEAEA,iBAAA,+CCnBF,OAAA,+GH4CA,OAAA,0DACA,kBAAA,SAuC2C,aAAA,QAA2B,aAAA,KArCtE,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,QACA,iBAAA,KAgBN,aEtEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,QACA,iBAAA,KAiBN,aEvEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,QACA,iBAAA,KAkBN,UExEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,gBAAA,gBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,iBAAA,iBAEE,iBAAA,QACA,aAAA,QAMA,mBAAA,0BAAA,yBAAA,0BAAA,yBAAA,yBAAA,oBAAA,2BAAA,0BAAA,2BAAA,0BAAA,0BAAA,6BAAA,oCAAA,mCAAA,oCAAA,mCAAA,mCAME,iBAAA,QACA,iBAAA,KAmBN,aEzEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBA
AA,SACA,aAAA,QAEA,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,QACA,iBAAA,KAoBN,YE1EI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,kBAAA,kBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,mBAAA,mBAEE,iBAAA,QACA,aAAA,QAMA,qBAAA,4BAAA,2BAAA,4BAAA,2BAAA,2BAAA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,+BAAA,sCAAA,qCAAA,sCAAA,qCAAA,qCAME,iBAAA,QACA,iBAAA,KA2BN,eAAA,WClCE,mBAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,EAAA,IAAA,IAAA,iBD2CV,0BAAA,0BE3FI,iBAAA,QACA,iBAAA,oDACA,iBAAA,+CAAA,iBAAA,wEACA,iBAAA,kDACA,OAAA,+GF0FF,kBAAA,SAEF,yBAAA,+BAAA,+BEhGI,iBAAA,QACA,iBAAA,oDACA,iBAAA,+CAAA,iBAAA,wEACA,iBAAA,kDACA,OAAA,+GFgGF,kBAAA,SASF,gBE7GI,iBAAA,iDACA,iBAAA,4CACA,iBAAA,qEAAA,iBAAA,+CACA,OAAA,+GACA,OAAA,0DCnBF,kBAAA,SH+HA,cAAA,ICjEA,mBAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBD6DV,sCAAA,oCE7GI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SD2CF,mBAAA,MAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,IAAA,iBD0EV,cAAA,iBAEE,YAAA,EAAA,IAAA,EAAA,sBAIF,gBEhII,iBAAA,iDACA,iBAAA,4CACA,iBAAA,qEAAA,iBAAA,+CACA,OAAA,+GACA,OAAA,0DCnBF,kBAAA,SHkJA,cAAA,IAHF,sCAAA,oCEhII,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SD2CF,mBAAA,MAAA,EAAA,IAAA,IAAA,gBACQ,WAAA,MAAA,EAAA,IAAA,IAAA,gBDgFV,8BAAA,iCAYI,YAAA,EAAA,KAAA,EAAA,gBAKJ,qBAAA,kBAAA,mBAGE,cAAA,EAqBF,yBAfI,mDAAA,yDAAA,yDAGE,MAAA,KE7JF,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,UFqKJ,OACE,YAAA,EAAA,IAAA,EAAA,qBC3HA,mBAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,gBACQ,WAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,gBDsIV,eEtLI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF8KF,aAAA,QAKF,YEvLI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF8KF,aAAA,QAMF,eExLI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF8KF,aAAA,QAOF,cEzL
I,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF8KF,aAAA,QAeF,UEjMI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SFuMJ,cE3MI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SFwMJ,sBE5MI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SFyMJ,mBE7MI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF0MJ,sBE9MI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF2MJ,qBE/MI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF+MJ,sBElLI,iBAAA,yKACA,iBAAA,oKACA,iBAAA,iKFyLJ,YACE,cAAA,IC9KA,mBAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,EAAA,IAAA,IAAA,iBDgLV,wBAAA,8BAAA,8BAGE,YAAA,EAAA,KAAA,EAAA,QEnOE,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SFiOF,aAAA,QALF,+BAAA,qCAAA,qCAQI,YAAA,KAUJ,OCnME,mBAAA,EAAA,IAAA,IAAA,gBACQ,WAAA,EAAA,IAAA,IAAA,gBD4MV,8BE5PI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SFyPJ,8BE7PI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF0PJ,8BE9PI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF2PJ,2BE/PI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF4PJ,8BEhQI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF6PJ,6BEjQI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SFoQJ,MExQI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SFsQF,aAAA,QC3NA,mBAAA,MAAA,EAAA,IAAA,IAAA,gBAAA,EAAA,IAAA,EAAA,qBACQ,WAAA,MAAA,EAAA,IAAA,IAAA,gBAAA,EAAA,IAAA,EAAA"} -------------------------------------------------------------------------------- /admin/public/bootstrap-3.3.6/fonts/glyphicons-halflings-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lucidsoftware/piezo/061182e3d070cbe2c0dddf0ad6ee8dc5b5fda8df/admin/public/bootstrap-3.3.6/fonts/glyphicons-halflings-regular.eot 
-------------------------------------------------------------------------------- /admin/public/bootstrap-3.3.6/fonts/glyphicons-halflings-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lucidsoftware/piezo/061182e3d070cbe2c0dddf0ad6ee8dc5b5fda8df/admin/public/bootstrap-3.3.6/fonts/glyphicons-halflings-regular.ttf -------------------------------------------------------------------------------- /admin/public/bootstrap-3.3.6/fonts/glyphicons-halflings-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lucidsoftware/piezo/061182e3d070cbe2c0dddf0ad6ee8dc5b5fda8df/admin/public/bootstrap-3.3.6/fonts/glyphicons-halflings-regular.woff -------------------------------------------------------------------------------- /admin/public/bootstrap-3.3.6/fonts/glyphicons-halflings-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lucidsoftware/piezo/061182e3d070cbe2c0dddf0ad6ee8dc5b5fda8df/admin/public/bootstrap-3.3.6/fonts/glyphicons-halflings-regular.woff2 -------------------------------------------------------------------------------- /admin/public/bootstrap-3.3.6/js/npm.js: -------------------------------------------------------------------------------- 1 | // This file is autogenerated via the `commonjs` Grunt task. You can require() this file in a CommonJS environment. 
2 | require('../../js/transition.js') 3 | require('../../js/alert.js') 4 | require('../../js/button.js') 5 | require('../../js/carousel.js') 6 | require('../../js/collapse.js') 7 | require('../../js/dropdown.js') 8 | require('../../js/modal.js') 9 | require('../../js/tooltip.js') 10 | require('../../js/popover.js') 11 | require('../../js/scrollspy.js') 12 | require('../../js/tab.js') 13 | require('../../js/affix.js') -------------------------------------------------------------------------------- /admin/public/img/LucidLogo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lucidsoftware/piezo/061182e3d070cbe2c0dddf0ad6ee8dc5b5fda8df/admin/public/img/LucidLogo.png -------------------------------------------------------------------------------- /admin/public/img/PiezoLogo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lucidsoftware/piezo/061182e3d070cbe2c0dddf0ad6ee8dc5b5fda8df/admin/public/img/PiezoLogo.png -------------------------------------------------------------------------------- /admin/public/img/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lucidsoftware/piezo/061182e3d070cbe2c0dddf0ad6ee8dc5b5fda8df/admin/public/img/favicon.ico -------------------------------------------------------------------------------- /admin/public/js/jobData.js: -------------------------------------------------------------------------------- 1 | (function () { 2 | var fixDataMapIndexes = function() { 3 | $('[id*=job-data-map]').each(function(i, element){ 4 | if (i % 2 === 0) { 5 | var itemNumber = i / 2; 6 | element.id = 'job-data-map_' + itemNumber + '_key'; 7 | element.name = 'job-data-map[' + itemNumber + '].key'; 8 | } 9 | 10 | if (i % 2 !== 0) { 11 | var itemNumber = (i - 1) / 2; 12 | element.id = 'job-data-map_' + itemNumber + '_value'; 13 | element.name = 
'job-data-map[' + itemNumber + '].value'; 14 | } 15 | }) 16 | }; 17 | 18 | $('.job-data-map').on('click', '.job-data-delete a', function () { 19 | $(this).parent().next().next().remove(); 20 | $(this).parent().next().remove(); 21 | $(this).parent().remove(); 22 | 23 | fixDataMapIndexes(); 24 | }); 25 | 26 | $('.job-data-add').click(function () { 27 | var key = $(this).prev().prev().clone(); 28 | var value = $(this).prev().clone(); 29 | 30 | $(this).prev().prev().before($('')); 31 | 32 | key.find('input').val(''); 33 | value.find('input').val(''); 34 | 35 | $(this).before(key); 36 | $(this).before(value); 37 | 38 | fixDataMapIndexes(); 39 | }); 40 | 41 | $('form').submit(function () { 42 | fixDataMapIndexes(); 43 | }); 44 | })(); -------------------------------------------------------------------------------- /admin/public/js/triggerMonitoring.js: -------------------------------------------------------------------------------- 1 | window.addEventListener('load', () => { 2 | const priorityInput = document.getElementById('triggerMonitoringPriority'); 3 | const setMonitoringFieldVisibility = () => { 4 | const priority = priorityInput.value; 5 | const monitoringDetails = document.getElementById('triggerMonitoringDetails'); 6 | if (priority == 'Off') { 7 | monitoringDetails.style.display = 'none'; // hide 8 | } else { 9 | monitoringDetails.style.display = 'block'; // show 10 | } 11 | }; 12 | 13 | priorityInput.addEventListener('change', setMonitoringFieldVisibility); 14 | setMonitoringFieldVisibility(); 15 | }, {once: true}); 16 | -------------------------------------------------------------------------------- /admin/public/js/typeAhead.js: -------------------------------------------------------------------------------- 1 | (function () { 2 | var baseUrl = '/typeahead/'; 3 | var jobUrl = baseUrl + 'jobs/'; 4 | var triggerUrl = baseUrl + 'triggers/'; 5 | 6 | function sourceFunc(url, key, groupInput) { 7 | return function(request, response) { 8 | if (groupInput && 
!groupInput.val()) { 9 | response([]); 10 | } else { 11 | var append = (groupInput && groupInput.val()) ? 12 | groupInput.val() + '/' : ''; 13 | 14 | $.get(url + append + request.term, function(data) { 15 | response(data[key]); 16 | }); 17 | } 18 | }; 19 | } 20 | 21 | $('input.job-group-type-ahead').autocomplete({ 22 | source: sourceFunc(jobUrl, 'groups'), 23 | }); 24 | 25 | $('input.job-name-type-ahead').autocomplete({ 26 | source: sourceFunc(jobUrl, 'jobs', $('#jobGroup')), 27 | }); 28 | 29 | $('input.trigger-group-type-ahead').autocomplete({ 30 | source: sourceFunc(triggerUrl, 'groups'), 31 | }); 32 | })(); -------------------------------------------------------------------------------- /admin/public/stylesheets/main.css: -------------------------------------------------------------------------------- 1 | 2 | body { 3 | padding-top: 60px; /* 40px to make the container go all the way to the bottom of the topbar */ 4 | } 5 | 6 | .flash-container { 7 | text-align: center; 8 | } 9 | 10 | .flash-container div { 11 | margin-top: 10px; 12 | padding: 10px; 13 | } 14 | 15 | a:link.piezoicon, a:visited.piezoicon { 16 | color: #29aae1 !important; 17 | } 18 | 19 | a.piezoicon:hover, a.piezoicon:active, a.piezoicon:focus { 20 | color: #2083AE; 21 | } 22 | 23 | div.jobs-list .list-group { 24 | margin-bottom: 0px; 25 | } 26 | 27 | div.jobs-list .panel-heading { 28 | padding: 0px 0px 0px 10px; 29 | } 30 | 31 | div.jobs-list .panel-body { 32 | padding-top: 0px; 33 | padding-right: 0px; 34 | padding-bottom: 0px; 35 | } 36 | 37 | div.jobs-list .list-group-item { 38 | border-style: none; 39 | overflow: hidden; 40 | } 41 | 42 | div.jobs-list h5.title { 43 | margin-top: 0px; 44 | margin-bottom: 0px; 45 | overflow: hidden; 46 | } 47 | 48 | div.jobs-list a[data-toggle="collapse"] { 49 | display: block; 50 | padding: 10px; 51 | } 52 | 53 | div.jobs-list a.list-group-item { 54 | font-size: 12px; 55 | } 56 | 57 | h3.job-name { 58 | max-width: 585px; 59 | } 60 | 61 | div.job-data-delete, 
div.job-data-add { 62 | float: right; 63 | margin-right: 240px; 64 | } 65 | 66 | .table-fixed-first-col { 67 | width: auto; 68 | } 69 | 70 | .table-fixed-first-col td:first-child { 71 | white-space: nowrap; 72 | } 73 | 74 | .table-fixed-first-col td:nth-child(2) { 75 | width: 100%; 76 | } 77 | 78 | .job-data td:first-child { 79 | width: 33%; 80 | } 81 | 82 | table > tbody > tr > td { 83 | min-width: 150px; 84 | } 85 | 86 | .form-inline-control { 87 | display: inline-block; 88 | margin: 3px 2px; 89 | margin-bottom: 6px; 90 | } 91 | 92 | .form-horizontal-inline { 93 | margin-bottom: 0px; 94 | } 95 | 96 | .piezo-button { 97 | padding: 6px 8px; 98 | font-size: 20px; 99 | line-height: 1.33; 100 | color: #29aae1; 101 | } 102 | 103 | a.piezo-button:hover, a.piezo-button:focus { 104 | color: #2083AE; 105 | } 106 | 107 | .piezo-label { 108 | font-weight: normal; 109 | } 110 | 111 | .submit-btn { 112 | margin-top: 10px; 113 | } 114 | 115 | .inline-header { 116 | display: inline-block; 117 | } 118 | 119 | th, td { 120 | word-wrap: break-word; 121 | } 122 | 123 | .column-long-content { 124 | width: 17%; 125 | } 126 | 127 | .column-success { 128 | width: 70px; 129 | } 130 | 131 | .table-fixed-layout { 132 | table-layout: fixed; 133 | } 134 | 135 | .column-time { 136 | min-width: 85px; 137 | } -------------------------------------------------------------------------------- /admin/test/IntegrationSpec.scala: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import org.specs2.mutable.* 4 | 5 | import com.lucidchart.piezo.admin.PiezoAdminApplicationLoader 6 | 7 | import play.api.test.* 8 | //import play.api.test.Helpers.* 9 | 10 | /** 11 | * add your integration spec here. 
An integration test will fire up a whole play application in a real (or headless) 12 | * browser 13 | */ 14 | class IntegrationSpec extends Specification { 15 | 16 | "Application" should { 17 | 18 | "work from within a browser" in new WithApplicationLoader(new PiezoAdminApplicationLoader) { 19 | override def running(): Unit = { 20 | // Getting selenium exception 21 | // https://groups.google.com/forum/#!msg/play-framework/ueXtbcG1oIo/Gc9yKQ4gd10J 22 | // browser.goTo("http://localhost:3333/") 23 | // browser.pageSource must contain("was created by") 24 | 25 | success 26 | } 27 | } 28 | 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /admin/test/com/lucidchart/piezo/admin/controllers/HealthCheckTest.scala: -------------------------------------------------------------------------------- 1 | package com.lucidchart.piezo.admin.controllers 2 | 3 | import java.io.{File, FileWriter} 4 | import org.joda.time.DateTime 5 | import org.joda.time.format.ISODateTimeFormat 6 | import org.specs2.mutable.* 7 | import play.api.Configuration 8 | import play.api.test.Helpers.* 9 | import play.api.test.* 10 | import org.joda.time.format.DateTimeFormatter 11 | 12 | class HealthCheckTest extends Specification { 13 | 14 | val filename = "HeartbeatTestFile" 15 | val dtf: DateTimeFormatter = ISODateTimeFormat.dateTimeNoMillis().withZoneUTC() 16 | 17 | trait FileCleaner extends After { 18 | def after: Unit = new File(filename).delete 19 | } 20 | 21 | private def testConfig(heartbeatFile: String) = Configuration("com.lucidchart.piezo.heartbeatFile" -> heartbeatFile) 22 | 23 | "HealthCheck" should { 24 | "send 200 when the worker timestamp is recent" in new FileCleaner { 25 | val file = new File(filename) 26 | val fileWrite = new FileWriter(file) 27 | val heartbeatTime = dtf.print(new DateTime(System.currentTimeMillis())) 28 | fileWrite.write(heartbeatTime) 29 | fileWrite.close() 30 | val healthCheck = new 
HealthCheck(testConfig(filename), Helpers.stubControllerComponents()) 31 | val response = healthCheck.main()(FakeRequest()) 32 | status(response) must equalTo(OK) 33 | } 34 | 35 | "send 503 when the worker timestamp is too far in the past" in new FileCleaner { 36 | val file = new File(filename) 37 | val fileWrite = new FileWriter(file) 38 | val heartbeatTime = dtf.print(new DateTime(System.currentTimeMillis()).minusMinutes(10)) 39 | fileWrite.write(heartbeatTime) 40 | fileWrite.close() 41 | val healthCheck = new HealthCheck(testConfig(filename), Helpers.stubControllerComponents()) 42 | val response = healthCheck.main()(FakeRequest()) 43 | status(response) must equalTo(SERVICE_UNAVAILABLE) 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /admin/test/com/lucidchart/piezo/admin/controllers/JobsControllerTest.scala: -------------------------------------------------------------------------------- 1 | package com.lucidchart.piezo.admin.controllers 2 | 3 | import org.specs2.mutable.* 4 | 5 | // import play.api.test.* 6 | // import play.api.test.Helpers.* 7 | // import com.lucidchart.piezo.jobs.monitoring.HeartBeat 8 | // import com.lucidchart.piezo.WorkerSchedulerFactory 9 | // import TestUtil.* 10 | import ch.qos.logback.classic.{Level, Logger} 11 | import org.slf4j.LoggerFactory 12 | //import org.quartz.Job 13 | //import com.lucidchart.piezo.util.DummyClassGenerator 14 | 15 | /** 16 | * Add your spec here. You can mock out a whole application including requests, plugins etc. For more information, 17 | * consult the wiki. 
18 | */ 19 | class JobsControllerTest extends Specification { 20 | val rootLogger: Logger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[Logger] 21 | rootLogger.setLevel(Level.DEBUG) 22 | 23 | "Jobs" should { 24 | "create dummy job class" in { 25 | // TODO: figure out how to set the classpath in a test 26 | // val rootPackageClassName = "foo" 27 | // val rootPackageClassSource = Jobs.getDummyJobSource(rootPackageClassName) 28 | // val dummyClassGenerator = new DummyClassGenerator() 29 | // val rootPackageDummyClass: Option[Class[_]] = dummyClassGenerator.generate(rootPackageClassName, rootPackageClassSource) 30 | // rootPackageDummyClass.get.getName() must equalTo(rootPackageClassName) 31 | // rootPackageDummyClass.get.getInterfaces.contains(classOf[Job]) must beTrue 32 | 33 | // val nonRootPackageClassName = "bar.foo" 34 | // val nonRootPackageClassSource = Jobs.getDummyJobSource(nonRootPackageClassName) 35 | // val dummyClassGenerator2 = new DummyClassGenerator() 36 | // val nonRootPackageDummyClass: Option[Class[_]] = dummyClassGenerator2.generate(nonRootPackageClassName, nonRootPackageClassSource) 37 | // nonRootPackageDummyClass.get.getName() must equalTo(nonRootPackageClassName) 38 | success 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /admin/test/com/lucidchart/piezo/admin/controllers/JobsService.scala: -------------------------------------------------------------------------------- 1 | package com.lucidchart.piezo.admin.controllers 2 | 3 | import org.specs2.mutable.* 4 | import play.api.test.* 5 | import play.api.test.Helpers.* 6 | import com.lucidchart.piezo.jobs.monitoring.HeartBeat 7 | import com.lucidchart.piezo.WorkerSchedulerFactory 8 | import TestUtil.* 9 | import ch.qos.logback.classic.{Level, Logger} 10 | import org.slf4j.LoggerFactory 11 | import play.api.mvc.{AnyContentAsEmpty, Result} 12 | import java.util.Properties 13 | import play.api.Configuration 14 | 
import scala.concurrent.Future 15 | import com.lucidchart.piezo.admin.models.MonitoringTeams 16 | 17 | /** 18 | * Add your spec here. You can mock out a whole application including requests, plugins etc. For more information, 19 | * consult the wiki. 20 | */ 21 | class JobsService extends Specification { 22 | val rootLogger: Logger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[Logger] 23 | rootLogger.setLevel(Level.DEBUG) 24 | 25 | private val jobView = new com.lucidchart.piezo.admin.views.html.job(Configuration.empty) 26 | 27 | "Jobs" should { 28 | 29 | "send 404 on a non-existent job request" in { 30 | val schedulerFactory: WorkerSchedulerFactory = new WorkerSchedulerFactory() 31 | 32 | val propertiesStream = getClass().getResourceAsStream("/quartz_test.properties") 33 | val properties = new Properties 34 | properties.load(propertiesStream) 35 | schedulerFactory.initialize(properties) 36 | val scheduler = schedulerFactory.getScheduler() 37 | 38 | val jobsController = 39 | new Jobs( 40 | scheduler, 41 | TestUtil.mockModelComponents, 42 | jobView, 43 | Helpers.stubControllerComponents(), 44 | MonitoringTeams.empty, 45 | ) 46 | val request: FakeRequest[AnyContentAsEmpty.type] = FakeRequest(GET, "/jobs/missinggroup/missingname") 47 | val missingJob: Future[Result] = jobsController.getJob("missinggroup", "missingname")(request) 48 | 49 | status(missingJob) must equalTo(NOT_FOUND) 50 | contentType(missingJob) must beSome("text/html") 51 | contentAsString(missingJob) must contain("Job missinggroup missingname not found") 52 | } 53 | 54 | "send valid job details" in { 55 | val schedulerFactory: WorkerSchedulerFactory = new WorkerSchedulerFactory() 56 | val propertiesStream = getClass().getResourceAsStream("/quartz_test.properties") 57 | val properties = new Properties 58 | properties.load(propertiesStream) 59 | schedulerFactory.initialize(properties) 60 | val scheduler = schedulerFactory.getScheduler() 61 | createJob(scheduler) 62 | 63 | val 
package com.lucidchart.piezo.admin.controllers

import org.quartz.{JobBuilder, Scheduler, SimpleScheduleBuilder, TriggerBuilder}
import com.lucidchart.piezo.jobs.monitoring.HeartBeat
import com.lucidchart.piezo.admin.models.ModelComponents
import java.util.Date

/**
 * Shared fixtures for the admin controller specs: well-known job/trigger names
 * plus a helper that (re)schedules a HeartBeat job under those names.
 */
object TestUtil {
  val jobGroup = "testJobGroup"
  val jobName = "testJobName"
  val triggerGroup = "testTriggerGroup"
  val triggerName = "testTriggerName"

  /**
   * Registers a fresh HeartBeat job with a simple 5-second, repeat-once trigger,
   * removing any previously-registered job with the same key first so repeated
   * test runs start clean.
   *
   * @param scheduler the Quartz scheduler to register the job with
   * @return the first fire time reported by the scheduler
   */
  def createJob(scheduler: Scheduler): Date = {
    val detail =
      JobBuilder
        .newJob(classOf[HeartBeat])
        .withIdentity(jobName, jobGroup)
        .withDescription("test job description")
        .build()

    val schedule =
      SimpleScheduleBuilder.simpleSchedule
        .withIntervalInSeconds(5)
        .withRepeatCount(1)

    // NOTE(review): withDescription is called twice on this builder; the second
    // call ("test schedule description") overwrites the first. Preserved as-is.
    val trigger =
      TriggerBuilder.newTrigger
        .withIdentity(triggerName, triggerGroup)
        .withDescription("test trigger description")
        .withSchedule(schedule)
        .withDescription("test schedule description")
        .build()

    scheduler.deleteJob(detail.getKey())
    scheduler.scheduleJob(detail, trigger)
  }

  // Model layer whose connection factory always fails; lets specs exercise
  // database-error paths without a real database.
  val mockModelComponents = new ModelComponents(() => throw new Exception("fake connection"))
}
package com.lucidchart.piezo.admin.util

import com.lucidchart.piezo.admin.utils.CronHelper
import org.specs2.mutable.Specification

/**
 * Exercises CronHelper.getMaxInterval: the longest gap, in seconds, between two
 * consecutive fire times of a Quartz cron expression. IMPOSSIBLE marks
 * expressions that can never fire again.
 */
class CronHelperTest extends Specification {

  // Durations in seconds, so the expected values below read naturally.
  val SECOND: Int = 1
  val MINUTE: Int = 60 * SECOND
  val HOUR: Int = 60 * MINUTE
  val DAY: Int = 24 * HOUR
  val WEEK: Int = 7 * DAY
  val YEAR: Int = 365 * DAY
  val LEAP_YEAR: Int = YEAR + DAY
  val IMPOSSIBLE: Long = Long.MaxValue

  // Convenience wrapper so each assertion stays on one line.
  def maxInterval(str: String): Long = CronHelper.getMaxInterval(str)

  "CronHelper" should {
    "validate basic cron expressions" in {
      maxInterval("* * * * * ?") mustEqual SECOND // every second
      maxInterval("0 * * * * ?") mustEqual MINUTE // second 0 of every minute
      maxInterval("0 0 * * * ?") mustEqual HOUR // second 0 during minute 0 of every hour
      maxInterval("0 0 0 * * ?") mustEqual DAY // second 0 during minute 0 during hour 0 of every day
      maxInterval("* 0 * * * ?") mustEqual (HOUR - MINUTE + SECOND) // every second during minute 0
      maxInterval("* * 0 * * ?") mustEqual (DAY - HOUR + SECOND) // every second during hour 0
    }

    "validate more basic cron expressions" in {
      maxInterval("0/1 0-59 */1 * * ?") mustEqual SECOND // variations on 1 second
      maxInterval("* * 0-23 * * ?") mustEqual SECOND // full-range hour field is equivalent to *
      maxInterval("22 2/6 * * * ?") mustEqual 6 * MINUTE // 22nd second of every 6th minute after minute 2
      maxInterval("*/15 * * * * ?") mustEqual 15 * SECOND
      maxInterval("30 10 */1 * * ?") mustEqual HOUR
      maxInterval("15 * * * * ?") mustEqual MINUTE
      maxInterval("3,2,1,0 45,44,16,15 6,5,4 * * ? *") mustEqual (21 * HOUR + 29 * MINUTE + 57 * SECOND)
      maxInterval("50-0 30-40 14-12 * * ?") mustEqual (1 * HOUR + 49 * MINUTE + 1 * SECOND) // wrap-around ranges
      maxInterval("0 0 8-4 * * ?") mustEqual 4 * HOUR
      maxInterval("0 0 0/6 * * ? *") mustEqual 6 * HOUR
      maxInterval("0 10,20,30 * * ? *") mustEqual 40 * MINUTE
      maxInterval("0-10/2 0-5,20-25 0,5-11/2,20-23 * ? *") mustEqual 8 * HOUR + 34 * MINUTE + 50 * SECOND
    }

    "validate complex cron expressions" in {
      maxInterval("0/15 * * 1-12 * ?") mustEqual 19 * DAY + 15 * SECOND // every 15 seconds on days 1-12 of the month
      maxInterval("* * * * 1-11 ?") mustEqual 31 * DAY + SECOND // every second of every month except for december
      maxInterval("* * * * * ? 1998") mustEqual IMPOSSIBLE // every second of 1998
      maxInterval("0 0 0 29 2 ? *") mustEqual 8 * YEAR + DAY // 8 years since we skip leap day roughly every 100 years
      maxInterval("* * * 29 2 ? *") mustEqual 8 * YEAR + SECOND // every second on leap day
      maxInterval("0 11 11 11 11 ?") mustEqual LEAP_YEAR // every november 11th at 11:11am
      maxInterval("1 2 3 ? * 6") mustEqual WEEK // every saturday
      maxInterval("0 15 10 ? * 6#3") mustEqual 5 * WEEK // third saturday of every month
      maxInterval("0 15 10 ? * MON-FRI") mustEqual 3 * DAY // every weekday
      maxInterval("0 0 0/6 * 1,2,3,4,5,6,7,8,9,10,11,12 ? *") mustEqual DAY - (18 * HOUR)
      maxInterval("* * * 1-31 * ?") mustEqual SECOND // full day-of-month range behaves like *
      maxInterval("* * * * 1-12 ?") mustEqual SECOND // full month range behaves like *
      maxInterval("* * * ? * 1-7") mustEqual SECOND // full day-of-week range behaves like *
    }
  }
}
dummy class" in { 31 | // val dummyClassGenerator = new DummyClassGenerator() 32 | // TODO: figure out why it won't load 33 | // val dummyClass: Option[Class[_]] = dummyClassGenerator.generate(className, writer.toString) 34 | // val dummyMethod = dummyClass.get.getDeclaredMethod(methodName, classOf[java.lang.Integer]) 35 | // val echoParameter = random.nextInt() 36 | // val result = dummyMethod.invoke(null, echoParameter: java.lang.Integer) 37 | // result must equalTo(echoParameter) 38 | success 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /admin/test/resources/quartz_test.properties: -------------------------------------------------------------------------------- 1 | 2 | #============================================================================ 3 | # Configure Main Scheduler Properties 4 | #============================================================================ 5 | 6 | org.quartz.scheduler.instanceName: TestScheduler 7 | org.quartz.scheduler.instanceId: AUTO 8 | 9 | org.quartz.scheduler.skipUpdateCheck: true 10 | 11 | #============================================================================ 12 | # Configure ThreadPool 13 | #============================================================================ 14 | 15 | org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool 16 | org.quartz.threadPool.threadCount: 2 17 | org.quartz.threadPool.threadPriority: 5 18 | 19 | #============================================================================ 20 | # Configure JobStore 21 | #============================================================================ 22 | 23 | org.quartz.jobStore.misfireThreshold: 60000 24 | 25 | org.quartz.jobStore.class: org.quartz.simpl.RAMJobStore 26 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | import play.api.libs.json.Json 2 | 3 | lazy 
val admin = project.dependsOn(worker) 4 | 5 | lazy val commonSettings = Seq(publishTo := sonatypePublishToBundle.value) 6 | 7 | lazy val worker = project.settings(publishTo := sonatypePublishToBundle.value) 8 | 9 | PgpKeys.pgpPassphrase in Global := Some(Array.emptyCharArray) 10 | 11 | inThisBuild( 12 | Seq( 13 | scalaVersion := "3.3.4", 14 | credentials += Credentials( 15 | "Sonatype Nexus Repository Manager", 16 | "oss.sonatype.org", 17 | System.getenv("SONATYPE_USERNAME"), 18 | System.getenv("SONATYPE_PASSWORD"), 19 | ), 20 | developers ++= List( 21 | Developer("lucidsoftware", "Lucid Software, Inc.", "", url("https://lucid.co/")), 22 | ), 23 | licenses += "Apache License, Version 2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0"), 24 | homepage := Some(url("https://github.com/lucidsoftware/piezo")), 25 | organization := "com.lucidchart", 26 | scmInfo := Some( 27 | ScmInfo(url("https://github.com/lucidsoftware/piezo"), "scm:git:git@github.com:lucidsoftware/piezo.git"), 28 | ), 29 | version := sys.props.getOrElse("build.version", "0-SNAPSHOT"), 30 | versionScheme := Some("early-semver"), 31 | scalacOptions ++= Seq( 32 | "-no-indent", 33 | "-Wunused:linted", 34 | "-Werror", 35 | // "-Xlint", 36 | ), 37 | ), 38 | ) 39 | 40 | publishTo := sonatypePublishToBundle.value 41 | -------------------------------------------------------------------------------- /documentation/piezo_project_architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lucidsoftware/piezo/061182e3d070cbe2c0dddf0ad6ee8dc5b5fda8df/documentation/piezo_project_architecture.png -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.10.7 2 | -------------------------------------------------------------------------------- /project/build.sbt: 
-------------------------------------------------------------------------------- 1 | addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.3.1") 2 | 3 | addSbtPlugin("org.playframework" % "sbt-plugin" % "3.0.6") 4 | 5 | addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.12.2") 6 | 7 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.4") 8 | 9 | libraryDependencies += "org.playframework" %% "play-json" % "3.0.4" 10 | 11 | resolvers += Resolver.typesafeRepo("releases") 12 | -------------------------------------------------------------------------------- /worker/build.sbt: -------------------------------------------------------------------------------- 1 | import java.io.File 2 | 3 | name := "piezo-worker" 4 | 5 | Compile / mainClass := Some("com.lucidchart.piezo.Worker") 6 | 7 | run / connectInput := true 8 | 9 | libraryDependencies ++= Seq( 10 | "ch.qos.logback" % "logback-classic" % "1.5.16" % Provided, 11 | "org.quartz-scheduler" % "quartz" % "2.5.0", 12 | "org.quartz-scheduler" % "quartz-jobs" % "2.5.0", 13 | "com.zaxxer" % "HikariCP" % "5.0.1", 14 | "org.slf4j" % "slf4j-api" % "2.0.16", 15 | "org.specs2" %% "specs2-core" % "4.20.9" % Test, 16 | "mysql" % "mysql-connector-java" % "8.0.33", 17 | "javax.transaction" % "jta" % "1.1", 18 | "joda-time" % "joda-time" % "2.13.1", 19 | "org.joda" % "joda-convert" % "3.0.1", 20 | "com.typesafe" % "config" % "1.4.3", 21 | "com.datadoghq" % "java-dogstatsd-client" % "4.4.3", 22 | ) 23 | 24 | fork := true 25 | 26 | javaOptions ++= Seq( 27 | s"-Dpidfile.path=${File.createTempFile("piezoWorkerPid", null)}", 28 | s"-Dcom.lucidchart.piezo.heartbeatfile=${File.createTempFile("piezoHeartbeat", null)}", 29 | "-Dorg.quartz.properties=quartz.properties", 30 | ) 31 | 32 | scalacOptions ++= Seq( 33 | "-deprecation", 34 | "-feature", 35 | "-unchecked", 36 | ) 37 | 38 | Compile / unmanagedClasspath += sourceDirectory.value / "run" / "resources" 39 | 40 | version := sys.props.getOrElse("build.version", "0.0-SNAPSHOT") 41 | 
-------------------------------------------------------------------------------- /worker/src/main/resources/blank.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 8 | * 9 | * 10 | 11 | 12 | 13 | 14 | true 15 | 16 | false 17 | 18 | 19 | 20 | 21 | HeartBeatJob 22 | Monitoring 23 | com.lucidchart.piezo.jobs.monitoring.HeartBeat 24 | true 25 | false 26 | 27 | 28 | 29 | 30 | HeartBeatJob 31 | Monitoring 32 | HeartBeatJob 33 | Monitoring 34 | MISFIRE_INSTRUCTION_SMART_POLICY 35 | 0 * * * * ? 36 | 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /worker/src/main/resources/piezo_mysql_0.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE job_history( 2 | fire_instance_id VARCHAR(120), 3 | job_name VARCHAR(190) NOT NULL, 4 | job_group VARCHAR(190) NOT NULL, 5 | trigger_name VARCHAR(190) NOT NULL, 6 | trigger_group VARCHAR(190) NOT NULL, 7 | success BOOLEAN NOT NULL, 8 | start DATETIME NOT NULL, 9 | finish DATETIME, 10 | PRIMARY KEY(fire_instance_id), 11 | KEY job_key(job_group, job_name), 12 | KEY start_key(start) 13 | ); 14 | 15 | CREATE TABLE trigger_history( 16 | trigger_name VARCHAR(190) NOT NULL, 17 | trigger_group VARCHAR(190) NOT NULL, 18 | scheduled_start DATETIME NOT NULL, 19 | actual_start DATETIME, 20 | finish DATETIME NOT NULL, 21 | misfire BOOLEAN NOT NULL, 22 | fire_instance_id VARCHAR(120) NOT NULL, 23 | PRIMARY KEY(trigger_group, trigger_name, scheduled_start, fire_instance_id), 24 | KEY sched_start_key(scheduled_start) 25 | ); 26 | -------------------------------------------------------------------------------- /worker/src/main/resources/piezo_mysql_1.sql: -------------------------------------------------------------------------------- 1 | START TRANSACTION; 2 | 3 | ALTER TABLE `job_history` 4 | MODIFY COLUMN trigger_name VARCHAR(190) NOT NULL, 5 | MODIFY COLUMN trigger_group VARCHAR(190) NOT NULL, 6 | 
MODIFY COLUMN job_name VARCHAR(190) NOT NULL, 7 | MODIFY COLUMN job_group VARCHAR(190) NOT NULL; 8 | 9 | ALTER TABLE `trigger_history` 10 | MODIFY COLUMN trigger_name VARCHAR(190) NOT NULL, 11 | MODIFY COLUMN trigger_group VARCHAR(190) NOT NULL; 12 | 13 | COMMIT; 14 | -------------------------------------------------------------------------------- /worker/src/main/resources/piezo_mysql_2.sql: -------------------------------------------------------------------------------- 1 | 2 | ALTER TABLE `job_history` 3 | MODIFY COLUMN trigger_name VARCHAR(100) NOT NULL, 4 | MODIFY COLUMN trigger_group VARCHAR(100) NOT NULL, 5 | MODIFY COLUMN job_name VARCHAR(100) NOT NULL, 6 | MODIFY COLUMN job_group VARCHAR(100) NOT NULL, 7 | DROP KEY job_key, 8 | ADD KEY job_key (job_group, job_name, start); 9 | 10 | ALTER TABLE `trigger_history` 11 | MODIFY COLUMN trigger_name VARCHAR(100) NOT NULL, 12 | MODIFY COLUMN trigger_group VARCHAR(100) NOT NULL; 13 | -------------------------------------------------------------------------------- /worker/src/main/resources/piezo_mysql_3.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE trigger_monitoring_priority( 2 | trigger_name VARCHAR(190) NOT NULL, 3 | trigger_group VARCHAR(190) NOT NULL, 4 | priority TINYINT DEFAULT NULL, 5 | created datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, 6 | modified datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, 7 | PRIMARY KEY(trigger_group, trigger_name) 8 | ); 9 | -------------------------------------------------------------------------------- /worker/src/main/resources/piezo_mysql_4.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO trigger_monitoring_priority (trigger_name, trigger_group, priority) 2 | SELECT TRIGGER_NAME, TRIGGER_GROUP, 3 from QRTZ_TRIGGERS; 3 | -------------------------------------------------------------------------------- 
/worker/src/main/resources/piezo_mysql_5.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE trigger_monitoring_priority ADD max_error_time MEDIUMINT NOT NULL; 2 | -------------------------------------------------------------------------------- /worker/src/main/resources/piezo_mysql_6.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE job_history ADD INDEX trigger_success_key (trigger_group, trigger_name, success, start); 2 | -------------------------------------------------------------------------------- /worker/src/main/resources/piezo_mysql_7.sql: -------------------------------------------------------------------------------- 1 | UPDATE trigger_monitoring_priority set max_error_time = 300; 2 | -------------------------------------------------------------------------------- /worker/src/main/resources/piezo_mysql_8.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE trigger_monitoring_priority ADD monitoring_team VARCHAR(100) DEFAULT NULL; 2 | -------------------------------------------------------------------------------- /worker/src/main/resources/quartz_mysql_0.sql: -------------------------------------------------------------------------------- 1 | # 2 | # Quartz seems to work best with the driver mm.mysql-2.0.7-bin.jar 3 | # 4 | # PLEASE consider using mysql with innodb tables to avoid locking issues 5 | # 6 | # In your Quartz properties file, you'll need to set 7 | # org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.StdJDBCDelegate 8 | # 9 | 10 | DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS; 11 | DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS; 12 | DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE; 13 | DROP TABLE IF EXISTS QRTZ_LOCKS; 14 | DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS; 15 | DROP TABLE IF EXISTS QRTZ_SIMPROP_TRIGGERS; 16 | DROP TABLE IF EXISTS QRTZ_CRON_TRIGGERS; 17 | DROP TABLE IF EXISTS 
QRTZ_BLOB_TRIGGERS; 18 | DROP TABLE IF EXISTS QRTZ_TRIGGERS; 19 | DROP TABLE IF EXISTS QRTZ_JOB_DETAILS; 20 | DROP TABLE IF EXISTS QRTZ_CALENDARS; 21 | 22 | 23 | CREATE TABLE QRTZ_JOB_DETAILS 24 | ( 25 | SCHED_NAME VARCHAR(120) NOT NULL, 26 | JOB_NAME VARCHAR(100) NOT NULL, 27 | JOB_GROUP VARCHAR(100) NOT NULL, 28 | DESCRIPTION VARCHAR(250) NULL, 29 | JOB_CLASS_NAME VARCHAR(250) NOT NULL, 30 | IS_DURABLE VARCHAR(1) NOT NULL, 31 | IS_NONCONCURRENT VARCHAR(1) NOT NULL, 32 | IS_UPDATE_DATA VARCHAR(1) NOT NULL, 33 | REQUESTS_RECOVERY VARCHAR(1) NOT NULL, 34 | JOB_DATA BLOB NULL, 35 | PRIMARY KEY (SCHED_NAME,JOB_NAME,JOB_GROUP) 36 | ); 37 | 38 | CREATE TABLE QRTZ_TRIGGERS 39 | ( 40 | SCHED_NAME VARCHAR(120) NOT NULL, 41 | TRIGGER_NAME VARCHAR(100) NOT NULL, 42 | TRIGGER_GROUP VARCHAR(100) NOT NULL, 43 | JOB_NAME VARCHAR(100) NOT NULL, 44 | JOB_GROUP VARCHAR(100) NOT NULL, 45 | DESCRIPTION VARCHAR(250) NULL, 46 | NEXT_FIRE_TIME BIGINT(13) NULL, 47 | PREV_FIRE_TIME BIGINT(13) NULL, 48 | PRIORITY INTEGER NULL, 49 | TRIGGER_STATE VARCHAR(16) NOT NULL, 50 | TRIGGER_TYPE VARCHAR(8) NOT NULL, 51 | START_TIME BIGINT(13) NOT NULL, 52 | END_TIME BIGINT(13) NULL, 53 | CALENDAR_NAME VARCHAR(190) NULL, 54 | MISFIRE_INSTR SMALLINT(2) NULL, 55 | JOB_DATA BLOB NULL, 56 | PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP), 57 | FOREIGN KEY (SCHED_NAME,JOB_NAME,JOB_GROUP) 58 | REFERENCES QRTZ_JOB_DETAILS(SCHED_NAME,JOB_NAME,JOB_GROUP) 59 | ); 60 | 61 | CREATE TABLE QRTZ_SIMPLE_TRIGGERS 62 | ( 63 | SCHED_NAME VARCHAR(120) NOT NULL, 64 | TRIGGER_NAME VARCHAR(100) NOT NULL, 65 | TRIGGER_GROUP VARCHAR(100) NOT NULL, 66 | REPEAT_COUNT BIGINT(7) NOT NULL, 67 | REPEAT_INTERVAL BIGINT(12) NOT NULL, 68 | TIMES_TRIGGERED BIGINT(10) NOT NULL, 69 | PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP), 70 | FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP) 71 | REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP) 72 | ); 73 | 74 | CREATE TABLE QRTZ_CRON_TRIGGERS 75 | ( 76 | SCHED_NAME 
VARCHAR(120) NOT NULL, 77 | TRIGGER_NAME VARCHAR(100) NOT NULL, 78 | TRIGGER_GROUP VARCHAR(100) NOT NULL, 79 | CRON_EXPRESSION VARCHAR(200) NOT NULL, 80 | TIME_ZONE_ID VARCHAR(80), 81 | PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP), 82 | FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP) 83 | REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP) 84 | ); 85 | 86 | CREATE TABLE QRTZ_SIMPROP_TRIGGERS 87 | ( 88 | SCHED_NAME VARCHAR(120) NOT NULL, 89 | TRIGGER_NAME VARCHAR(100) NOT NULL, 90 | TRIGGER_GROUP VARCHAR(100) NOT NULL, 91 | STR_PROP_1 VARCHAR(512) NULL, 92 | STR_PROP_2 VARCHAR(512) NULL, 93 | STR_PROP_3 VARCHAR(512) NULL, 94 | INT_PROP_1 INT NULL, 95 | INT_PROP_2 INT NULL, 96 | LONG_PROP_1 BIGINT NULL, 97 | LONG_PROP_2 BIGINT NULL, 98 | DEC_PROP_1 NUMERIC(13,4) NULL, 99 | DEC_PROP_2 NUMERIC(13,4) NULL, 100 | BOOL_PROP_1 VARCHAR(1) NULL, 101 | BOOL_PROP_2 VARCHAR(1) NULL, 102 | PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP), 103 | FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP) 104 | REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP) 105 | ); 106 | 107 | CREATE TABLE QRTZ_BLOB_TRIGGERS 108 | ( 109 | SCHED_NAME VARCHAR(120) NOT NULL, 110 | TRIGGER_NAME VARCHAR(100) NOT NULL, 111 | TRIGGER_GROUP VARCHAR(100) NOT NULL, 112 | BLOB_DATA BLOB NULL, 113 | PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP), 114 | FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP) 115 | REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP) 116 | ); 117 | 118 | CREATE TABLE QRTZ_CALENDARS 119 | ( 120 | SCHED_NAME VARCHAR(120) NOT NULL, 121 | CALENDAR_NAME VARCHAR(100) NOT NULL, 122 | CALENDAR BLOB NOT NULL, 123 | PRIMARY KEY (SCHED_NAME,CALENDAR_NAME) 124 | ); 125 | 126 | CREATE TABLE QRTZ_PAUSED_TRIGGER_GRPS 127 | ( 128 | SCHED_NAME VARCHAR(120) NOT NULL, 129 | TRIGGER_GROUP VARCHAR(100) NOT NULL, 130 | PRIMARY KEY (SCHED_NAME,TRIGGER_GROUP) 131 | ); 132 | 133 | CREATE TABLE QRTZ_FIRED_TRIGGERS 134 | ( 135 | SCHED_NAME VARCHAR(120) NOT 
NULL, 136 | ENTRY_ID VARCHAR(95) NOT NULL, 137 | TRIGGER_NAME VARCHAR(100) NOT NULL, 138 | TRIGGER_GROUP VARCHAR(100) NOT NULL, 139 | INSTANCE_NAME VARCHAR(200) NOT NULL, 140 | FIRED_TIME BIGINT(13) NOT NULL, 141 | SCHED_TIME BIGINT(13) NOT NULL, 142 | PRIORITY INTEGER NOT NULL, 143 | STATE VARCHAR(16) NOT NULL, 144 | JOB_NAME VARCHAR(100) NULL, 145 | JOB_GROUP VARCHAR(100) NULL, 146 | IS_NONCONCURRENT VARCHAR(1) NULL, 147 | REQUESTS_RECOVERY VARCHAR(1) NULL, 148 | PRIMARY KEY (SCHED_NAME,ENTRY_ID) 149 | ); 150 | 151 | CREATE TABLE QRTZ_SCHEDULER_STATE 152 | ( 153 | SCHED_NAME VARCHAR(120) NOT NULL, 154 | INSTANCE_NAME VARCHAR(100) NOT NULL, 155 | LAST_CHECKIN_TIME BIGINT(13) NOT NULL, 156 | CHECKIN_INTERVAL BIGINT(13) NOT NULL, 157 | PRIMARY KEY (SCHED_NAME,INSTANCE_NAME) 158 | ); 159 | 160 | CREATE TABLE QRTZ_LOCKS 161 | ( 162 | SCHED_NAME VARCHAR(120) NOT NULL, 163 | LOCK_NAME VARCHAR(40) NOT NULL, 164 | PRIMARY KEY (SCHED_NAME,LOCK_NAME) 165 | ); 166 | 167 | 168 | commit; 169 | -------------------------------------------------------------------------------- /worker/src/main/resources/run-sql.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo Running run_me_first.sql 4 | mysql < run_me_first.sql 5 | 6 | for script in $(ls quartz*.sql | sort -V); do 7 | echo Running $script 8 | mysql jobs < $script 9 | done 10 | 11 | for script in $(ls piezo*.sql | sort -V); do 12 | echo Running $script 13 | mysql jobs < $script 14 | done 15 | -------------------------------------------------------------------------------- /worker/src/main/resources/run_me_first.sql: -------------------------------------------------------------------------------- 1 | # 2 | # One time setup for the environment. 3 | # Must be run by a DB user with the necessary permissions. 
package com.lucidchart.piezo

import scala.beans.BeanProperty
import java.sql.Connection

/**
 * Custom ConnectionProvider that is compatible with the configuration of a custom ConnectionProvider, using Java Beans.
 * Note that unlike our `ConnectionProvider` class, it is pretty mutable, because Java.
 *
 * It ultimately wraps a PiezoConnectionProvider.
 *
 * WARNING: This is intended for the properties to all be set, and then `initialize` called to set everything up, after
 * which the properties should be left unchanged.
 */
class BeanConnectionProvider extends org.quartz.utils.ConnectionProvider {

  @BeanProperty
  var URL: String = null

  @BeanProperty
  var driver: String = null

  @BeanProperty
  var user: String = null

  @BeanProperty
  var password: String = null

  @BeanProperty
  var maxConnections: Int = -1

  @BeanProperty
  var validationQuery: String = ""

  @BeanProperty
  var supportIPFailover: Boolean = false

  @BeanProperty
  var causeFailoverEveryConnection: Boolean = false

  // Built by initialize(); remains null until then.
  private var provider: PiezoConnectionProvider = null

  /** Builds the wrapped PiezoConnectionProvider from the bean properties set above. */
  override def initialize(): Unit = {
    provider = new PiezoConnectionProvider(
      URL,
      driver,
      user,
      password,
      maxConnections,
      validationQuery,
      supportIPFailover,
      causeFailoverEveryConnection,
    )
  }

  /**
   * Hands out a connection from the wrapped provider.
   *
   * @throws IllegalStateException if called before initialize() — previously this
   *                               surfaced as an uninformative NullPointerException
   */
  override def getConnection(): Connection = {
    if (provider == null) {
      throw new IllegalStateException("BeanConnectionProvider.getConnection() called before initialize()")
    }
    provider.getConnection()
  }

  /** Shuts down the wrapped provider; safe to call even if initialize() was never invoked. */
  override def shutdown(): Unit = {
    if (provider != null) {
      provider.shutdown()
    }
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/JobHistoryModel.scala:
--------------------------------------------------------------------------------
package com.lucidchart.piezo

import java.sql.{ResultSet, Timestamp}
import java.util.Date
import org.quartz.{JobKey, TriggerKey}
import org.slf4j.LoggerFactory
import org.slf4j.Logger
import java.sql.Connection

/** One row of the job_history table. `success` is the raw 0/1 column value. */
case class JobRecord(
  name: String,
  group: String,
  trigger_name: String,
  trigger_group: String,
  success: Int,
  start: Date,
  finish: Date,
  fire_instance_id: String,
)

/**
 * Data-access layer for the job_history table.
 *
 * Every public method obtains its own connection from `getConnection` and closes both the
 * statement and the connection before returning (the original leaked statements — see its
 * own "TODO: close statement?"). Read/write failures are logged and swallowed so history
 * bookkeeping never disrupts job execution; note that a failure of `getConnection()` itself
 * still propagates, as in the original.
 */
class JobHistoryModel(getConnection: () => Connection) {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)

  /**
   * Records one completed job execution.
   *
   * @param instanceDurationInMillis run time; `finish` is stored as fireTime + duration
   * @param success whether the execution ended without a JobExecutionException
   */
  def addJob(
    fireInstanceId: String,
    jobKey: JobKey,
    triggerKey: TriggerKey,
    fireTime: Date,
    instanceDurationInMillis: Long,
    success: Boolean,
  ): Unit = {
    val connection = getConnection()
    try {
      val prepared = connection.prepareStatement(
        """
        INSERT INTO job_history(
          fire_instance_id,
          job_name,
          job_group,
          trigger_name,
          trigger_group,
          success,
          start,
          finish
        )
        VALUES(?, ?, ?, ?, ?, ?, ?, ?)
        """,
      )
      try {
        prepared.setString(1, fireInstanceId)
        prepared.setString(2, jobKey.getName)
        prepared.setString(3, jobKey.getGroup)
        prepared.setString(4, triggerKey.getName)
        prepared.setString(5, triggerKey.getGroup)
        prepared.setBoolean(6, success)
        prepared.setTimestamp(7, new Timestamp(fireTime.getTime))
        prepared.setTimestamp(8, new Timestamp(fireTime.getTime + instanceDurationInMillis))
        prepared.executeUpdate()
      } finally {
        prepared.close() // was leaked in the original
      }
    } catch {
      case e: Exception => logger.error("error in recording start of job", e)
    } finally {
      connection.close()
    }
  }

  /**
   * Deletes history rows whose start is older than `minStart` (epoch millis).
   * @return number of rows deleted, or 0 on error
   */
  def deleteJobs(minStart: Long): Int = {
    val connection = getConnection()
    try {
      val prepared = connection.prepareStatement(
        """
        DELETE
        FROM job_history
        WHERE start < ?
        """,
      )
      try {
        prepared.setTimestamp(1, new Timestamp(minStart))
        prepared.executeUpdate()
      } finally {
        prepared.close()
      }
    } catch {
      case e: Exception =>
        logger.error("error deleting job histories", e)
        0
    } finally {
      connection.close()
    }
  }

  /** Returns up to 100 most-recent history rows for the given job, newest first; Nil on error. */
  def getJob(jobKey: JobKey): List[JobRecord] = {
    val connection = getConnection()

    try {
      val prepared = connection.prepareStatement(
        """
        SELECT *
        FROM job_history
        WHERE
          job_name=?
          AND job_group=?
        ORDER BY start DESC
        LIMIT 100
        """,
      )
      try {
        prepared.setString(1, jobKey.getName)
        prepared.setString(2, jobKey.getGroup)
        parseJobs(prepared.executeQuery())
      } finally {
        prepared.close()
      }
    } catch {
      case e: Exception => {
        logger.error("error in retrieving jobs", e)
        Nil
      }
    } finally {
      connection.close()
    }
  }

  /** Returns the most recent successful execution fired by this trigger, if any; None on error. */
  def getLastJobSuccessByTrigger(triggerKey: TriggerKey): Option[JobRecord] = {
    val connection = getConnection()

    try {
      val prepared = connection.prepareStatement(
        """
        SELECT *
        FROM job_history
        WHERE
          trigger_name=?
          AND trigger_group=?
          AND success=1
        ORDER BY start DESC
        LIMIT 1
        """,
      )
      try {
        prepared.setString(1, triggerKey.getName)
        prepared.setString(2, triggerKey.getGroup)
        val rs = prepared.executeQuery()
        if (rs.next()) {
          Some(parseJob(rs))
        } else {
          None
        }
      } finally {
        prepared.close()
      }
    } catch {
      case e: Exception => {
        logger.error("error in retrieving last job success by trigger", e)
        None
      }
    } finally {
      connection.close()
    }
  }

  /** Returns up to 100 most-recent history rows across all jobs, newest first; Nil on error. */
  def getJobs(): List[JobRecord] = {
    val connection = getConnection()

    try {
      val prepared = connection.prepareStatement(
        """
        SELECT *
        FROM job_history
        ORDER BY start DESC
        LIMIT 100
        """,
      )
      try {
        parseJobs(prepared.executeQuery())
      } finally {
        prepared.close()
      }
    } catch {
      case e: Exception => {
        logger.error("error in retrieving jobs", e)
        Nil
      }
    } finally {
      connection.close()
    }
  }

  /**
   * Materializes every remaining row of `rs` into JobRecords, preserving cursor order.
   * Uses a ListBuffer: the original's repeated `:+=` on an immutable List was O(n²).
   */
  def parseJobs(rs: ResultSet): List[JobRecord] = {
    val result = scala.collection.mutable.ListBuffer[JobRecord]()
    while (rs.next()) {
      result += parseJob(rs)
    }
    result.toList
  }

  /** Maps the current row of `rs` to a JobRecord (column names per the job_history schema). */
  def parseJob(rs: ResultSet): JobRecord = {
    new JobRecord(
      rs.getString("job_name"),
      rs.getString("job_group"),
      rs.getString("trigger_name"),
      rs.getString("trigger_group"),
      rs.getInt("success"),
      rs.getTimestamp("start"),
      rs.getTimestamp("finish"),
      rs.getString("fire_instance_id"),
    )
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/PiezoConnectionProvider.scala:
--------------------------------------------------------------------------------
package com.lucidchart.piezo

import java.net.UnknownHostException
import java.sql.{Connection, SQLTransientConnectionException}
import java.util.concurrent.TimeUnit
import org.quartz.utils.HikariCpPoolingConnectionProvider
import org.slf4j.LoggerFactory
import scala.annotation.tailrec
import org.slf4j.Logger

/**
 * Hikari-backed connection provider that can optionally survive DNS-based IP failovers by
 * detecting when the hostname's A record changes and softly replacing the whole pool.
 */
private[piezo] class PiezoConnectionProvider(
  url: String,
  driver: String,
  user: String,
  password: String,
  maxConnections: Int = 10,
  validationQuery: String = "SELECT 0",
  supportIPFailover: Boolean = false,
  // Intended to be used only for tests. This mocks an IP failover every time a connection is retrieved
  causeFailoverEveryConnection: Boolean = false,
) {

  /** A pool plus the IP it was created against, so a later DNS change can be detected. */
  private class Pool(val ip: String, val connectionProvider: HikariCpPoolingConnectionProvider) {

    def this(ip: String) = this(ip, createNewConnectionProvider())

    logger.info(s"Initialized Db connection pool for ${jdbcURL}")
    // Hikari takes about a second to add connections to the connection pool,
    // so warm the pool up here (with a time limit of 2500ms).
    warmUpCP(connectionProvider)
  }

  val logger: Logger = LoggerFactory.getLogger(this.getClass)
  private def jdbcURL = url

  // Hostname portion of the JDBC URL: strip the "jdbc:mysql://" prefix, then cut at the first
  // ':' (port) OR '/' (database path). The original split on ':' alone, which left "host/db"
  // behind for port-less URLs such as jdbc:mysql://host/db and broke DNS lookup during failover.
  private val dataSourceHostname =
    if (jdbcURL != null) jdbcURL.replace("jdbc:mysql://", "").split("[:/]")(0) else null

  // Time (in milliseconds) that the in-memory cache will retain the ip address
  private val cachedIpTTL: Long = 1000
  private val getIpNumRetries = 10
  // Class for storing the ip address of the host, along with an expiration date
  private case class CachedIpWithExpiration(ip: String, expiration: Long)
  // Cache for ip address and its expiration for a host
  @volatile
  private var cachedIpWithExpiration: Option[CachedIpWithExpiration] = None

  // Current pool; replaced (under `synchronized`) when a failover is detected
  @volatile
  private var pool: Pool = new Pool(getIP)

  /** Builds a fresh Hikari pool from this provider's configuration. */
  def createNewConnectionProvider(): HikariCpPoolingConnectionProvider = {
    new HikariCpPoolingConnectionProvider(
      driver,
      jdbcURL,
      user,
      password,
      maxConnections,
      validationQuery,
    )
  }

  /**
   * HikariCP connection pools don't automatically close when IP addresses for a hostname change.
   * This function returns true iff at least one of the following conditions is met:
   *   - IP addresses have changed for the CNAME record used for DNS lookup
   *   - causeFailoverEveryConnection is set to "true", which is used for testing failover functionality
   *
   * @param pool the connection pool currently being used
   * @param dnsIP the IP returned when performing a DNS lookup
   */
  private def hasIpAddressChanged(pool: Pool, dnsIP: String): Boolean = {
    causeFailoverEveryConnection || pool.ip != dnsIP
  }

  /**
   * Runs `fn` up to `n` times, sleeping 10ms between attempts; only UnknownHostException is
   * retried, and on the final attempt it propagates to the caller.
   */
  @tailrec
  private def retryGettingIp(n: Int)(fn: => String): String = {
    try {
      return fn
    } catch {
      // Failed to resolve it from JVM; fall through to retry while attempts remain
      case e: UnknownHostException if n > 1 =>
    }
    // Wait 10 milliseconds between retries
    Thread.sleep(10)
    retryGettingIp(n - 1)(fn)
  }

  /** Resolves the hostname (with retries) and refreshes the TTL cache. Call under `synchronized`. */
  def _getIp: String = {
    retryGettingIp(getIpNumRetries) {
      // Get the ip address of the hostname. The result is cached in the JVM
      val ip = java.net.InetAddress.getByName(dataSourceHostname).getHostAddress
      cachedIpWithExpiration = Some(CachedIpWithExpiration(ip, System.currentTimeMillis() + cachedIpTTL))
      ip
    }
  }

  /** Returns the (possibly cached) IP for the datasource host, re-resolving after the TTL lapses. */
  def getIP: String = {
    synchronized {
      cachedIpWithExpiration
        .map { cachedValue =>
          if (System.currentTimeMillis() > cachedValue.expiration) {
            _getIp
          } else {
            cachedValue.ip
          }
        }
        .getOrElse(_getIp)
    }
  }

  /**
   * Hands out a connection from the current pool, first replacing the pool (and shutting the old
   * one down) if IP failover support is on and the host's IP has changed.
   */
  def getConnection(): Connection = {
    if (supportIPFailover && dataSourceHostname != null) {
      // If the IP has changed, then we know a failover has occurred, and we need to create a new hikari config
      val newIP: String = getIP
      if (hasIpAddressChanged(pool, newIP)) {
        // A failover has occurred, so we evict connections softly. New connections look up the new IP address
        logger.info(s"IP Address has changed for ${jdbcURL}: ${pool.ip} -> ${newIP}. Attempt replacing pool...")
        val optionalOldPool = synchronized {
          val oldPool = pool
          // check if another thread updated the pool
          if (hasIpAddressChanged(pool, newIP)) {
            logger.info(s"Replacing pool for ${jdbcURL}...")
            pool = new Pool(newIP)
            Some(oldPool)
          } else {
            // already up to date
            logger.info(s"Pool already replaced for ${jdbcURL}")
            None
          }
        }

        // Clean up old pool so we don't leak connections to the old server
        optionalOldPool.foreach { oldPool =>
          logger.info(s"Closing DB connection pool for ${jdbcURL} for failover (${oldPool.ip} -> ${pool.ip})")
          oldPool.connectionProvider.shutdown()
        }
      }
    }
    pool.connectionProvider.getConnection()
  }

  /** Closes the current pool. */
  def shutdown(): Unit = {
    logger.info(s"Shutting down connection pool for ${jdbcURL}")
    pool.connectionProvider.shutdown()
  }

  /** Polls for a first usable connection for up to 2500ms so the pool is warm before use. */
  private def warmUpCP(connectionPool: HikariCpPoolingConnectionProvider): Unit = {
    var testConn: Connection = null
    val start = System.currentTimeMillis
    while (testConn == null && (System.currentTimeMillis - start) < 2500) {
      try {
        testConn = connectionPool.getConnection()
      } catch {
        case _: SQLTransientConnectionException => { TimeUnit.MILLISECONDS.sleep(100) } // pool not ready yet
      }
    }
    if (testConn != null) {
      testConn.close()
    }
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/TriggerHistoryModel.scala:
--------------------------------------------------------------------------------
package com.lucidchart.piezo

import java.sql.Timestamp
import org.quartz.TriggerKey
import org.slf4j.LoggerFactory
import java.util.Date
import
org.slf4j.Logger
import java.sql.Connection

/** One row of the trigger_history table; `actual_start` is None for misfires. */
case class TriggerRecord(
  name: String,
  group: String,
  scheduled_start: Date,
  actual_start: Option[Date],
  finish: Date,
  misfire: Int,
  fire_instance_id: String,
)

/**
 * Data-access layer for the trigger_history table. Each method opens its own connection and
 * closes both statement and connection before returning; errors are logged and swallowed.
 */
class TriggerHistoryModel(getConnection: () => Connection) {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)

  /**
   * Upserts one trigger firing (keyed on the table's unique key, per ON DUPLICATE KEY UPDATE).
   * `finish` is always recorded as "now"; a missing fire time falls back to "now" as well.
   */
  def addTrigger(
    triggerKey: TriggerKey,
    triggerFireTime: Option[Date],
    actualStart: Option[Date],
    misfire: Boolean,
    fireInstanceId: Option[String],
  ): Unit = {
    val connection = getConnection()
    try {
      val prepared = connection.prepareStatement(
        """
        INSERT INTO trigger_history(
          trigger_name,
          trigger_group,
          scheduled_start,
          actual_start,
          finish,
          misfire,
          fire_instance_id
        ) VALUES(?, ?, ?, ?, ?, ?, ?)
        ON DUPLICATE KEY UPDATE
          trigger_name = Values(trigger_name),
          trigger_group = Values(trigger_group),
          scheduled_start = Values(scheduled_start),
          actual_start = Values(actual_start),
          finish = Values(finish),
          misfire = Values(misfire),
          fire_instance_id = Values(fire_instance_id)
        """,
      )
      try {
        prepared.setString(1, triggerKey.getName)
        prepared.setString(2, triggerKey.getGroup)
        prepared.setTimestamp(3, new Timestamp(triggerFireTime.getOrElse(new Date).getTime))
        prepared.setTimestamp(4, actualStart.map(date => new Timestamp(date.getTime)).getOrElse(null))
        prepared.setTimestamp(5, new Timestamp(System.currentTimeMillis))
        prepared.setBoolean(6, misfire)
        prepared.setString(7, fireInstanceId.getOrElse(""))
        prepared.executeUpdate()
      } finally {
        prepared.close() // was leaked in the original
      }
    } catch {
      case e: Exception => logger.error("error in recording end of trigger", e)
    } finally {
      connection.close()
    }
  }

  /**
   * Deletes history rows scheduled before `minScheduledStart` (epoch millis).
   * @return number of rows deleted, or 0 on error
   */
  def deleteTriggers(minScheduledStart: Long): Int = {
    val connection = getConnection()
    try {
      val prepared = connection.prepareStatement("""DELETE FROM trigger_history WHERE scheduled_start < ?""")
      try {
        prepared.setTimestamp(1, new Timestamp(minScheduledStart))
        prepared.executeUpdate()
      } finally {
        prepared.close()
      }
    } catch {
      case e: Exception =>
        logger.error("error deleting trigger histories", e)
        0
    } finally {
      connection.close()
    }
  }

  /** Returns up to 100 most-recent firings of this trigger, newest first; empty list on error. */
  def getTrigger(triggerKey: TriggerKey): List[TriggerRecord] = {
    val connection = getConnection()

    try {
      val prepared = connection.prepareStatement(
        """SELECT * FROM trigger_history WHERE trigger_name=? AND trigger_group=? ORDER BY scheduled_start DESC LIMIT 100""",
      )
      try {
        prepared.setString(1, triggerKey.getName)
        prepared.setString(2, triggerKey.getGroup)
        val rs = prepared.executeQuery()

        // ListBuffer: the original's repeated `:+=` on an immutable List was O(n²)
        val result = scala.collection.mutable.ListBuffer[TriggerRecord]()
        while (rs.next()) {
          result += new TriggerRecord(
            rs.getString("trigger_name"),
            rs.getString("trigger_group"),
            rs.getTimestamp("scheduled_start"),
            Option(rs.getTimestamp("actual_start")),
            rs.getTimestamp("finish"),
            rs.getInt("misfire"),
            rs.getString("fire_instance_id"),
          )
        }
        result.toList
      } finally {
        prepared.close()
      }
    } catch {
      case e: Exception =>
        logger.error("error in retrieving triggers", e)
        List()
    } finally {
      connection.close()
    }
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/TriggerMonitoringModel.scala:
--------------------------------------------------------------------------------
package com.lucidchart.piezo

import com.lucidchart.piezo.TriggerMonitoringPriority.TriggerMonitoringPriority
import java.util.Date
import org.quartz.TriggerKey
import org.slf4j.LoggerFactory
import org.slf4j.Logger
import java.sql.Connection

/**
 * Enum-like set of monitoring priorities. Kept as a hand-rolled value class (not scala Enumeration)
 * so legacy ids/names can be remapped: the retired "Medium" level (id 2) maps to Low.
 */
object TriggerMonitoringPriority {
  case class Value(id: Int, name: String) {
    override def toString: String = name
  }
  type TriggerMonitoringPriority = Value
  val Off: Value = Value(0, "Off")
  val Low: Value = Value(1, "Low")
  val High: Value = Value(3, "High")

  val values: List[Value] = List(Off, Low, High)

  // map values that formerly identified a Medium priority to Low
  val valuesById: Map[Int, Value] = Map(2 -> Low) ++ values.map(p => p.id -> p)
  val valuesByName: Map[String, Value] = Map("Medium" -> Low) ++ values.map(p => p.name -> p)

  def withName: Function[String, Value] = valuesByName
}

/** One row of the trigger_monitoring_priority table. */
case class TriggerMonitoringRecord(
  triggerName: String,
  triggerGroup: String,
  priority: TriggerMonitoringPriority,
  maxSecondsInError: Int,
  monitoringTeam: Option[String],
  created: Date,
  modified: Date,
)

/**
 * Data-access layer for trigger monitoring settings. Each method opens its own connection and
 * closes both statement and connection before returning; errors are logged and swallowed.
 */
class TriggerMonitoringModel(getConnection: () => Connection) {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)

  /**
   * Upserts the monitoring settings for one trigger.
   * @return rows affected (per MySQL upsert semantics), or 0 on error
   */
  def setTriggerMonitoringRecord(
    triggerKey: TriggerKey,
    triggerMonitoringPriority: TriggerMonitoringPriority,
    maxSecondsInError: Int,
    monitoringTeam: Option[String],
  ): Int = {
    val connection = getConnection()
    try {
      val prepared = connection.prepareStatement("""
        INSERT INTO trigger_monitoring_priority
          (trigger_name, trigger_group, priority, max_error_time, monitoring_team)
        VALUES
          (?, ?, ?, ?, ?)
        ON DUPLICATE KEY UPDATE
          priority = values(priority),
          max_error_time = values(max_error_time),
          monitoring_team = values(monitoring_team)
      """)
      try {
        prepared.setString(1, triggerKey.getName)
        prepared.setString(2, triggerKey.getGroup)
        prepared.setInt(3, triggerMonitoringPriority.id)
        prepared.setInt(4, maxSecondsInError)
        monitoringTeam match {
          case Some(team) => prepared.setString(5, team)
          case None => prepared.setNull(5, java.sql.Types.VARCHAR)
        }
        prepared.executeUpdate()
      } finally {
        prepared.close() // was leaked in the original
      }
    } catch {
      case e: Exception =>
        logger.error(
          s"Error setting trigger monitoring priority. " +
            s"Trigger name: ${triggerKey.getName} group: ${triggerKey.getGroup}",
          e,
        )
        0
    } finally {
      connection.close()
    }
  }

  /**
   * Removes the monitoring settings for one trigger.
   * @return rows deleted, or 0 on error
   */
  def deleteTriggerMonitoringRecord(triggerKey: TriggerKey): Int = {
    val connection = getConnection()
    try {
      val prepared = connection.prepareStatement("""
        DELETE
        FROM trigger_monitoring_priority
        WHERE
          trigger_name = ?
          AND trigger_group = ?
      """)
      try {
        prepared.setString(1, triggerKey.getName)
        prepared.setString(2, triggerKey.getGroup)
        prepared.executeUpdate()
      } finally {
        prepared.close()
      }
    } catch {
      case e: Exception => {
        logger.error(
          s"Error deleting trigger monitoring priority. " +
            s"Trigger name: ${triggerKey.getName} group: ${triggerKey.getGroup}",
          e,
        )
        0
      }
    } finally {
      connection.close()
    }
  }

  /**
   * Fetches the monitoring settings for one trigger. Returns None when the row is absent, when
   * its priority id is unknown, or on error.
   */
  def getTriggerMonitoringRecord(triggerKey: TriggerKey): Option[TriggerMonitoringRecord] = {
    val connection = getConnection()

    try {
      val prepared = connection.prepareStatement("""
        SELECT *
        FROM trigger_monitoring_priority
        WHERE
          trigger_name = ?
          AND trigger_group = ?
      """)
      try {
        prepared.setString(1, triggerKey.getName)
        prepared.setString(2, triggerKey.getGroup)
        val rs = prepared.executeQuery()
        if (rs.next()) {
          TriggerMonitoringPriority.valuesById.get(rs.getInt("priority")).map { priority =>
            TriggerMonitoringRecord(
              rs.getString("trigger_name"),
              rs.getString("trigger_group"),
              priority,
              rs.getInt("max_error_time"),
              Option(rs.getString("monitoring_team")),
              rs.getDate("created"),
              rs.getDate("modified"),
            )
          }
        } else {
          None
        }
      } finally {
        prepared.close()
      }
    } catch {
      case e: Exception => {
        logger.error(
          s"Error retrieving trigger monitoring priority. " +
            s"Trigger name: ${triggerKey.getName} group: ${triggerKey.getGroup}",
          e,
        )
        None
      }
    } finally {
      connection.close()
    }
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/Worker.scala:
--------------------------------------------------------------------------------
package com.lucidchart.piezo

import com.timgroup.statsd.NonBlockingStatsDClientBuilder
import java.io.*
import java.util.Properties
import java.util.concurrent.{Semaphore, TimeUnit}
import org.joda.time.DateTime
import org.joda.time.format.ISODateTimeFormat
import org.quartz.Scheduler
import org.slf4j.LoggerFactory
import scala.util.Try
import scala.util.control.NonFatal
import org.quartz.utils.DBConnectionManager
import java.sql.Connection
import org.quartz.SchedulerContext

/**
 * To stop the worker without stopping SBT: Ctrl+D Enter
 */
object Worker {

  type GetConnection = () => Connection

  /**
   * A key to lookup the connectionManager in the SchedulerContext
   */
  private val PiezoConnectionKey = "com.lucidchart.piezo.getConnection"

  private val logger =
LoggerFactory.getLogger(this.getClass)
  // Released to request shutdown (by the shutdown hook or EOF on stdin); run() waits on it
  private[piezo] val runSemaphore = new Semaphore(0)
  // Held while main() runs so the shutdown hook blocks until cleanup finishes
  private val shutdownSemaphore = new Semaphore(1)
  private[piezo] val dtf = ISODateTimeFormat.dateTimeNoMillis().withZoneUTC()

  /**
   * Entry point: wires up the scheduler, statsd client, history listeners and the shared
   * connection factory, then blocks in run() until shutdown is requested.
   */
  def main(args: Array[String]): Unit = {
    logger.info("worker starting")

    shutdownSemaphore.acquire()

    writePID()
    setupShutdownHandler()

    val schedulerFactory: WorkerSchedulerFactory = new WorkerSchedulerFactory()
    val scheduler = schedulerFactory.getScheduler()
    val props = schedulerFactory.props
    val useDatadog =
      Try(props.getProperty("com.lucidchart.piezo.statsd.useDatadog", "false").toBoolean).getOrElse(false)
    val statsd = new NonBlockingStatsDClientBuilder()
      .prefix(props.getProperty("com.lucidchart.piezo.statsd.prefix", "applications.piezo.worker"))
      .hostname(props.getProperty("com.lucidchart.piezo.statsd.host", "localhost"))
      .port(Try(props.getProperty("com.lucidchart.piezo.statsd.port").toInt).getOrElse(8125))
      .build()

    val connectionManager = DBConnectionManager.getInstance()

    // Prefer the piezo-specific data source; fall back to the quartz job-store data source
    val piezoDataSource = {
      var source = props.getProperty("com.lucidchart.piezo.dataSource")
      if (source == null) {
        source = props.getProperty("org.quartz.jobStore.dataSource")
      }
      source
    }

    val getConnection = () => connectionManager.getConnection(piezoDataSource)

    // Expose the factory through the scheduler context so jobs can reach it (see connectionFactory)
    scheduler.getContext().put(PiezoConnectionKey, getConnection)

    scheduler.getListenerManager.addJobListener(new WorkerJobListener(getConnection, statsd, useDatadog))
    scheduler.getListenerManager.addTriggerListener(new WorkerTriggerListener(getConnection, statsd, useDatadog))
    run(scheduler, props)

    logger.info("exiting")

    shutdownSemaphore.release()

    System.exit(0)
  }

  /** Retrieves the connection factory that main() stored in the scheduler context. */
  def connectionFactory(context: SchedulerContext): () => Connection = {
    context.get(PiezoConnectionKey).asInstanceOf[() => Connection]
  }

  /**
   * Starts the scheduler and loops until `runSemaphore` yields `semaphorePermitsToStop` permits
   * (shutdown signal or EOF on stdin), touching the heartbeat file roughly every
   * `heartbeatSeconds`. Shuts the scheduler down (waiting for running jobs) before returning.
   */
  private[piezo] def run(
    scheduler: Scheduler,
    properties: Properties,
    heartbeatSeconds: Int = 60,
    semaphorePermitsToStop: Int = 1,
  ): Unit = {
    val heartbeatFile = properties.getProperty("com.lucidchart.piezo.heartbeatFile")
    if (heartbeatFile == null) {
      logger.trace("No heartbeat file specified")
    }

    try {
      scheduler.start()
      logger.info("scheduler started")
      val reader = new InputStreamReader(System.in)

      var acquired = false
      while (!acquired) {
        try {
          acquired = runSemaphore.tryAcquire(semaphorePermitsToStop, 1, TimeUnit.SECONDS)
          if (!acquired) {
            // NOTE(review): this fires only when the 1s poll lands exactly on a multiple of
            // heartbeatSeconds, so individual heartbeats can be skipped under load
            if (System.currentTimeMillis() / 1000 % heartbeatSeconds == 0) {
              if (heartbeatFile != null) {
                writeHeartbeat(heartbeatFile)
              }
              val currentJobs: Int = scheduler.getCurrentlyExecutingJobs.size
              logger.info("worker heartbeat - currently running " + currentJobs + " jobs")
            }
            if (reader.ready && System.in.read == -1) {
              logger.info("Received EOF on stdin")
              runSemaphore.release()
            }
          }
        } catch {
          case e: InterruptedException => logger.error("caught interruption exception: " + e)
          case e: Exception => logger.error("caught exception: " + e)
        }
      }
      scheduler.shutdown(true)
      logger.info("scheduler shutdown")
    } catch {
      case e: Exception => logger.error("exception caught scheduling jobs: " + e)
    }
  }

  /** Writes the current UTC timestamp to `filePath`, creating parent dirs; failures only warn. */
  private[piezo] def writeHeartbeat(filePath: String): Unit = {
    try {
      val file = new File(filePath)
      file.getParentFile.mkdirs()
      val fileWrite = new FileWriter(file)
      try {
        val heartbeatTime = dtf.print(new DateTime(System.currentTimeMillis()))
        fileWrite.write(heartbeatTime)
      } finally {
        fileWrite.close() // close even when write() throws (the original leaked on failure)
      }
    } catch {
      case NonFatal(e) => logger.warn(s"Exception caught writing heartbeat timestamp to file $filePath)", e)
    }
  }

  /**
   * Writes this JVM's pid to the pidfile (default RUNNING_PID next to the code source; override
   * with -Dpidfile.path, or /dev/null to disable) and registers a hook to delete it on exit.
   */
  private def writePID() = {
    val location = getClass.getProtectionDomain.getCodeSource.getLocation
    val applicationPath = location.getFile()
    java.lang.management.ManagementFactory.getRuntimeMXBean.getName.split('@').headOption.map { pid =>
      val pidFile =
        Option(System.getProperty("pidfile.path")).map(new File(_)).getOrElse(new File(applicationPath, "RUNNING_PID"))

      logger.info("process ID is " + pid)
      logger.info("pid file: " + pidFile.getAbsolutePath)

      if (pidFile.getAbsolutePath != "/dev/null") {
        val out = new FileOutputStream(pidFile)
        try {
          out.write(pid.getBytes)
        } finally {
          out.close() // the original leaked this stream
        }
        Runtime.getRuntime.addShutdownHook(new Thread {
          override def run: Unit = {
            pidFile.delete()
          }
        })
      }
    }
  }

  /** Installs a JVM shutdown hook that asks run() to stop and waits for main() to finish. */
  private def setupShutdownHandler(): Unit = {
    Runtime.getRuntime.addShutdownHook(new Thread() {
      override def run(): Unit = {
        logger.info("received shutdown signal")
        runSemaphore.release()
        shutdownSemaphore.acquire()
      }
    })
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/WorkerJobListener.scala:
--------------------------------------------------------------------------------
package com.lucidchart.piezo

import com.timgroup.statsd.StatsDClient
import org.quartz.{JobExecutionContext, JobExecutionException, JobListener}
import org.slf4j.LoggerFactory
import org.slf4j.Logger
import java.sql.Connection

object WorkerJobListener {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)
}

/** Quartz job listener that records each execution in job_history and emits statsd counters. */
class WorkerJobListener(getConnection: () => Connection, statsd: StatsDClient, useDatadog: Boolean)
  extends JobListener {
  val jobHistoryModel = new JobHistoryModel(getConnection)

  def getName: String = "WorkerJobListener"

  def jobToBeExecuted(context: JobExecutionContext): Unit = {}

  def
jobExecutionVetoed(context: JobExecutionContext): Unit = {}

  /**
   * Records the finished execution (success = no JobExecutionException) in job_history and bumps
   * the corresponding statsd counter. All failures are logged and swallowed so history/metrics
   * problems never affect job execution.
   */
  def jobWasExecuted(context: JobExecutionContext, jobException: JobExecutionException): Unit = {
    try {
      val success = jobException == null
      jobHistoryModel.addJob(
        context.getFireInstanceId,
        context.getTrigger.getJobKey,
        context.getTrigger.getKey,
        context.getFireTime,
        context.getJobRunTime,
        success = success,
      )

      val suffix = if (success) "succeeded" else "failed"
      val jobKey = s"${context.getTrigger.getJobKey.getGroup}.${context.getTrigger.getJobKey.getName}"
      if (useDatadog) {
        statsd.increment("jobs", s"job:${jobKey}", s"event:${suffix}")
      } else {
        statsd.increment(s"jobs.${jobKey}.${suffix}")
      }
    } catch {
      case e: Exception => WorkerJobListener.logger.error("error in jobWasExecuted", e)
    }
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/WorkerSchedulerFactory.scala:
--------------------------------------------------------------------------------
package com.lucidchart.piezo

import org.quartz.impl.StdSchedulerFactory
import java.util.Properties

/**
 * StdSchedulerFactory that keeps a handle on the Properties it was initialized with, so the
 * worker can read piezo-specific settings (statsd, data source, heartbeat file) after startup.
 */
class WorkerSchedulerFactory extends StdSchedulerFactory {
  var props: Properties = null

  override def initialize(props: Properties): Unit = {
    this.props = props
    super.initialize(props)
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/WorkerTriggerListener.scala:
--------------------------------------------------------------------------------
package com.lucidchart.piezo

import com.timgroup.statsd.StatsDClient

import org.quartz.Trigger.{CompletedExecutionInstruction, TriggerState}
import org.quartz.*
import org.slf4j.LoggerFactory
import java.sql.Connection

object WorkerTriggerListener {
  private val logger = LoggerFactory.getLogger(this.getClass)
}

/**
 * Quartz trigger listener that records firings/misfires in trigger_history and emits statsd
 * counters, and vetoes executions whose trigger was paused after the firing was queued.
 */
class WorkerTriggerListener(getConnection: () => Connection, statsd: StatsDClient, useDatadog: Boolean)
  extends TriggerListener {
  val triggerHistoryModel = new TriggerHistoryModel(getConnection)

  def getName: String = "WorkerTriggerListener"

  def vetoJobExecution(trigger: Trigger, context: JobExecutionContext): Boolean = {
    // Called right before the job is about to execute on the worker.

    /*
    Under certain conditions, a job may come up for execution after a trigger
    has been paused, leading to unexpected additional executions.

    To fix this, right before Quartz starts the job, we check the _current_ trigger
    state to ensure that job wasn't paused after the execution was queued.
     */

    // Veto if current trigger state is paused
    context.getScheduler.getTriggerState(trigger.getKey) == TriggerState.PAUSED
  }

  /** Emits a "fired" counter for the trigger (no history row is written at fire time). */
  def triggerFired(trigger: Trigger, context: JobExecutionContext): Unit = {
    val triggerKey = s"${trigger.getKey.getGroup}.${trigger.getKey.getName}"
    if (useDatadog) {
      statsd.increment("triggers", s"trigger:${triggerKey}", "event:fired")
    } else {
      statsd.increment(s"triggers.${triggerKey}.fired")
    }
  }

  /** Upserts the completed firing into trigger_history and emits a "completed" counter. */
  def triggerComplete(
    trigger: Trigger,
    context: JobExecutionContext,
    triggerInstructionCode: CompletedExecutionInstruction,
  ): Unit = {
    try {
      triggerHistoryModel.addTrigger(
        trigger.getKey,
        Option(trigger.getPreviousFireTime),
        Some(context.getFireTime),
        misfire = false,
        Some(context.getFireInstanceId),
      )

      val triggerKey = s"${trigger.getKey.getGroup}.${trigger.getKey.getName}"
      if (useDatadog) {
        statsd.increment("triggers", s"trigger:${triggerKey}", "event:completed")
      } else {
        statsd.increment(s"triggers.${triggerKey}.completed")
      }
    } catch {
      case e: Exception => WorkerTriggerListener.logger.error("exception in triggerComplete", e)
    }
  }

  /** Records the misfire in trigger_history (no actual start / fire instance) and counts it. */
  def triggerMisfired(trigger: Trigger): Unit = {
    try {
      triggerHistoryModel.addTrigger(
        trigger.getKey,
        Option(trigger.getPreviousFireTime),
        None,
        misfire = true,
        None,
      )

      val triggerKey = s"${trigger.getKey.getGroup}.${trigger.getKey.getName}"
      if (useDatadog) {
        statsd.increment("triggers", s"trigger:${triggerKey}", "event:misfired")
      } else {
        statsd.increment(s"triggers.${triggerKey}.misfired")
      }
    } catch {
      case e: Exception => WorkerTriggerListener.logger.error("exception in triggerMisfired", e)
    }
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/jobs/cleanup/JobHistoryCleanup.scala:
--------------------------------------------------------------------------------
package com.lucidchart.piezo.jobs.cleanup

import org.quartz.{Job, JobExecutionContext}
import org.slf4j.LoggerFactory
import com.lucidchart.piezo.{JobHistoryModel, TriggerHistoryModel, Worker}
import java.util.Date
import java.sql.Connection

/**
 * Scheduled job that prunes job_history and trigger_history rows older than the "maxAgeDays"
 * value in the merged job data map.
 */
class JobHistoryCleanup extends Job {
  private val logger = LoggerFactory.getLogger(this.getClass)

  def execute(context: JobExecutionContext): Unit = {
    val maxAge = context.getMergedJobDataMap.getLong("maxAgeDays") * 24L * 3600L * 1000L
    val minStart = System.currentTimeMillis() - maxAge
    val getConnection = Worker.connectionFactory(context.getScheduler.getContext)
    deleteTriggerHistories(getConnection, minStart)
    deleteJobHistories(getConnection, minStart)
  }

  /** Deletes trigger_history rows scheduled before `minStart` and logs the count. */
  private[this] def deleteTriggerHistories(getConnection: () => Connection, minStart: Long): Unit = {
    logger.info("Deleting triggers older than " + new Date(minStart))
    val numDeleted = new
TriggerHistoryModel(getConnection).deleteTriggers(minStart)
    logger.info("Deleted " + numDeleted + " trigger histories")
  }

  /** Deletes job_history rows started before `minStart` and logs the count. */
  private[this] def deleteJobHistories(getConnection: () => Connection, minStart: Long): Unit = {
    logger.info("Deleting jobs older than " + new Date(minStart))
    val numDeleted = new JobHistoryModel(getConnection).deleteJobs(minStart)
    logger.info("Deleted " + numDeleted + " job histories")
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/jobs/exec/RunExec.scala:
--------------------------------------------------------------------------------
package com.lucidchart.piezo.jobs.exec

import java.util.Scanner
import org.quartz.{Job, JobExecutionContext}
import org.slf4j.LoggerFactory
import scala.jdk.CollectionConverters.*
import org.slf4j.Logger

/**
 * Runs an external executable built from the job's data map values, sorted by key.
 *
 * When creating a job's data map, choose keys in alphabetical order for corresponding values to
 * be assembled into the command line in the correct order.
 */
class RunExec extends Job {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)

  def execute(context: JobExecutionContext): Unit = {
    val jobDataMap = context.getJobDetail.getJobDataMap
    val sortedDataList = jobDataMap.entrySet.asScala.toList.sortBy(_.getKey)
    val commands: java.util.List[String] = sortedDataList.map(entry => entry.getValue.toString).asJava
    val cmdProcess = new ProcessBuilder(commands).start
    // Drain stdout BEFORE waiting for exit: waiting first can deadlock when the child fills the
    // OS pipe buffer. The "\A" delimiter makes next() return the entire stream in one token.
    val scanner = new Scanner(cmdProcess.getInputStream, "UTF-8").useDelimiter("\\A")
    val result =
      try {
        // next() with no token throws NoSuchElementException, so a silent child crashed the job
        if (scanner.hasNext()) scanner.next() else ""
      } finally {
        scanner.close() // also closes the process's stdout stream
      }
    cmdProcess.waitFor
    logger.info("Executable output: " + result)
  }
}
--------------------------------------------------------------------------------
/worker/src/main/scala/com/lucidchart/piezo/jobs/monitoring/HeartBeat.scala:
--------------------------------------------------------------------------------
package
package com.lucidchart.piezo.util

import javax.tools.{DiagnosticCollector, JavaCompiler, JavaFileObject, ToolProvider}
import org.quartz.{Job, JobExecutionContext}

import scala.jdk.CollectionConverters.*
import java.net.URLClassLoader
import org.slf4j.LoggerFactory
import java.io.File

object DummyClassGenerator {
  // Shared default class loader; initialized to the context loader of the thread
  // that first touched this object.
  var classLoader: ClassLoader = Thread.currentThread().getContextClassLoader()
}

/**
 * Compiles a Java class, whose source is supplied as a string, into a temp
 * directory (<java.io.tmpdir>/piezo) and loads it through a URLClassLoader
 * rooted at that directory. If the class was already compiled by a previous
 * call, it is loaded directly without recompiling.
 */
class DummyClassGenerator {
  private val logger = LoggerFactory.getLogger(this.getClass)

  val tempDir: String = System.getProperty("java.io.tmpdir")
  val tempOutputDirName: String = tempDir + File.separator + "piezo"
  val tempOutputDir = new File(tempOutputDirName)
  if (!tempOutputDir.exists()) {
    tempOutputDir.mkdir()
  }
  val urlClassLoader: URLClassLoader =
    new URLClassLoader(Array(tempOutputDir.toURI().toURL()), Thread.currentThread().getContextClassLoader())
  val compiler: JavaCompiler = ToolProvider.getSystemJavaCompiler()
  val diagnostics: DiagnosticCollector[JavaFileObject] = new DiagnosticCollector[JavaFileObject]()

  /**
   * Builds the -classpath value for the embedded compiler from the class loader
   * that loaded an (anonymous) Job implementation, prefixed with ".".
   */
  private def getClasspath() = {
    val dummyJob = new Job() {
      def execute(context: JobExecutionContext): Unit = {}
    }
    val classLoader = dummyJob.getClass.getClassLoader
    val separator = System.getProperty("path.separator")
    // On Java 9+ the application class loader is no longer a URLClassLoader, so the
    // previous unconditional asInstanceOf cast threw ClassCastException there. Fall
    // back to the JVM's java.class.path property in that case.
    val entries = classLoader match {
      case ucl: URLClassLoader => ucl.getURLs().toSeq.map(_.getFile)
      case _ => System.getProperty("java.class.path", "").split(separator).toSeq
    }
    val classpath = ("." +: entries).mkString(separator)
    logger.debug("Using classpath: " + classpath)
    classpath
  }

  /**
   * Returns the class named `name`, loading it if already compiled, otherwise
   * compiling `source` first. Returns None when compilation fails (diagnostics
   * are logged at debug level).
   */
  def generate(name: String, source: String): Option[Class[_]] = {
    try {
      Some(urlClassLoader.loadClass(name))
    } catch {
      case _: ClassNotFoundException =>
        logger.debug("Generating class " + name)
        val file: JavaFileObject = new SourceFromString(name, source)
        val compilationUnits = List[JavaFileObject](file)
        val classpath = getClasspath()
        val options = List("-d", tempOutputDirName, "-classpath", classpath)
        val task = compiler.getTask(null, null, diagnostics, options.asJava, null, compilationUnits.asJava)
        logger.debug(s"Compiling $name with options '$options'")
        val success = task.call()
        for (diagnostic <- diagnostics.getDiagnostics.asScala) {
          logger.debug("Result of compiling " + name)
          logger.debug(diagnostic.getCode)
          logger.debug(diagnostic.getKind.toString)
          logger.debug(diagnostic.getPosition.toString)
          logger.debug(diagnostic.getStartPosition.toString)
          logger.debug(diagnostic.getEndPosition.toString)
          // getSource may be null for some diagnostics; String.valueOf avoids an NPE
          // that previously could abort the whole diagnostics report.
          logger.debug(String.valueOf(diagnostic.getSource))
          logger.debug(diagnostic.getMessage(null))
        }
        logger.debug("Success: " + success)

        if (success) {
          logger.info("Generated class " + name)
          Some(urlClassLoader.loadClass(name))
        } else {
          None
        }
    }
  }
}
import javax.tools.JavaFileObject.Kind 6 | 7 | /** 8 | */ 9 | class SourceFromString(name: String, code: String) 10 | extends SimpleJavaFileObject( 11 | URI.create("string:///" + name.replace('.', '/') + Kind.SOURCE.extension), 12 | Kind.SOURCE, 13 | ) { 14 | 15 | override def getCharContent(ignoreEncodingErrors: Boolean): String = { 16 | code 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /worker/src/run/quartz.properties: -------------------------------------------------------------------------------- 1 | 2 | #============================================================================ 3 | # Configure Main Scheduler Properties 4 | #============================================================================ 5 | 6 | org.quartz.scheduler.instanceName: Lucid 7 | org.quartz.scheduler.instanceId: AUTO 8 | org.quartz.scheduler.skipUpdateCheck: true 9 | 10 | #============================================================================ 11 | # Configure ThreadPool 12 | #============================================================================ 13 | 14 | org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool 15 | org.quartz.threadPool.threadCount: 2 16 | org.quartz.threadPool.threadPriority: 5 17 | 18 | #============================================================================ 19 | # Configure JobStore 20 | #============================================================================ 21 | 22 | org.quartz.jobStore.misfireThreshold: 120000 23 | 24 | org.quartz.jobStore.class=org.quartz.impl.jdbcjobstore.JobStoreTX 25 | org.quartz.jobStore.driverDelegateClass=org.quartz.impl.jdbcjobstore.StdJDBCDelegate 26 | org.quartz.jobStore.useProperties=false 27 | org.quartz.jobStore.dataSource=jobs 28 | org.quartz.jobStore.tablePrefix=QRTZ_ 29 | org.quartz.jobStore.isClustered=true 30 | 31 | #============================================================================ 32 | # Configure Datasources 33 | 
#============================================================================ 34 | 35 | org.quartz.dataSource.jobs.driver: com.mysql.cj.jdbc.Driver 36 | org.quartz.dataSource.jobs.URL: jdbc:mysql://localhost:3306/jobs 37 | org.quartz.dataSource.jobs.user: dev 38 | org.quartz.dataSource.jobs.password: dev 39 | org.quartz.dataSource.jobs.maxConnections: 10 40 | org.quartz.dataSource.jobs.validationQuery: select 0 41 | org.quartz.dataSource.jobs.connectionProvider.class = com.lucidchart.piezo.BeanConnectionProvider 42 | 43 | #============================================================================ 44 | # Configure Plugins 45 | #============================================================================ 46 | 47 | org.quartz.plugin.triggHistory.class: org.quartz.plugins.history.LoggingJobHistoryPlugin 48 | 49 | com.lucidchart.piezo.heartbeatFile: /tmp/piezo/workerHeartbeatFile 50 | com.lucidchart.piezo.statsd.prefix: applications.piezo.worker 51 | com.lucidchart.piezo.statsd.host: localhost 52 | com.lucidchart.piezo.statsd.port: 8125 53 | com.lucidchart.piezo.statsd.useDatadog: false 54 | -------------------------------------------------------------------------------- /worker/src/run/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | /tmp/worker.log 5 | 6 | %date - [%level] - from %logger{10} in %.15thread %message %xException%n 7 | 8 | 9 | 10 | 11 | /tmp/beat.log 12 | 13 | %message%n 14 | 15 | 16 | 17 | 18 | 19 | %date - [%level] - from %logger{10} in %.15thread %message %xException{8}%n 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /worker/src/test/resources/quartz_test.properties: -------------------------------------------------------------------------------- 1 | 2 | #============================================================================ 3 | # Configure Main Scheduler Properties 4 | 
#============================================================================ 5 | 6 | org.quartz.scheduler.instanceName: TestScheduler 7 | org.quartz.scheduler.instanceId: AUTO 8 | 9 | org.quartz.scheduler.skipUpdateCheck: true 10 | 11 | #============================================================================ 12 | # Configure ThreadPool 13 | #============================================================================ 14 | 15 | org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool 16 | org.quartz.threadPool.threadCount: 2 17 | org.quartz.threadPool.threadPriority: 5 18 | 19 | #============================================================================ 20 | # Configure JobStore 21 | #============================================================================ 22 | 23 | org.quartz.jobStore.misfireThreshold: 60000 24 | 25 | org.quartz.jobStore.class: org.quartz.simpl.RAMJobStore 26 | -------------------------------------------------------------------------------- /worker/src/test/resources/quartz_test_mysql.properties: -------------------------------------------------------------------------------- 1 | 2 | #============================================================================ 3 | # Configure Main Scheduler Properties 4 | #============================================================================ 5 | 6 | org.quartz.scheduler.instanceName: TestScheduler 7 | org.quartz.scheduler.instanceId: AUTO 8 | 9 | org.quartz.scheduler.skipUpdateCheck: true 10 | 11 | #============================================================================ 12 | # Configure ThreadPool 13 | #============================================================================ 14 | 15 | org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool 16 | org.quartz.threadPool.threadCount: 2 17 | org.quartz.threadPool.threadPriority: 5 18 | 19 | #============================================================================ 20 | # Configure JobStore 21 | 
#============================================================================ 22 | 23 | org.quartz.jobStore.misfireThreshold: 60000 24 | org.quartz.jobStore.class=org.quartz.impl.jdbcjobstore.JobStoreTX 25 | org.quartz.jobStore.dataSource=test_jobs 26 | 27 | 28 | org.quartz.dataSource.test_jobs.driver: com.mysql.cj.jdbc.Driver 29 | org.quartz.dataSource.test_jobs.URL: jdbc:mysql://localhost:3306/test_jobs 30 | org.quartz.dataSource.test_jobs.user: root 31 | org.quartz.dataSource.test_jobs.password: root 32 | org.quartz.dataSource.test_jobs.maxConnections: 10 33 | org.quartz.dataSource.test_jobs.validationQuery: select 0 34 | org.quartz.dataSource.test_jobs.connectionProvider.class = com.lucidchart.piezo.BeanConnectionProvider 35 | org.quartz.dataSource.test_jobs.supportIPFailover: true 36 | -------------------------------------------------------------------------------- /worker/src/test/scala/com/lucidchart/piezo/ModelTest.scala: -------------------------------------------------------------------------------- 1 | package com.lucidchart.piezo 2 | 3 | import java.nio.file.Files 4 | import java.nio.file.Paths 5 | import org.quartz.{JobKey, TriggerKey} 6 | import org.specs2.mutable.* 7 | import org.specs2.specification.* 8 | import java.sql.DriverManager 9 | import java.util.Properties 10 | import scala.jdk.CollectionConverters.* 11 | import scala.util.Using 12 | import java.util.Date 13 | import java.io.InputStream 14 | 15 | class ModelTest extends Specification with BeforeAll with AfterAll { 16 | val propertiesStream: InputStream = getClass().getResourceAsStream("/quartz_test_mysql.properties") 17 | val properties = new Properties 18 | properties.load(propertiesStream) 19 | 20 | val propertiesStreamFailoverEveryConnection: InputStream = 21 | getClass().getResourceAsStream("/quartz_test_mysql_failover_every_connection.properties") 22 | 23 | val username: String = properties.getProperty("org.quartz.dataSource.test_jobs.user") 24 | val password: String = 
properties.getProperty("org.quartz.dataSource.test_jobs.password") 25 | val dbUrl: String = properties.getProperty("org.quartz.dataSource.test_jobs.URL") 26 | val urlParts :+ testDb = dbUrl.split("/").toSeq: @unchecked 27 | val mysqlUrl: String = urlParts.mkString("/") 28 | Class.forName("com.mysql.cj.jdbc.Driver") 29 | 30 | private def getPatchFile(fileName: String) = { 31 | Paths.get(getClass.getResource(s"/$fileName").toURI()) 32 | } 33 | 34 | private def runSql(dbUrl: String, sql: String) = { 35 | Using.resource(DriverManager.getConnection(dbUrl, username, password)) { connection => 36 | Using.resource(connection.createStatement) { statement => 37 | statement.executeUpdate(sql) 38 | } 39 | } 40 | } 41 | 42 | override def afterAll(): Unit = { 43 | runSql(mysqlUrl, s"DROP DATABASE IF EXISTS $testDb") 44 | } 45 | 46 | override def beforeAll(): Unit = { 47 | val piezoSchema = for (num <- 0 to 8) yield getPatchFile(s"piezo_mysql_$num.sql") 48 | val quartzSchema = getPatchFile("quartz_mysql_0.sql") 49 | val schema = (quartzSchema +: piezoSchema) 50 | .map { path => 51 | Files.readAllLines(path).asScala.mkString("\n") 52 | } 53 | .mkString("\n") 54 | .split(";") 55 | 56 | runSql(mysqlUrl, s"CREATE DATABASE IF NOT EXISTS $testDb") 57 | 58 | for (s <- schema) { 59 | runSql(dbUrl, s) 60 | } 61 | } 62 | 63 | private def getConnectionProvider(failoverEveryConnection: Boolean = false): () => java.sql.Connection = { 64 | val provider = new PiezoConnectionProvider( 65 | dbUrl, 66 | "com.mysql.cj.jdbc.Driver", 67 | username, 68 | password, 69 | supportIPFailover = true, 70 | causeFailoverEveryConnection = failoverEveryConnection, 71 | ) 72 | 73 | () => provider.getConnection() 74 | } 75 | 76 | "JobHistoryModel" should { 77 | "work correctly" in { 78 | val jobHistoryModel = new JobHistoryModel(getConnectionProvider()) 79 | val jobKey = new JobKey("blah", "blah") 80 | val triggerKey = new TriggerKey("blahtn", "blahtg") 81 | jobHistoryModel.getJobs().isEmpty must beTrue 82 | 
jobHistoryModel.addJob("ab", jobKey, triggerKey, new Date(), 1000, true) 83 | jobHistoryModel.getJob(jobKey).headOption must beSome 84 | jobHistoryModel.getLastJobSuccessByTrigger(triggerKey) must beSome 85 | jobHistoryModel.getJobs().nonEmpty must beTrue 86 | } 87 | 88 | "work correctly with a failover for every connection to the database" in { 89 | val jobHistoryModel = new JobHistoryModel(getConnectionProvider(true)) 90 | val jobKey = new JobKey("blahc", "blahc") 91 | val triggerKey = new TriggerKey("blahtnc", "blahtgc") 92 | jobHistoryModel.getJob(jobKey).headOption must beNone 93 | jobHistoryModel.addJob("abc", jobKey, triggerKey, new Date(), 1000, true) 94 | jobHistoryModel.getJob(jobKey).headOption must beSome 95 | jobHistoryModel.getLastJobSuccessByTrigger(triggerKey) must beSome 96 | } 97 | } 98 | 99 | "TriggerMonitoringModel" should { 100 | "work correctly" in { 101 | val triggerMonitoringPriorityModel = new TriggerMonitoringModel(getConnectionProvider()) 102 | val triggerKey = new TriggerKey("blahj", "blahg") 103 | triggerMonitoringPriorityModel.getTriggerMonitoringRecord(triggerKey) must beNone 104 | triggerMonitoringPriorityModel.setTriggerMonitoringRecord( 105 | triggerKey, 106 | TriggerMonitoringPriority.Low, 107 | 1800, 108 | Some("my-team"), 109 | ) 110 | triggerMonitoringPriorityModel.getTriggerMonitoringRecord(triggerKey) must beSome 111 | triggerMonitoringPriorityModel.deleteTriggerMonitoringRecord(triggerKey) mustEqual 1 112 | triggerMonitoringPriorityModel.getTriggerMonitoringRecord(triggerKey) must beNone 113 | } 114 | } 115 | 116 | "TriggerHistoryModel" should { 117 | "work correctly" in { 118 | val triggerHistoryModel = new TriggerHistoryModel(getConnectionProvider()) 119 | val triggerKey = new TriggerKey("blahj", "blahg") 120 | triggerHistoryModel.addTrigger( 121 | triggerKey, 122 | triggerFireTime = None, 123 | actualStart = None, 124 | misfire = true, 125 | fireInstanceId = None, 126 | ) 127 | val insertedRecord = 
package com.lucidchart.piezo

import java.io.{BufferedReader, File, FileReader}

import org.specs2.mutable.*
import org.quartz.*
import org.quartz.JobBuilder.*
import org.quartz.TriggerBuilder.*
import org.quartz.SimpleScheduleBuilder.*
import org.quartz.impl.StdSchedulerFactory
import java.util.Properties

import scala.util.Random

object WorkerStopJob {
  // Number of times the job has executed across the whole test run.
  var runCount = 0
}

// Test job: bumps the shared counter and releases the worker's run semaphore so
// that Worker.run returns.
class WorkerStopJob() extends Job {
  def execute(context: JobExecutionContext): Unit = {
    println("upping semaphore")
    WorkerStopJob.runCount += 1
    Worker.runSemaphore.release()
  }
}

class WorkerTest extends Specification {
  sequential

  // Loads the RAM-jobstore quartz test config and gives the scheduler a unique
  // name so repeated runs don't collide.
  private def loadTestProperties(): Properties = {
    val props = new Properties
    props.load(getClass().getResourceAsStream("/quartz_test.properties"))
    props.setProperty("org.quartz.scheduler.instanceName", "testScheduler" + Random.nextInt())
    props
  }

  "worker run" should {
    "stop" in {
      Worker.runSemaphore.drainPermits()
      val job = newJob(classOf[WorkerStopJob])
        .withIdentity("job1", "group1")
        .build()

      val trigger = newTrigger()
        .withIdentity("trigger1", "group1")
        .startNow()
        .withSchedule(simpleSchedule().withIntervalInSeconds(5).repeatForever())
        .build()

      val properties = loadTestProperties()
      val scheduler = new StdSchedulerFactory(properties).getScheduler
      scheduler.scheduleJob(job, trigger)
      Worker.run(scheduler, properties)
      println("worker stopped")
      WorkerStopJob.runCount must equalTo(1)
    }

    "write heartbeat timestamp" in {
      Worker.runSemaphore.drainPermits()
      val job = newJob(classOf[WorkerStopJob])
        .withIdentity("job2", "group2")
        .build()

      val trigger = newTrigger()
        .withIdentity("trigger2", "group2")
        .startNow()
        .withSchedule(simpleSchedule().withIntervalInSeconds(1).repeatForever())
        .build()

      val properties = loadTestProperties()
      val heartbeatFilePath = "/tmp/piezo/piezoHeartbeatTest" + Random.nextInt()
      properties.setProperty("com.lucidchart.piezo.heartbeatFile", heartbeatFilePath)

      println("running worker")
      val scheduler = new StdSchedulerFactory(properties).getScheduler
      scheduler.scheduleJob(job, trigger)
      Worker.run(scheduler, properties, 1, 3)
      println("worker stopped")

      val heartbeatFile = new File(heartbeatFilePath)
      heartbeatFile.exists() must equalTo(true)
      println("heartbeat file exists")

      val reader = new BufferedReader(new FileReader(heartbeatFile))
      val heartbeat =
        try reader.readLine()
        finally reader.close()
      println("heartbeat timestamp: " + heartbeat)
      val heartbeatTime = Worker.dtf.parseDateTime(heartbeat.trim)
      // The heartbeat must have been written within the last few seconds.
      heartbeatTime.isAfter(System.currentTimeMillis() - 5 * 1000) must equalTo(true)
    }
  }
}