├── .gitignore ├── .travis.yml ├── LICENSE ├── README.md ├── build.gradle ├── chronix-storage-integration ├── LICENSE ├── build.gradle └── src │ └── test │ ├── groovy │ └── de │ │ └── qaware │ │ └── chronix │ │ └── solr │ │ └── ChronixClientTestIT.groovy │ └── resources │ ├── log4j2.xml │ └── timeSeries │ ├── prod39_os_unix_int1-appserver-1_2013.08.27_00.00.02.csv │ ├── prod39_os_unix_int1-appserver-1_2013.08.28_00.00.05.csv │ ├── prod39_os_unix_int1-appserver-1_2013.08.29_00.00.05.csv │ ├── prod39_os_unix_int1-appserver-1_2013.08.30_00.00.00.csv │ ├── prod39_os_unix_int1-appserver-1_2013.08.31_00.00.01.csv │ ├── prod39_os_unix_int1-appserver-1_2013.09.01_00.00.01.csv │ └── prod39_os_unix_int1-appserver-1_2013.09.02_00.00.06.csv ├── chronix-storage ├── LICENSE ├── build.gradle ├── gradle.properties └── src │ ├── main │ └── java │ │ └── de │ │ └── qaware │ │ └── chronix │ │ └── lucene │ │ └── client │ │ ├── ChronixLuceneStorage.java │ │ ├── ChronixLuceneStorageConstants.java │ │ ├── LuceneIndex.java │ │ ├── add │ │ └── LuceneAddingService.java │ │ └── stream │ │ ├── LuceneStreamingService.java │ │ ├── TimeSeriesConverterCaller.java │ │ ├── TimeSeriesHandler.java │ │ └── date │ │ ├── DateMathParser.java │ │ └── DateQueryParser.java │ └── test │ ├── groovy │ └── de │ │ └── qaware │ │ └── chronix │ │ └── lucene │ │ └── client │ │ ├── ChronixLuceneStorageConstantsTest.groovy │ │ ├── ChronixLuceneStorageTest.groovy │ │ ├── LuceneIndexTest.groovy │ │ ├── add │ │ └── LuceneAddingServiceTest.groovy │ │ └── stream │ │ └── date │ │ └── DateQueryParserTest.groovy │ ├── java │ └── de │ │ └── qaware │ │ └── chronix │ │ └── lucene │ │ └── client │ │ ├── SimpleTimeSeries.java │ │ └── SimpleTimeSeriesConverter.java │ └── resources │ └── log4j2.xml ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.gitignore.io/api/java,intellij,gradle 2 | 3 | ### Java ### 4 | *.class 5 | 6 | # Mobile Tools for Java (J2ME) 7 | .mtj.tmp/ 8 | 9 | # Package Files # 10 | *.jar 11 | *.war 12 | *.ear 13 | 14 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 15 | hs_err_pid* 16 | 17 | 18 | ### Intellij ### 19 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio 20 | 21 | *.iml 22 | 23 | ## Directory-based project format: 24 | .idea/ 25 | # if you remove the above rule, at least ignore the following: 26 | 27 | # User-specific stuff: 28 | # .idea/workspace.xml 29 | # .idea/tasks.xml 30 | # .idea/dictionaries 31 | 32 | # Sensitive or high-churn files: 33 | # .idea/dataSources.ids 34 | # .idea/dataSources.xml 35 | # .idea/sqlDataSources.xml 36 | # .idea/dynamic.xml 37 | # .idea/uiDesigner.xml 38 | 39 | # Gradle: 40 | # .idea/gradle.xml 41 | # .idea/libraries 42 | 43 | # Mongo Explorer plugin: 44 | # .idea/mongoSettings.xml 45 | 46 | ## File-based project format: 47 | *.ipr 48 | *.iws 49 | 50 | ## Plugin-specific files: 51 | 52 | # IntelliJ 53 | /out/ 54 | 55 | # mpeltonen/sbt-idea plugin 56 | .idea_modules/ 57 | 58 | # JIRA plugin 59 | atlassian-ide-plugin.xml 60 | 61 | # Crashlytics plugin (for Android Studio and IntelliJ) 62 | com_crashlytics_export_strings.xml 63 | crashlytics.properties 64 | crashlytics-build.properties 65 | 66 | 67 | ### Gradle ### 68 | .gradle 69 | build/ 70 | 71 | # Ignore Gradle GUI config 72 | gradle-app.setting 73 | 74 | # Avoid 
ignoring Gradle wrapper jar file (.jar files are usually ignored) 75 | !gradle-wrapper.jar 76 | 77 | bintrayUpload.bat -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: java 2 | 3 | jdk: 4 | - oraclejdk8 5 | 6 | before_install: 7 | - chmod +x gradlew 8 | 9 | script: ./gradlew build jacocoTestReport 10 | 11 | after_success: ./gradlew jacocoRootReport coveralls 12 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 |
203 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [![Build Status](https://travis-ci.org/ChronixDB/chronix.storage.svg)](https://travis-ci.org/ChronixDB/chronix.storage)
2 | [![Coverage Status](https://coveralls.io/repos/github/ChronixDB/chronix.storage/badge.svg?branch=master)](https://coveralls.io/github/ChronixDB/chronix.storage?branch=master)
3 | [![Apache License 2](http://img.shields.io/badge/license-ASF2-blue.svg)](https://github.com/ChronixDB/chronix.storage/blob/master/LICENSE)
4 | [ ![Download](https://api.bintray.com/packages/chronix/maven/chronix-storage/images/download.svg) ](https://bintray.com/chronix/maven/chronix-storage/_latestVersion)
5 |
6 | # Chronix Storage
7 | The Chronix Storage is an implementation of the Chronix API that uses Apache Lucene.
8 | Hence the Chronix Storage can be used with Chronix-Timeseries to store and query time series.
9 |
10 | ## Usage
11 | The following code snippet shows how to create a Chronix client that writes to a given Lucene index.
12 | ```groovy
13 | def chronix = new ChronixClient(new MetricTimeSeriesConverter<>(), new ChronixLuceneStorage(200, ChronixTimeSeriesDefaults.GROUP_BY, ChronixTimeSeriesDefaults.REDUCE))
14 | def luceneIndex = new LuceneIndex(FSDirectory.open(Paths.get("build/lucene")), new StandardAnalyzer())
15 |
16 |
17 | //Define a time series of type metric
18 | def ts = new MetricTimeSeries.Builder("github stars","metric")
19 | .attribute("host", "groovy")
20 | .attribute("source", "readme.md")
21 | .build()
22 |
23 | //Add values
24 | ts.add(Instant.now().toEpochMilli(), 4711)
25 |
26 | //Add the time series to the index
27 | chronix.add([ts] as List, luceneIndex)
28 |
29 | //We do a hard commit to ensure the documents are searchable
30 | luceneIndex.getOpenWriter().commit()
31 |
32 | //Stream time series that match the Lucene query *:*
33 | def timeSeriesStream = chronix.stream(luceneIndex, createQuery("*:*"))
34 |
35 | //Create a proper Lucene query
36 | Query createQuery(String searchString) {
37 | QueryParser queryParser = new QueryParser("name", luceneIndex.getOpenWriter().getAnalyzer())
38 | return queryParser.parse(searchString)
39 | }
40 | ```
41 |
42 | ## Contributing
43 | Is there anything missing? Do you have ideas for new features or improvements? You are highly welcome to contribute
44 | your improvements to the Chronix projects. All you have to do is fork this repository,
45 | improve the code, and issue a pull request.
46 |
47 | ## Building Chronix Storage from Scratch
48 | Everything should run out of the box. Only three things must be available:
49 | - Git
50 | - JDK 1.8
51 | - Gradle
52 |
53 | Just do the following steps:
54 |
55 | ```bash
56 | cd
57 | git clone https://github.com/ChronixDB/chronix.storage.git
58 | cd chronix.storage
59 | ./gradlew clean build
60 | ```
61 |
62 | ## Maintainer
63 |
64 | Florian Lautenschlager @flolaut
65 |
66 | ## License
67 |
68 | This software is provided under the Apache License, Version 2.0.
69 |
70 | See the `LICENSE` file for details.
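
## Consuming the Stream

The stream returned by `chronix.stream(...)` is a plain `java.util.stream.Stream`, so the matching time series can be collected and inspected like any other Java stream. The following is a minimal sketch, assuming the `chronix`, `luceneIndex`, and `createQuery` definitions from the Usage snippet above:

```groovy
import java.util.stream.Collectors

//Stream all time series that match the query and materialize them into a list
def allSeries = chronix.stream(luceneIndex, createQuery("*:*"))
        .collect(Collectors.toList())

//Inspect the collected time series
allSeries.each { ts ->
    println "${ts.getName()} contains ${ts.size()} points"
}
```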
71 | -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | buildscript { 2 | repositories { 3 | jcenter() 4 | mavenCentral() 5 | maven { 6 | url "https://plugins.gradle.org/m2/" 7 | } 8 | maven { 9 | url "http://dl.bintray.com/chronix/maven" 10 | } 11 | } 12 | } 13 | 14 | plugins { 15 | id "org.sonarqube" version "2.5" 16 | id "com.github.hierynomus.license" version "0.14.0" 17 | id 'net.saliman.cobertura' version '2.3.1' 18 | id 'com.github.kt3k.coveralls' version '2.8.2' 19 | id "com.jfrog.bintray" version "1.7.3" 20 | } 21 | 22 | apply plugin: 'org.sonarqube' 23 | 24 | 25 | allprojects { 26 | 27 | apply plugin: 'jacoco' 28 | apply plugin: 'com.github.kt3k.coveralls' 29 | 30 | version '0.2' 31 | group 'de.qaware.chronix' 32 | 33 | repositories { 34 | mavenCentral() 35 | jcenter() 36 | maven { 37 | url "http://dl.bintray.com/chronix/maven" 38 | } 39 | 40 | } 41 | 42 | jacoco { 43 | toolVersion = '0.7.2.201409121644' 44 | } 45 | 46 | } 47 | 48 | 49 | subprojects { 50 | 51 | //Some plugins 52 | apply plugin: 'base' 53 | apply plugin: 'groovy' 54 | apply plugin: 'java' 55 | apply plugin: 'maven-publish' 56 | apply plugin: 'com.jfrog.bintray' 57 | apply plugin: 'com.github.hierynomus.license' 58 | 59 | license { 60 | includes(["**/*.java", "**/*.groovy"]) 61 | mapping { 62 | java = 'SLASHSTAR_STYLE' 63 | groovy = 'SLASHSTAR_STYLE' 64 | } 65 | } 66 | 67 | sonarqube { 68 | properties { 69 | property "sonar.sourceEncoding", "UTF-8" 70 | } 71 | } 72 | 73 | sourceCompatibility = JavaVersion.VERSION_1_8 74 | targetCompatibility = JavaVersion.VERSION_1_8 75 | 76 | dependencies { 77 | //Guava 78 | compile 'com.google.guava:guava:18.0' 79 | 80 | //Logging 81 | compile 'org.slf4j:slf4j-api:1.7.12' 82 | 83 | //Testing 84 | testCompile 'org.codehaus.groovy:groovy-all:2.4.6' 85 | testCompile 'org.spockframework:spock-core:1.0-groovy-2.4' 86 | testCompile 'cglib:cglib-nodep:3.2.0' 87 | testCompile 'org.objenesis:objenesis:2.2' 88 | //Logging framework for tests 89 | testCompile 'org.apache.logging.log4j:log4j-api:2.4' 90 | testCompile 'org.apache.logging.log4j:log4j-core:2.4' 91 | testCompile 'org.apache.logging.log4j:log4j-slf4j-impl:2.4' 92 | testCompile 'org.slf4j:jcl-over-slf4j:1.7.12' 93 | 94 | } 95 | 96 | test { 97 | reports { 98 | junitXml.enabled = false 99 | html.enabled = true 100 | } 101 | } 102 | 103 | // This disables the pedantic doclint feature of JDK8 104 | if (JavaVersion.current().isJava8Compatible()) { 105 | tasks.withType(Javadoc) { 106 | options.addStringOption('Xdoclint:none', '-quiet') 107 | } 108 | } 109 | 110 | task sourcesJar(type: Jar, dependsOn: classes) { 111 | classifier = 'sources' 112 | from sourceSets.main.allSource 113 | } 114 | 115 | jacocoTestReport { 116 | group = 'Coverage' 117 | description = 'Generate Jacoco coverage report for subproject' 118 | 119 | additionalSourceDirs = project.files(sourceSets.main.allSource.srcDirs) 120 | sourceDirectories = project.files(sourceSets.main.allSource.srcDirs) 121 | classDirectories = project.files(sourceSets.main.output) 122 | 123 | reports { 124 | xml.enabled = true 125 | html.enabled = true 126 | } 127 | 128 | afterEvaluate { 129 | classDirectories = files(classDirectories.files.collect { 130 | fileTree(dir: it, exclude: 'de/qaware/chronix/converter/dt/**') 131 | }) 132 | } 133 | } 134 | } 135 | 136 | task jacocoRootReport(type: JacocoReport, group: 'Coverage') { 137 | description = 'Generates aggregate 
Jacoco coverage report from all subprojects' 138 | dependsOn(subprojects.test) 139 | 140 | additionalSourceDirs = files(subprojects.sourceSets.main.allSource.srcDirs) 141 | sourceDirectories = files(subprojects.sourceSets.main.allSource.srcDirs) 142 | classDirectories = files(subprojects.sourceSets.main.output) 143 | executionData = files(subprojects.jacocoTestReport.executionData) 144 | 145 | reports { 146 | html.enabled = true 147 | xml.enabled = true 148 | } 149 | 150 | doFirst { 151 | executionData = files(executionData.findAll { it.exists() }) 152 | } 153 | } 154 | 155 | coveralls { 156 | sourceDirs = subprojects.sourceSets.main.allSource.srcDirs.flatten() 157 | jacocoReportPath = "${buildDir}/reports/jacoco/jacocoRootReport/jacocoRootReport.xml" 158 | } 159 | 160 | def isCI = System.env.'CI' == 'true' 161 | tasks.coveralls { 162 | group = 'Coverage' 163 | description = 'Upload aggregate Jacoco coverage report to Coveralls' 164 | 165 | dependsOn jacocoRootReport 166 | onlyIf { isCI } 167 | } 168 | 169 | task wrapper(type: Wrapper) { 170 | gradleVersion = '4.2.1' 171 | } 172 | 173 | sonarqube { 174 | def sonarUser = project.hasProperty('sonarUser') ? project.sonarUser : 'unknown' 175 | def sonarPw = project.hasProperty('sonarPw') ? project.sonarPw : 'unknown' 176 | 177 | def jdbcUser = project.hasProperty('jdbcUser') ? project.jdbcUser : 'unknown' 178 | def jdbcPw = project.hasProperty('jdbcPw') ? project.jdbcPw : 'unknown' 179 | 180 | properties { 181 | property "sonar.sourceEncoding", "UTF-8" 182 | property "sonar.host.url", "https://www.qaware.de/sonarqube" 183 | property "sonar.login", "$sonarUser" 184 | property "sonar.password", "$sonarPw" 185 | property "sonar.jdbc.url", "jdbc:mysql://nio-prj-2.intern.qaware.de:3306/sonardb?useUnicode=true&characterEncoding=utf8" 186 | property "sonar.jdbc.driverClassName", "com.mysql.jdbc.Driver" 187 | property "sonar.jdbc.username", "$jdbcUser" 188 | property "sonar.jdbc.password", "$jdbcPw" 189 | property "sonar.projectName", "ChronixDB-Storage" 190 | property "sonar.projectKey", "de.qaware.chronix:chronix.storage" 191 | property "sonar.projectVersion", "$project.version" 192 | 193 | } 194 | } 195 | -------------------------------------------------------------------------------- /chronix-storage-integration/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (C) 2016 QAware GmbH 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
-------------------------------------------------------------------------------- /chronix-storage-integration/build.gradle: -------------------------------------------------------------------------------- 1 | dependencies { 2 | testCompile 'de.qaware.chronix:chronix-api:0.2-beta' 3 | testCompile 'de.qaware.chronix:chronix-timeseries:0.3.2-beta' 4 | testCompile 'de.qaware.chronix:chronix-timeseries-converter:0.3.2-beta' 5 | 6 | testCompile 'org.apache.lucene:lucene-core:7.1.0' 7 | testCompile 'org.apache.lucene:lucene-analyzers-common:7.1.0' 8 | testCompile 'org.apache.lucene:lucene-queryparser:7.1.0' 9 | 10 | testCompile project(':chronix-storage') 11 | } -------------------------------------------------------------------------------- /chronix-storage-integration/src/test/groovy/de/qaware/chronix/solr/ChronixClientTestIT.groovy: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package de.qaware.chronix.solr 17 | 18 | import de.qaware.chronix.ChronixClient 19 | import de.qaware.chronix.converter.MetricTimeSeriesConverter 20 | import de.qaware.chronix.lucene.client.ChronixLuceneStorage 21 | import de.qaware.chronix.lucene.client.LuceneIndex 22 | import de.qaware.chronix.timeseries.MetricTimeSeries 23 | import org.apache.lucene.analysis.standard.StandardAnalyzer 24 | import org.apache.lucene.queryparser.classic.QueryParser 25 | import org.apache.lucene.search.Query 26 | import org.apache.lucene.store.FSDirectory 27 | import org.slf4j.Logger 28 | import org.slf4j.LoggerFactory 29 | import spock.lang.Shared 30 | import spock.lang.Specification 31 | 32 | import java.nio.file.Path 33 | import java.nio.file.Paths 34 | import java.text.DecimalFormat 35 | import java.util.function.BinaryOperator 36 | import java.util.function.Function 37 | import java.util.stream.Collectors 38 | 39 | /** 40 | * Tests the integration of Chronix with Lucene 41 | * 42 | * @author f.lautenschlager 43 | */ 44 | class ChronixClientTestIT extends Specification { 45 | 46 | private static final Logger LOGGER = LoggerFactory.getLogger(ChronixClientTestIT.class); 47 | 48 | //Test subjects 49 | @Shared 50 | ChronixClient chronix 51 | 52 | @Shared 53 | LuceneIndex luceneIndex 54 | 55 | @Shared 56 | def listStringField = ["List first part", "List second part"] 57 | @Shared 58 | def listIntField = [1I, 2I] 59 | @Shared 60 | def listLongField = [11L, 25L] 61 | @Shared 62 | def listDoubleField = [1.5D, 2.6D] 63 | 64 | 65 | @Shared 66 | Function groupBy = new Function() { 67 | @Override 68 | String apply(MetricTimeSeries ts) { 69 | StringBuilder metricKey = new StringBuilder(); 70 | 71 | metricKey.append(ts.attribute("host")).append("-") 72 | .append(ts.attribute("source")).append("-") 73 | .append(ts.attribute("group")).append("-") 74 | .append(ts.name) 75 | 76 | return metricKey.toString() 77 | } 78 | } 79 | 80 | @Shared 81 | BinaryOperator reduce = new 
BinaryOperator() { 82 | @Override 83 | MetricTimeSeries apply(MetricTimeSeries t1, MetricTimeSeries t2) { 84 | t1.addAll(t2.getTimestampsAsArray(), t2.getValuesAsArray()) 85 | t1.getAttributesReference().putAll(t2.getAttributesReference()) 86 | return t1 87 | } 88 | } 89 | 90 | def setupSpec() { 91 | given: 92 | LOGGER.info("Setting up the integration test.") 93 | chronix = new ChronixClient(new MetricTimeSeriesConverter<>(), new ChronixLuceneStorage(200, groupBy, reduce)) 94 | Path path = Paths.get("build/lucene") 95 | def directory = FSDirectory.open(path) 96 | def analyzer = new StandardAnalyzer() 97 | luceneIndex = new LuceneIndex(directory, analyzer) 98 | 99 | 100 | when: "We first clean the index to ensure that no old data is loaded." 101 | luceneIndex.getOpenWriter().deleteAll() 102 | luceneIndex.getOpenWriter().commit() 103 | 104 | LOGGER.info("Adding data to Chronix.") 105 | importTimeSeriesData() 106 | //we do a hart commit - only for testing purposes 107 | luceneIndex.getOpenWriter().commit() 108 | 109 | then: 110 | true 111 | 112 | 113 | } 114 | 115 | def importTimeSeriesData() { 116 | def url = ChronixClientTestIT.getResource("/timeSeries") 117 | def tsDir = new File(url.toURI()) 118 | 119 | tsDir.listFiles().each { File file -> 120 | LOGGER.info("Processing file {}", file) 121 | def documents = new HashMap() 122 | 123 | def attributes = file.name.split("_") 124 | def onlyOnce = true 125 | def nf = DecimalFormat.getInstance(Locale.ENGLISH) 126 | 127 | def filePoints = 0 128 | 129 | file.splitEachLine(";") { fields -> 130 | //Its the first line of a csv file 131 | if ("Date" == fields[0]) { 132 | if (onlyOnce) { 133 | fields.subList(1, fields.size()).eachWithIndex { String field, int i -> 134 | def ts = new MetricTimeSeries.Builder(field,"metric") 135 | .attribute("host", attributes[0]) 136 | .attribute("source", attributes[1]) 137 | .attribute("group", attributes[2]) 138 | 139 | //Add some generic fields an values 140 | .attribute("myIntField", 5I) 141 | .attribute("myLongField", 8L) 142 | .attribute("myDoubleField", 5.5D) 143 | .attribute("myByteField", "String as byte".getBytes("UTF-8")) 144 | .attribute("myStringList", listStringField) 145 | .attribute("myIntList", listIntField) 146 | .attribute("myLongList", listLongField) 147 | .attribute("myDoubleList", listDoubleField) 148 | .build() 149 | documents.put(i, ts) 150 | 151 | } 152 | } 153 | } else { 154 | //First field is the timestamp: 26.08.2013 00:00:17.361 155 | def date = Date.parse("dd.MM.yyyy HH:mm:ss.SSS", fields[0]) 156 | fields.subList(1, fields.size()).eachWithIndex { String value, int i -> 157 | documents.get(i).add(date.getTime(), nf.parse(value).doubleValue()) 158 | filePoints = i 159 | 160 | } 161 | } 162 | onlyOnce = false 163 | } 164 | chronix.add(documents.values(), luceneIndex) 165 | def updateResponse = luceneIndex.getOpenWriter().commit() 166 | LOGGER.info("Update Response of Commit is {}", updateResponse) 167 | } 168 | } 169 | 170 | def "Test add and query time series to Chronix with Solr"() { 171 | when: 172 | //query all documents 173 | List timeSeries = chronix.stream(luceneIndex, createQuery("*:*")).collect(Collectors.toList()) 174 | 175 | then: 176 | timeSeries.size() == 26i 177 | def selectedTimeSeries = timeSeries.get(0) 178 | 179 | selectedTimeSeries.size() >= 7000 180 | selectedTimeSeries.attribute("myIntField") == 5 181 | selectedTimeSeries.attribute("myLongField") == 8L 182 | selectedTimeSeries.attribute("myDoubleField") == 5.5D 183 | selectedTimeSeries.attribute("myByteField") == "String 
as byte".getBytes("UTF-8") 184 | selectedTimeSeries.attribute("myStringList") == listStringField 185 | selectedTimeSeries.attribute("myIntList") == listIntField 186 | selectedTimeSeries.attribute("myLongList") == listLongField 187 | selectedTimeSeries.attribute("myDoubleList") == listDoubleField 188 | } 189 | 190 | 191 | Query createQuery(String searchString) { 192 | QueryParser queryParser = new QueryParser("name", luceneIndex.getOpenWriter().getAnalyzer()) 193 | return queryParser.parse(searchString) 194 | } 195 | 196 | } 197 | -------------------------------------------------------------------------------- /chronix-storage-integration/src/test/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | %d %p %c{1.} [%t] %m%n 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /chronix-storage/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (C) 2016 QAware GmbH 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. -------------------------------------------------------------------------------- /chronix-storage/build.gradle: -------------------------------------------------------------------------------- 1 | sonarqube { 2 | properties { 3 | property "sonar.projectName", "de.qaware.chronix:chronix-storage" 4 | } 5 | } 6 | 7 | dependencies { 8 | compile 'org.apache.commons:commons-lang3:3.1' 9 | compile 'de.qaware.chronix:chronix-api:0.2-beta' 10 | compile 'org.apache.lucene:lucene-core:7.1.0' 11 | 12 | testCompile 'org.apache.lucene:lucene-analyzers-common:7.1.0' 13 | testCompile 'org.apache.lucene:lucene-queryparser:7.1.0' 14 | } 15 | 16 | task copyTestResources(type: Copy) { 17 | from "${projectDir}/src/test/resources" 18 | into "${buildDir}/classes/test" 19 | } 20 | processTestResources.dependsOn copyTestResources 21 | 22 | task javadocJar(type: Jar, dependsOn: groovydoc) { 23 | classifier = 'javadoc' 24 | from groovydoc.destinationDir 25 | } 26 | 27 | artifacts { 28 | archives sourcesJar 29 | archives javadocJar 30 | } 31 | 32 | def pomConfig = { 33 | 34 | inceptionYear '2016' 35 | 36 | scm { 37 | connection "scm:git:${project.scmUrl}" 38 | developerConnection "scm:git:${project.scmUrl}" 39 | url project.websiteUrl 40 | } 41 | 42 | issueManagement { 43 | system 'GitHub' 44 | url project.issueTrackerUrl 45 | } 46 | 47 | licenses { 48 | license([:]) { 49 | name 'The Apache Software License, Version 2.0' 50 | url 'http://www.apache.org/licenses/LICENSE-2.0.txt' 51 | distribution 'repo' 52 | } 53 | } 54 | 55 | organisation { 56 | name 'QAware GmbH' 57 | url 'https://www.qaware.de' 58 | } 59 | 60 | developers { 61 | developer { 62 | id 'florianlautenschlager' 63 | name 'Florian Lautenschlager' 64 | email 'florian.lautenschlager@qaware.de' 65 | organization 'QAware GmbH' 66 | organizationUrl 'https://www.qaware.de' 67 | roles { role 'Developer' } 68 | 
} 69 | } 70 | } 71 | 72 | publishing { 73 | publications { 74 | chronixStorage(MavenPublication) { 75 | from components.java 76 | artifact sourcesJar 77 | artifact javadocJar 78 | 79 | pom.withXml { 80 | asNode().appendNode('name', project.displayName) 81 | asNode().appendNode('description', project.description) 82 | asNode().appendNode('url', project.websiteUrl) 83 | 84 | asNode().children().last() + pomConfig 85 | } 86 | } 87 | } 88 | } 89 | 90 | bintray { 91 | user = project.hasProperty('bintrayUsername') ? project.bintrayUsername : 'unknown' 92 | key = project.hasProperty('bintrayApiKey') ? project.bintrayApiKey : 'unknown' 93 | publications = ['chronixStorage'] 94 | dryRun = false 95 | publish = true 96 | pkg { 97 | repo = project.bintrayRepo 98 | name = project.name 99 | desc = project.description 100 | licenses = ['Apache-2.0'] 101 | labels = ['java', 'time series', 'chronix', 'storage'] 102 | websiteUrl = project.websiteUrl 103 | issueTrackerUrl = project.issueTrackerUrl 104 | vcsUrl = project.scmUrl 105 | publicDownloadNumbers = true 106 | version { 107 | name = project.version 108 | desc = project.description 109 | released = new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZZ") 110 | vcsTag = "v${project.version}" 111 | attributes = [ 112 | 'chronix-storage': ['de.qaware.chronix', project.group, project.name].join(':') 113 | ] 114 | mavenCentralSync { 115 | sync = false 116 | } 117 | } 118 | } 119 | } -------------------------------------------------------------------------------- /chronix-storage/gradle.properties: -------------------------------------------------------------------------------- 1 | displayName=Chronix Storage 2 | description=The Chronix Storage that uses Lucene 3 | websiteUrl=https://github.com/ChronixDB/chronix.storage 4 | scmUrl=https://github.com/ChronixDB/chronix.storage.git 5 | issueTrackerUrl=https://github.com/ChronixDB/chronix.storage/issues 6 | bintrayRepo=maven -------------------------------------------------------------------------------- /chronix-storage/src/main/java/de/qaware/chronix/lucene/client/ChronixLuceneStorage.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */
16 | package de.qaware.chronix.lucene.client;
17 |
18 | import de.qaware.chronix.converter.TimeSeriesConverter;
19 | import de.qaware.chronix.lucene.client.add.LuceneAddingService;
20 | import de.qaware.chronix.lucene.client.stream.LuceneStreamingService;
21 | import de.qaware.chronix.streaming.StorageService;
22 | import org.apache.lucene.search.Query;
23 | import org.slf4j.Logger;
24 | import org.slf4j.LoggerFactory;
25 |
26 | import java.io.IOException;
27 | import java.util.Collection;
28 | import java.util.Objects;
29 | import java.util.Spliterator;
30 | import java.util.Spliterators;
31 | import java.util.function.BinaryOperator;
32 | import java.util.function.Function;
33 | import java.util.function.Supplier;
34 | import java.util.stream.Stream;
35 | import java.util.stream.StreamSupport;
36 |
37 | import static java.util.stream.Collectors.groupingBy;
38 |
39 | /**
40 | * Lucene storage implementation of the Chronix StorageService interface
41 | *
42 | * @param <T> - the time series type
43 | */
44 | public final class ChronixLuceneStorage<T> implements StorageService<T, LuceneIndex, Query> {
45 | private static final Logger LOGGER = LoggerFactory.getLogger(ChronixLuceneStorage.class);
46 |
47 | private final int nrOfDocumentPerBatch;
48 | private final BinaryOperator<T> reduce;
49 | private final Function<T, String> groupBy;
50 |
51 | /**
52 | * Constructs a Chronix storage that is based on Apache Lucene.
53 | *
54 | * @param nrOfDocumentPerBatch number of documents that are processed in one batch
55 | * @param groupBy the function to group time series records
56 | * @param reduce the function to reduce the grouped time series records into one time series
57 | */
58 | public ChronixLuceneStorage(final int nrOfDocumentPerBatch, final Function<T, String> groupBy, final BinaryOperator<T> reduce) {
59 | this.nrOfDocumentPerBatch = nrOfDocumentPerBatch;
60 | this.groupBy = groupBy;
61 | this.reduce = reduce;
62 | }
63 |
64 | /**
65 | * Queries the Lucene index and returns the time series in a stream.
66 | *
67 | * @param converter the time series converter
68 | * @param index the lucene index
69 | * @param query the user query
70 | * @return a stream of time series
71 | */
72 | @Override
73 | public Stream<T> stream(TimeSeriesConverter<T> converter, LuceneIndex index, Query query) {
74 | LOGGER.debug("Streaming data from lucene using converter {}, Lucene Index {}, and Lucene Query {}", converter, index, query);
75 | try {
76 | LuceneStreamingService<T> luceneStreamingService = new LuceneStreamingService<>(converter, query, index.getSearcher(), nrOfDocumentPerBatch);
77 |
78 | return StreamSupport.stream(Spliterators.spliteratorUnknownSize(luceneStreamingService, Spliterator.SIZED), false)
79 | .filter(Objects::nonNull)//Remove empty results
80 | .collect(groupingBy(groupBy)).values().stream()
81 | .map(ts -> ts.stream().reduce(reduce).get());
82 |
83 | } catch (IOException e) {
84 | LOGGER.error("Could not open the lucene index searcher", e);
85 | }
86 | return Stream.empty();
87 | }
88 |
89 | /**
90 | * Adds the given collection of documents to the lucene index.
91 | * Note: The function does not call commit on the index writer. Documents are just added to lucene.
92 | *
93 | * @param converter the converter matching the type <T>
94 | * @param documents the documents of type <T>
95 | * @param luceneIndex the lucene index
96 | * @return true if the documents are added to the lucene index.
97 | */
98 | @Override
99 | public boolean add(TimeSeriesConverter<T> converter, Collection<T> documents, LuceneIndex luceneIndex) {
100 | try {
101 | return LuceneAddingService.add(converter, documents, luceneIndex.getOpenWriter());
102 | } catch (IOException e) {
103 | LOGGER.error("Could not open lucene index writer", e);
104 | }
105 | return false;
106 | }
107 |
108 |
109 | }
110 |
--------------------------------------------------------------------------------
/chronix-storage/src/main/java/de/qaware/chronix/lucene/client/ChronixLuceneStorageConstants.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2016 QAware GmbH
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package de.qaware.chronix.lucene.client;
17 |
18 | /**
19 | * Some constants used within the chronix lucene storage
20 | *
21 | * @author f.lautenschlager
22 | */
23 | public final class ChronixLuceneStorageConstants {
24 |
25 | public static final String MULTI_VALUE_FIELD_DELIMITER = "::mv::";
26 |
27 | private ChronixLuceneStorageConstants() {
28 |
29 | }
30 |
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/chronix-storage/src/main/java/de/qaware/chronix/lucene/client/LuceneIndex.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2016 QAware GmbH
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */ 16 | package de.qaware.chronix.lucene.client; 17 | 18 | import org.apache.lucene.analysis.Analyzer; 19 | import org.apache.lucene.index.DirectoryReader; 20 | import org.apache.lucene.index.IndexReader; 21 | import org.apache.lucene.index.IndexWriter; 22 | import org.apache.lucene.index.IndexWriterConfig; 23 | import org.apache.lucene.search.IndexSearcher; 24 | import org.apache.lucene.store.Directory; 25 | import org.slf4j.Logger; 26 | import org.slf4j.LoggerFactory; 27 | 28 | import java.io.IOException; 29 | 30 | /** 31 | * Class that holds the lucene index writer and searcher 32 | * 33 | * @author f.lautenschlager 34 | */ 35 | public final class LuceneIndex { 36 | 37 | private static final Logger LOGGER = LoggerFactory.getLogger(LuceneIndex.class); 38 | 39 | private IndexSearcher searcher; 40 | private IndexReader reader; 41 | private IndexWriter writer; 42 | 43 | private final Directory directory; 44 | private final Analyzer analyzer; 45 | 46 | /** 47 | * Constructs and lucene index 48 | * 49 | * @param directory the directory of the index (RAM, File System, ...) 50 | * @param analyzer the analyzer for the reader and writer 51 | */ 52 | public LuceneIndex(Directory directory, Analyzer analyzer) { 53 | this.directory = directory; 54 | this.analyzer = analyzer; 55 | } 56 | 57 | /** 58 | * Initializes the searcher, if not initialized. 59 | * Closes the writer and opens a searcher. 60 | * 61 | * @return the lucene index searcher 62 | * @throws IOException if the underlying lucene reader can not be opened or created 63 | */ 64 | public IndexSearcher getSearcher() throws IOException { 65 | if (searcher == null && readerClosed()) { 66 | reader = getOpenReader(); 67 | searcher = new IndexSearcher(reader); 68 | } 69 | return searcher; 70 | 71 | } 72 | 73 | 74 | /** 75 | * This method returns an open writer for the given directory. 76 | * If the reader is open this method will close the reader. 77 | * 78 | * @return an open lucene writer 79 | * @throws IOException if the lucene writer can not be opened or created 80 | */ 81 | public IndexWriter getOpenWriter() throws IOException { 82 | if (writerClosed()) { 83 | LOGGER.debug("Closing reader and opening writer."); 84 | if (readerOpen()) { 85 | LOGGER.debug("Closing reader."); 86 | reader.close(); 87 | } 88 | IndexWriterConfig config = new IndexWriterConfig(analyzer); 89 | writer = new IndexWriter(directory, config); 90 | } 91 | return writer; 92 | } 93 | 94 | /** 95 | * Closes the index writer if it is open. 96 | * Then opens the index reader. 97 | * 98 | * @return an open lucene reader. 
99 | * @throws IOException if the lucene reader can not be opened or created 100 | */ 101 | public IndexReader getOpenReader() throws IOException { 102 | if (writerOpen()) { 103 | LOGGER.debug("Closing writer"); 104 | writer.close(); 105 | } 106 | if (readerClosed()) { 107 | LOGGER.debug("Opening reader"); 108 | reader = DirectoryReader.open(directory); 109 | } 110 | return reader; 111 | } 112 | 113 | 114 | /** 115 | * @return the directory holding the index 116 | */ 117 | public Directory getDirectory() { 118 | return directory; 119 | } 120 | 121 | /** 122 | * @return true if the reader is open 123 | */ 124 | private boolean readerOpen() { 125 | return !readerClosed(); 126 | } 127 | 128 | /** 129 | * @return true if the reader is null or closed 130 | */ 131 | private boolean readerClosed() { 132 | return reader == null || reader.getRefCount() == 0; 133 | } 134 | 135 | /** 136 | * @return true if the writer is null or closed 137 | */ 138 | private boolean writerClosed() { 139 | return writer == null || !writer.isOpen(); 140 | } 141 | 142 | /** 143 | * @return true if the writer is open 144 | */ 145 | private boolean writerOpen() { 146 | return !writerClosed(); 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /chronix-storage/src/main/java/de/qaware/chronix/lucene/client/add/LuceneAddingService.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package de.qaware.chronix.lucene.client.add; 17 | 18 | import de.qaware.chronix.converter.BinaryTimeSeries; 19 | import de.qaware.chronix.converter.TimeSeriesConverter; 20 | import de.qaware.chronix.lucene.client.ChronixLuceneStorageConstants; 21 | import org.apache.lucene.document.Document; 22 | import org.apache.lucene.document.Field; 23 | import org.apache.lucene.document.StoredField; 24 | import org.apache.lucene.document.TextField; 25 | import org.apache.lucene.index.IndexWriter; 26 | import org.apache.lucene.util.BytesRef; 27 | import org.slf4j.Logger; 28 | import org.slf4j.LoggerFactory; 29 | 30 | import java.io.IOException; 31 | import java.util.Arrays; 32 | import java.util.Collection; 33 | 34 | /** 35 | * A service class to add time series to lucene. 36 | **/ 37 | public final class LuceneAddingService { 38 | 39 | private static final Logger LOGGER = LoggerFactory.getLogger(LuceneAddingService.class); 40 | 41 | private LuceneAddingService() { 42 | //Avoid instances 43 | } 44 | 45 | /** 46 | * Adds the given collection of time series to the lucene index. 47 | * Converts the time series using the default object types of java and available lucene fields. 48 | * If an attribute of a time series is user defined data type then it is ignored. 49 | *

50 | * Note: The add method does not commit the time series.
51 | *
52 | * @param converter the converter to convert the time series into a lucene document
53 | * @param timeSeries the collection with time series
54 | * @param indexWriter the lucene index writer
55 | * @return true if successful, otherwise false
56 | */
57 | public static <T> boolean add(TimeSeriesConverter<T> converter, Collection<T> timeSeries, IndexWriter indexWriter) {
58 |
59 | if (timeSeries == null || timeSeries.isEmpty()) {
60 | LOGGER.debug("Collection is empty. Nothing to commit");
61 | return true;
62 | }
63 |
64 | timeSeries.parallelStream().forEach(ts -> {
65 | try {
66 | indexWriter.addDocument(convert(ts, converter));
67 | } catch (IOException e) {
68 | LOGGER.error("Could not add documents to lucene.", e);
69 | }
70 | });
71 | return true;
72 | }
73 |
74 | /**
75 | * Converts a time series of type <T> to a lucene document.
76 | * Handles the default java object types (e.g. double, int, array, collections, ...)
77 | * and wraps them into the matching lucene fields (int -> IntField).
78 | *
79 | * @param ts the time series of type <T>
80 | * @return a filled lucene document
81 | */
82 | private static <T> Document convert(T ts, TimeSeriesConverter<T> converter) {
83 | BinaryTimeSeries series = converter.to(ts);
84 | Document document = new Document();
85 |
86 | series.getFields().entrySet().forEach(entry -> {
87 |
88 | if (entry.getValue() instanceof Number) {
89 | handleNumbers(document, entry.getKey(), entry.getValue());
90 | } else if (entry.getValue() instanceof String || entry.getValue() instanceof byte[]) {
91 | handleStringsAndBytes(document, entry.getKey(), entry.getValue());
92 | } else if (entry.getValue() instanceof Collection || entry.getValue() instanceof Object[]) {
93 | handleArraysAndIterable(document, entry.getKey(), entry.getValue());
94 | } else {
95 | LOGGER.debug("Field {} could not be handled. Type is not supported", entry);
96 | }
97 | });
98 | return document;
99 | }
100 |
101 | /**
102 | * Tries to cast field value (object) to an array or iterable.
103 | * If the field value is not an array or iterable then the method ignores the field.
104 | *

105 | * If the value is an array or iterable then each element is wrapped into a matching lucene field (Field for String,
106 | * StoredField for byte[]) and added to the lucene document.
107 | *
108 | * @param document the lucene document to add the values to
109 | * @param fieldName the field name
110 | * @param fieldValue the field value
111 | */
112 | private static void handleArraysAndIterable(Document document, String fieldName, Object fieldValue) {
113 |
114 | //assign the value as it is modified below
115 | Object modifiedFieldValue = fieldValue;
116 |
117 | //If we have an array, simply convert it into a list.
118 | if (fieldValue != null && fieldValue.getClass().isArray()) {
119 | modifiedFieldValue = Arrays.asList((Object[]) fieldValue);
120 | }
121 | //Handle all iterable data types
122 | if (modifiedFieldValue instanceof Iterable) {
123 | Iterable objects = (Iterable) modifiedFieldValue;
124 |
125 | int fieldCounter = 0;
126 | String modifiedFieldName = fieldName + ChronixLuceneStorageConstants.MULTI_VALUE_FIELD_DELIMITER;
127 | for (Object o : objects) {
128 | fieldCounter++;
129 | handleNumbers(document, modifiedFieldName + fieldCounter, o);
130 | handleStringsAndBytes(document, modifiedFieldName + fieldCounter, o);
131 | }
132 | }
133 | }
134 |
135 | /**
136 | * Tries to cast field value (object) to a string or byte[].
137 | * If the field value is not a string or a byte[] then the method ignores the field.
138 | *

139 | * If the value is a string or byte[] then the value is wrapped into a matching lucene field (Field for String,
140 | * StoredField for byte[]) and added to the lucene document.
141 | *
142 | * @param document the lucene document to add the value to
143 | * @param fieldName the field name
144 | * @param fieldValue the field value
145 | */
146 | private static void handleStringsAndBytes(Document document, String fieldName, Object fieldValue) {
147 | if (fieldValue instanceof String) {
148 | document.add(new Field(fieldName, fieldValue.toString(), TextField.TYPE_STORED));
149 | } else if (fieldValue instanceof byte[]) {
150 | document.add(new StoredField(fieldName, new BytesRef((byte[]) fieldValue)));
151 | }
152 | }
153 |
154 | /**
155 | * Tries to cast field value (object) to a number (double, integer, float, long).
156 | * If the field value is not a number then the method ignores the field.
157 | *

158 | * If the value is a number then the value is wrapped into a matching lucene field (IntField, DoubleField, ...)
159 | * and added to the lucene document.
160 | *
161 | * @param document the lucene document to add the number to
162 | * @param fieldName the field name
163 | * @param fieldValue the field value
164 | */
165 | private static void handleNumbers(Document document, String fieldName, Object fieldValue) {
166 | if (fieldValue instanceof Double) {
167 | document.add(new StoredField(fieldName, Double.parseDouble(fieldValue.toString())));
168 | } else if (fieldValue instanceof Integer) {
169 | document.add(new StoredField(fieldName, Integer.parseInt(fieldValue.toString())));
170 | } else if (fieldValue instanceof Float) {
171 | document.add(new StoredField(fieldName, Float.parseFloat(fieldValue.toString())));
172 | } else if (fieldValue instanceof Long) {
173 | document.add(new StoredField(fieldName, Long.parseLong(fieldValue.toString())));
174 | } else {
175 | LOGGER.warn("Could not extract value from field {} with value {}", fieldName, fieldValue);
176 | }
177 |
178 | }
179 |
180 | }
181 |
--------------------------------------------------------------------------------
/chronix-storage/src/main/java/de/qaware/chronix/lucene/client/stream/LuceneStreamingService.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2016 QAware GmbH
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package de.qaware.chronix.lucene.client.stream;
17 |
18 | import com.google.common.util.concurrent.Futures;
19 | import com.google.common.util.concurrent.ListenableFuture;
20 | import com.google.common.util.concurrent.ListeningExecutorService;
21 | import com.google.common.util.concurrent.MoreExecutors;
22 | import de.qaware.chronix.Schema;
23 | import de.qaware.chronix.converter.TimeSeriesConverter;
24 | import de.qaware.chronix.lucene.client.stream.date.DateQueryParser;
25 | import org.apache.lucene.document.Document;
26 | import org.apache.lucene.search.IndexSearcher;
27 | import org.apache.lucene.search.Query;
28 | import org.apache.lucene.search.ScoreDoc;
29 | import org.slf4j.Logger;
30 | import org.slf4j.LoggerFactory;
31 |
32 | import java.io.IOException;
33 | import java.text.ParseException;
34 | import java.util.Iterator;
35 | import java.util.concurrent.Executors;
36 |
37 | /**
38 | * The lucene streaming service lets one stream data from a lucene index.
39 | * 40 | * @param type of the returned class 41 | * @author f.lautenschlager 42 | */ 43 | public class LuceneStreamingService implements Iterator { 44 | 45 | /** 46 | * The class logger 47 | */ 48 | private static final Logger LOGGER = LoggerFactory.getLogger(LuceneStreamingService.class); 49 | 50 | /** 51 | * The query and connection to solr 52 | */ 53 | private final Query query; 54 | private final IndexSearcher searcher; 55 | 56 | /** 57 | * Converter for converting the documents 58 | */ 59 | private final TimeSeriesConverter converter; 60 | 61 | /** 62 | * Query parameters 63 | */ 64 | private int nrOfTimeSeriesPerBatch; 65 | private long nrOfAvailableTimeSeries = -1; 66 | private int currentDocumentCount = 0; 67 | 68 | 69 | /** 70 | * The executor service to do the work asynchronously 71 | */ 72 | private final ListeningExecutorService service = MoreExecutors.listeningDecorator(Executors.newCachedThreadPool()); 73 | 74 | 75 | /** 76 | * Start and end of the query to filter points on client side 77 | */ 78 | private long queryStart; 79 | private long queryEnd; 80 | 81 | private TimeSeriesHandler timeSeriesHandler; 82 | 83 | /** 84 | * Constructs a streaming service 85 | * 86 | * @param converter - the converter to convert documents 87 | * @param query - the lucene query 88 | * @param searcher - the index search 89 | * @param nrOfTimeSeriesPerBatch - the number of time series that are read by one query 90 | */ 91 | public LuceneStreamingService(TimeSeriesConverter converter, Query query, IndexSearcher searcher, int nrOfTimeSeriesPerBatch) { 92 | this.converter = converter; 93 | this.query = query; 94 | this.searcher = searcher; 95 | this.nrOfTimeSeriesPerBatch = nrOfTimeSeriesPerBatch; 96 | this.timeSeriesHandler = new TimeSeriesHandler<>(200); 97 | parseDates(query); 98 | } 99 | 100 | private void parseDates(Query query) { 101 | DateQueryParser dateRangeParser = new DateQueryParser(new String[]{Schema.START, Schema.END}); 102 | long[] startAndEnd = new long[0]; 103 | try { 104 | startAndEnd = dateRangeParser.getNumericQueryTerms(query.toString()); 105 | 106 | } catch (ParseException e) { 107 | LOGGER.warn("Could not parse start or end", e); 108 | } 109 | this.queryStart = or(startAndEnd[0], -1, 0); 110 | this.queryEnd = or(startAndEnd[1], -1, Long.MAX_VALUE); 111 | } 112 | 113 | private long or(long value, long condition, long or) { 114 | if (value == condition) { 115 | return or; 116 | } else { 117 | return value; 118 | } 119 | } 120 | 121 | @Override 122 | public boolean hasNext() { 123 | if (nrOfAvailableTimeSeries == -1) { 124 | try { 125 | nrOfAvailableTimeSeries = searcher.count(query); 126 | } catch (IOException e) { 127 | LOGGER.error("Could not count the found documents", e); 128 | } 129 | } 130 | 131 | return currentDocumentCount < nrOfAvailableTimeSeries; 132 | } 133 | 134 | @Override 135 | public T next() { 136 | if (currentDocumentCount % nrOfTimeSeriesPerBatch == 0) { 137 | try { 138 | ScoreDoc[] hits = searcher.search(query, nrOfTimeSeriesPerBatch).scoreDocs; 139 | convertHits(hits); 140 | } catch (IOException e) { 141 | LOGGER.info("Could not search documents"); 142 | } 143 | } 144 | currentDocumentCount++; 145 | return timeSeriesHandler.take(); 146 | } 147 | 148 | 149 | private void convertHits(ScoreDoc[] hits) throws IOException { 150 | for (ScoreDoc hit : hits) { 151 | Document hitDoc = searcher.doc(hit.doc); 152 | ListenableFuture future = service.submit(new TimeSeriesConverterCaller<>(hitDoc, converter, queryStart, queryEnd)); 153 | Futures.addCallback(future, 
timeSeriesHandler); 154 | } 155 | } 156 | 157 | } 158 | -------------------------------------------------------------------------------- /chronix-storage/src/main/java/de/qaware/chronix/lucene/client/stream/TimeSeriesConverterCaller.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package de.qaware.chronix.lucene.client.stream; 17 | 18 | 19 | import de.qaware.chronix.converter.BinaryTimeSeries; 20 | import de.qaware.chronix.converter.TimeSeriesConverter; 21 | import de.qaware.chronix.lucene.client.ChronixLuceneStorageConstants; 22 | import org.apache.lucene.document.Document; 23 | import org.apache.lucene.index.IndexableField; 24 | import org.slf4j.Logger; 25 | import org.slf4j.LoggerFactory; 26 | 27 | import java.util.ArrayList; 28 | import java.util.HashMap; 29 | import java.util.List; 30 | import java.util.Map; 31 | import java.util.concurrent.Callable; 32 | 33 | /** 34 | * Converts the lucene document into a binary time series and calls the given document converter. 35 | * 36 | * @param the type of the returned time series class 37 | * @author f.lautenschlager 38 | */ 39 | public class TimeSeriesConverterCaller implements Callable { 40 | 41 | private static final Logger LOGGER = LoggerFactory.getLogger(TimeSeriesConverterCaller.class); 42 | 43 | private final Document document; 44 | private final TimeSeriesConverter documentConverter; 45 | private final long queryEnd; 46 | private final long queryStart; 47 | 48 | /** 49 | * Constructs a SolrDocumentConverter. 50 | * 51 | * @param document - the fields and values 52 | * @param documentConverter - the concrete document converter 53 | */ 54 | public TimeSeriesConverterCaller(final Document document, final TimeSeriesConverter documentConverter, long queryStart, long queryEnd) { 55 | this.document = document; 56 | this.documentConverter = documentConverter; 57 | this.queryStart = queryStart; 58 | this.queryEnd = queryEnd; 59 | } 60 | 61 | /** 62 | * Converts the solr document given in the constructor into a time series of type 63 | * 64 | * @return a time series of type 65 | * @throws Exception if bad things happen. 
66 | */ 67 | @Override 68 | @SuppressWarnings("PMD.SignatureDeclareThrowsException") 69 | public T call() throws Exception { 70 | BinaryTimeSeries.Builder timeSeriesBuilder = new BinaryTimeSeries.Builder(); 71 | 72 | Map> multivalued = new HashMap<>(); 73 | 74 | document.forEach(attributeField -> { 75 | String key = attributeField.name(); 76 | 77 | if (key.contains(ChronixLuceneStorageConstants.MULTI_VALUE_FIELD_DELIMITER)) { 78 | key = key.substring(0, key.indexOf(ChronixLuceneStorageConstants.MULTI_VALUE_FIELD_DELIMITER)); 79 | //Handle multivalued fields 80 | if (!multivalued.containsKey(key)) { 81 | multivalued.put(key, new ArrayList<>()); 82 | } 83 | multivalued.get(key).add(convert(attributeField)); 84 | 85 | } else { 86 | timeSeriesBuilder.field(key, convert(attributeField)); 87 | } 88 | }); 89 | multivalued.forEach(timeSeriesBuilder::field); 90 | 91 | LOGGER.debug("Calling document converter with {}", document); 92 | T timeSeries = documentConverter.from(timeSeriesBuilder.build(), queryStart, queryEnd); 93 | LOGGER.debug("Returning time series {} to callee", timeSeries); 94 | return timeSeries; 95 | } 96 | 97 | /** 98 | * Adds user defined attributes to the binary time series builder. 99 | * Checks if the attribute is of type byte[], String, Number or Collection. 100 | * Otherwise the attribute is ignored. 101 | * 102 | * @param field the attribute field 103 | */ 104 | private Object convert(IndexableField field) { 105 | LOGGER.debug("Reading field {} ", field); 106 | 107 | if (field.numericValue() != null) { 108 | return field.numericValue(); 109 | } else if (field.stringValue() != null) { 110 | return field.stringValue(); 111 | } else if (field.binaryValue() != null) { 112 | return field.binaryValue().bytes; 113 | } else { 114 | LOGGER.debug("Field {} could not be handled. Type is not supported", field); 115 | return null; 116 | } 117 | } 118 | } 119 | 120 | -------------------------------------------------------------------------------- /chronix-storage/src/main/java/de/qaware/chronix/lucene/client/stream/TimeSeriesHandler.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package de.qaware.chronix.lucene.client.stream; 17 | 18 | import com.google.common.util.concurrent.FutureCallback; 19 | import org.slf4j.Logger; 20 | import org.slf4j.LoggerFactory; 21 | 22 | import java.util.concurrent.ArrayBlockingQueue; 23 | import java.util.concurrent.BlockingQueue; 24 | import java.util.concurrent.TimeUnit; 25 | 26 | /** 27 | * Class to handle future callbacks 28 | * 29 | * @param the element type 30 | * @author f.lautenschlager 31 | */ 32 | public class TimeSeriesHandler implements FutureCallback { 33 | 34 | private static final Logger LOGGER = LoggerFactory.getLogger(TimeSeriesHandler.class); 35 | /** 36 | * The blocking queue containing elements of type t 37 | */ 38 | private BlockingQueue queue; 39 | 40 | /** 41 | * Constructs a time series callback handler 42 | * 43 | * @param nrOfTimeDocumentsPerBatch the max nr of elements in the queue 44 | */ 45 | public TimeSeriesHandler(int nrOfTimeDocumentsPerBatch) { 46 | this.queue = new ArrayBlockingQueue<>(nrOfTimeDocumentsPerBatch); 47 | } 48 | 49 | /** 50 | * On success we add the result to our blocking queue 51 | * 52 | * @param result - the resulting time series 53 | */ 54 | @Override 55 | public void onSuccess(T result) { 56 | try { 57 | LOGGER.debug("Putting {} into queue", result); 58 | queue.put(result); 59 | } catch (InterruptedException e) { 60 | LOGGER.warn("Exception occurred while putting the converted result in queue", e); 61 | } 62 | } 63 | 64 | /** 65 | * On failure we doing a log and going on 66 | * 67 | * @param t - the throwable that was thrown 68 | */ 69 | @Override 70 | public void onFailure(Throwable t) { 71 | LOGGER.warn("Exception occurred while converting documents.", t); 72 | } 73 | 74 | /** 75 | * Gets the first element in the queue. 76 | * If no element is present within one minute. 77 | * Then it returns null. 78 | * 79 | * @return the first element of type in the queue 80 | */ 81 | public T take() { 82 | try { 83 | T object = queue.poll(1, TimeUnit.MINUTES); 84 | LOGGER.debug("Getting element from queue: {}", object); 85 | return object; 86 | 87 | } catch (InterruptedException e) { 88 | LOGGER.warn("InterruptedException occurred. Returning null value to callee.", e); 89 | throw new IllegalStateException("Try to poll time series records for more than 1 Minute. Stopping.", e); 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /chronix-storage/src/main/java/de/qaware/chronix/lucene/client/stream/date/DateMathParser.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package de.qaware.chronix.lucene.client.stream.date; 17 | 18 | 19 | import java.text.ParseException; 20 | import java.util.*; 21 | import java.util.regex.Pattern; 22 | 23 | /** 24 | * A Simple Utility class for parsing "math" like strings relating to Dates. 25 | *

26 | *

27 |  * The basic syntax supports addition, subtraction and rounding at various
28 |  * levels of granularity (or "units"). Commands can be chained together
29 |  * and are parsed from left to right. '+' and '-' denote addition and
30 |  * subtraction, while '/' denotes "round". Round requires only a unit, while
31 |  * addition/subtraction require an integer value and a unit.
32 |  * Command strings must not include white space, but the "No-Op" command
33 |  * (empty string) is allowed.
34 |  *

35 | *

36 | *

 37 |  *   /HOUR
 38 |  *      ... Round to the start of the current hour
 39 |  *   /DAY
 40 |  *      ... Round to the start of the current day
 41 |  *   +2YEARS
 42 |  *      ... Exactly two years in the future from now
 43 |  *   -1DAY
 44 |  *      ... Exactly 1 day prior to now
 45 |  *   /DAY+6MONTHS+3DAYS
 46 |  *      ... 6 months and 3 days in the future from the start of
 47 |  *          the current day
 48 |  *   +6MONTHS+3DAYS/DAY
 49 |  *      ... 6 months and 3 days in the future from now, rounded
 50 |  *          down to nearest day
 51 |  * 
52 | *

53 | *

54 | * (Multiple aliases exist for the various units of time (ie: 55 | * MINUTE and MINUTES; MILLI, 56 | * MILLIS, MILLISECOND, and 57 | * MILLISECONDS.) The complete list can be found by 58 | * inspecting the keySet of {@link #CALENDAR_UNITS}) 59 | *

60 | *

61 | *

62 | * All commands are relative to a "now" which is fixed in an instance of 63 | * DateMathParser such that 64 | * p.parseMath("+0MILLISECOND").equals(p.parseMath("+0MILLISECOND")) 65 | * no matter how many wall clock milliseconds elapse between the two 66 | * distinct calls to parse (Assuming no other thread calls 67 | * "setNow" in the interim). The default value of 'now' is 68 | * the time at the moment the DateMathParser instance is 69 | * constructed, unless overridden by the {CommonParams#NOW NOW} 70 | * request param. 71 | *

72 | *

73 | *

74 | * All commands are also affected to the rules of a specified {@link TimeZone} 75 | * (including the start/end of DST if any) which determine when each arbitrary 76 | * day starts. This not only impacts rounding/adding of DAYs, but also 77 | * cascades to rounding of HOUR, MIN, MONTH, YEAR as well. The default 78 | * TimeZone used is UTC unless overridden by the 79 | * {CommonParams#TZ TZ} 80 | * request param. 81 | *

82 | */ 83 | public final class DateMathParser { 84 | 85 | private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); 86 | 87 | /** 88 | * Default TimeZone for DateMath rounding (UTC) 89 | */ 90 | private static final TimeZone DEFAULT_MATH_TZ = UTC; 91 | /** 92 | * Default Locale for DateMath rounding (Locale.ROOT) 93 | */ 94 | private static final Locale DEFAULT_MATH_LOCALE = Locale.ROOT; 95 | 96 | /** 97 | * A mapping from (uppercased) String labels idenyifying time units, 98 | * to the corresponding Calendar constant used to set/add/roll that unit 99 | * of measurement. 100 | *

101 | *

102 | * A single logical unit of time might be represented by multiple labels 103 | * for convenience (ie: DATE==DAY, 104 | * MILLI==MILLISECOND) 105 | *

106 | * 107 | * @see Calendar 108 | */ 109 | private static final Map CALENDAR_UNITS = makeUnitsMap(); 110 | 111 | private static Pattern splitter = Pattern.compile("\\b|(?<=\\d)(?=\\D)"); 112 | 113 | private TimeZone zone; 114 | private Locale loc; 115 | private Date now; 116 | 117 | /** 118 | * Default constructor that assumes UTC should be used for rounding unless 119 | * otherwise specified in the SolrRequestInfo 120 | * 121 | * @see #DEFAULT_MATH_LOCALE 122 | */ 123 | public DateMathParser() { 124 | this(null, DEFAULT_MATH_LOCALE); 125 | 126 | } 127 | 128 | /** 129 | * @param tz The TimeZone used for rounding (to determine when hours/days begin). If null, then this method defaults to the value dicated by the SolrRequestInfo if it 130 | * exists -- otherwise it uses UTC. 131 | * @param l The Locale used for rounding (to determine when weeks begin). If null, then this method defaults to en_US. 132 | * @see #DEFAULT_MATH_TZ 133 | * @see #DEFAULT_MATH_LOCALE 134 | * @see Calendar#getInstance(TimeZone, Locale) 135 | */ 136 | public DateMathParser(TimeZone tz, Locale l) { 137 | if (null == l) { 138 | loc = DEFAULT_MATH_LOCALE; 139 | } else { 140 | loc = l; 141 | } 142 | 143 | if (null == tz) { 144 | zone = DEFAULT_MATH_TZ; 145 | } else { 146 | zone = tz; 147 | } 148 | } 149 | 150 | 151 | /** 152 | * @see #CALENDAR_UNITS 153 | */ 154 | private static Map makeUnitsMap() { 155 | 156 | // NOTE: consciously choosing not to support WEEK at this time, 157 | // because of complexity in rounding down to the nearest week 158 | // arround a month/year boundry. 159 | // (Not to mention: it's not clear what people would *expect*) 160 | // 161 | // If we consider adding some time of "week" support, then 162 | // we probably need to change "Locale loc" to default to something 163 | // from a param via SolrRequestInfo as well. 164 | 165 | Map units = new HashMap<>(13); 166 | units.put("YEAR", Calendar.YEAR); 167 | units.put("YEARS", Calendar.YEAR); 168 | units.put("MONTH", Calendar.MONTH); 169 | units.put("MONTHS", Calendar.MONTH); 170 | units.put("DAY", Calendar.DATE); 171 | units.put("DAYS", Calendar.DATE); 172 | units.put("DATE", Calendar.DATE); 173 | units.put("HOUR", Calendar.HOUR_OF_DAY); 174 | units.put("HOURS", Calendar.HOUR_OF_DAY); 175 | units.put("MINUTE", Calendar.MINUTE); 176 | units.put("MINUTES", Calendar.MINUTE); 177 | units.put("SECOND", Calendar.SECOND); 178 | units.put("SECONDS", Calendar.SECOND); 179 | units.put("MILLI", Calendar.MILLISECOND); 180 | units.put("MILLIS", Calendar.MILLISECOND); 181 | units.put("MILLISECOND", Calendar.MILLISECOND); 182 | units.put("MILLISECONDS", Calendar.MILLISECOND); 183 | 184 | return units; 185 | } 186 | 187 | /** 188 | * Modifies the specified Calendar by "adding" the specified value of units 189 | * 190 | * @throws IllegalArgumentException if unit isn't recognized. 191 | * @see #CALENDAR_UNITS 192 | */ 193 | private static void add(Calendar c, int val, String unit) { 194 | Integer uu = CALENDAR_UNITS.get(unit); 195 | if (null == uu) { 196 | throw new IllegalArgumentException("Adding Unit not recognized: " + unit); 197 | } 198 | c.add(uu.intValue(), val); 199 | } 200 | 201 | /** 202 | * Modifies the specified Calendar by "rounding" down to the specified unit 203 | * 204 | * @throws IllegalArgumentException if unit isn't recognized. 
205 | * @see #CALENDAR_UNITS 206 | */ 207 | private static void round(Calendar c, String unit) { 208 | Integer uu = CALENDAR_UNITS.get(unit); 209 | if (null == uu) { 210 | throw new IllegalArgumentException("Rounding Unit not recognized: " + unit); 211 | } 212 | int u = uu; 213 | 214 | switch (u) { 215 | 216 | case Calendar.YEAR: 217 | c.clear(Calendar.MONTH); 218 | /* fall through */ 219 | case Calendar.MONTH: 220 | c.clear(Calendar.DAY_OF_MONTH); 221 | c.clear(Calendar.DAY_OF_WEEK); 222 | c.clear(Calendar.DAY_OF_WEEK_IN_MONTH); 223 | c.clear(Calendar.DAY_OF_YEAR); 224 | c.clear(Calendar.WEEK_OF_MONTH); 225 | c.clear(Calendar.WEEK_OF_YEAR); 226 | /* fall through */ 227 | case Calendar.DATE: 228 | c.clear(Calendar.HOUR_OF_DAY); 229 | c.clear(Calendar.HOUR); 230 | c.clear(Calendar.AM_PM); 231 | /* fall through */ 232 | case Calendar.HOUR_OF_DAY: 233 | c.clear(Calendar.MINUTE); 234 | /* fall through */ 235 | case Calendar.MINUTE: 236 | c.clear(Calendar.SECOND); 237 | /* fall through */ 238 | case Calendar.SECOND: 239 | c.clear(Calendar.MILLISECOND); 240 | break; 241 | default: 242 | throw new IllegalStateException("No logic for rounding value (" + u + ") " + unit); 243 | } 244 | 245 | } 246 | 247 | /** 248 | * @return the current date 249 | */ 250 | private Date getNow() { 251 | if (now == null) { 252 | // fall back to current time if no request info set 253 | now = new Date(); 254 | } 255 | return (Date) now.clone(); 256 | } 257 | 258 | /** 259 | * Parses a string of commands relative "now" are returns the resulting Date. 260 | * 261 | * @return the resulting date 262 | * @throws ParseException positions in ParseExceptions are token positions, not character positions. 263 | */ 264 | @SuppressWarnings("all") // The class is copied from solr codebase 265 | public Date parseMath(String math) throws ParseException { 266 | 267 | Calendar cal = Calendar.getInstance(zone, loc); 268 | cal.setTime(getNow()); 269 | 270 | /* check for No-Op */ 271 | if (0 == math.length()) { 272 | return cal.getTime(); 273 | } 274 | 275 | String[] ops = splitter.split(math); 276 | int pos = 0; 277 | while (pos < ops.length) { 278 | 279 | if (1 != ops[pos].length()) { 280 | throw new ParseException("Multi character command found: \"" + ops[pos] + "\"", pos); 281 | } 282 | char command = ops[pos++].charAt(0); 283 | 284 | switch (command) { 285 | case '/': 286 | if (ops.length < pos + 1) { 287 | throw new ParseException("Need a unit after command: \"" + command + "\"", pos); 288 | } 289 | try { 290 | round(cal, ops[pos++]); 291 | } catch (IllegalArgumentException e) { 292 | throw new ParseException("Unit not recognized: \"" + ops[pos - 1] + "\"", pos - 1); 293 | } 294 | break; 295 | case '+': /* fall through */ 296 | case '-': 297 | if (ops.length < pos + 2) { 298 | throw new ParseException("Need a value and unit for command: \"" + command + "\"", pos); 299 | } 300 | int val; 301 | try { 302 | val = Integer.valueOf(ops[pos++]); 303 | } catch (NumberFormatException e) { 304 | throw new ParseException("Not a Number: \"" + ops[pos - 1] + "\"", pos - 1); 305 | } 306 | if ('-' == command) { 307 | val = 0 - val; 308 | } 309 | try { 310 | String unit = ops[pos++]; 311 | add(cal, val, unit); 312 | } catch (IllegalArgumentException e) { 313 | throw new ParseException("Unit not recognized: \"" + ops[pos - 1] + "\"", pos - 1); 314 | } 315 | break; 316 | default: 317 | throw new ParseException("Unrecognized command: \"" + command + "\"", pos - 1); 318 | } 319 | } 320 | 321 | return cal.getTime(); 322 | } 323 | 324 | 325 | } 326 | 
-------------------------------------------------------------------------------- /chronix-storage/src/main/java/de/qaware/chronix/lucene/client/stream/date/DateQueryParser.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package de.qaware.chronix.lucene.client.stream.date; 17 | 18 | 19 | import org.apache.commons.lang3.StringUtils; 20 | 21 | import java.text.ParseException; 22 | import java.time.Instant; 23 | import java.util.HashMap; 24 | import java.util.Map; 25 | import java.util.regex.Pattern; 26 | 27 | /** 28 | * This class is used to first, transform queries like start:NOW-30DAYS 29 | * in expressions like 'NOW as long + 30 Days as long' and second, 30 | * to build matching range queries on our time series documents. 31 | * The current queries are supported: 32 | *

33 | * - end:47859 AND start:4578965 34 | * - end:2015-11-25T12:06:57.330Z OR start:2015-12-25T12:00:00.000Z 35 | * - start:NOW-30DAYS AND stop:NOW+30DAYS 36 | * 37 | * @author f.lautenschlager 38 | */ 39 | public class DateQueryParser { 40 | 41 | private final String[] dateFields; 42 | 43 | private final Pattern solrDateMathPattern; 44 | private final Pattern instantDatePattern; 45 | 46 | /** 47 | * Constructs a date query parser 48 | * 49 | * @param dateFields - the date fields 50 | */ 51 | public DateQueryParser(String[] dateFields) { 52 | this.dateFields = dateFields.clone(); 53 | this.solrDateMathPattern = Pattern.compile(".*(NOW|DAY|MONTH|YEAR).*"); 54 | this.instantDatePattern = Pattern.compile("\\d{4}\\-\\d{2}\\-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d{3}Z"); 55 | } 56 | 57 | /** 58 | * Converts the term for the date fields into an numeric representation. 59 | *

60 | * [0] -> numeric value for date field [0] 61 | *

62 | * [1] -> numeric value for date field [1] 63 | *

64 | * If the query does not contain a date field the value is represented as -1. 65 | * 66 | * @param query the user defined solr query 67 | * @return an array containing numeric representations of the date fields 68 | * @throws ParseException if the date term is not a numeric or solr date expression 69 | */ 70 | public long[] getNumericQueryTerms(String query) throws ParseException { 71 | long[] result = new long[dateFields.length]; 72 | for (int i = 0; i < dateFields.length; i++) { 73 | if (query.contains(dateFields[i])) { 74 | String dateField = dateFields[i]; 75 | String dateTerm = getTokenTerm(query, dateField); 76 | result[i] = getNumberRepresentation(dateTerm); 77 | } else { 78 | result[i] = -1; 79 | } 80 | } 81 | 82 | return result; 83 | } 84 | 85 | /** 86 | * Replaces the date fields with range queries. 87 | * 88 | * @param query the plain user query 89 | * @return an enriched plain solr query 90 | * @throws ParseException if there are characters that can not be parsed 91 | */ 92 | public String replaceRangeQueryTerms(String query) throws ParseException { 93 | Map replacements = new HashMap<>(); 94 | 95 | String queryWithPlaceholders = markQueryWithPlaceholders(query, replacements); 96 | return replacePlaceholders(queryWithPlaceholders, replacements); 97 | } 98 | 99 | /** 100 | * Converts the given date term into a numeric representation 101 | * 102 | * @param dateTerm the date term, e.g, start:NOW+30DAYS 103 | * @return the long representation of the date term 104 | * @throws ParseException if the date term could not be evaluated 105 | */ 106 | private long getNumberRepresentation(String dateTerm) throws ParseException { 107 | long numberRepresentation; 108 | if (StringUtils.isNumeric(dateTerm)) { 109 | numberRepresentation = Long.valueOf(dateTerm); 110 | } else if (solrDateMathPattern.matcher(dateTerm).matches()) { 111 | numberRepresentation = parseDateTerm(dateTerm); 112 | } else if (instantDatePattern.matcher(dateTerm).matches()) { 113 | numberRepresentation = Instant.parse(dateTerm).toEpochMilli(); 114 | } else { 115 | throw new ParseException("Could not parse date representation '" + dateTerm + "'", 0); 116 | } 117 | return numberRepresentation; 118 | } 119 | 120 | /** 121 | * Replaces the placeholders with concrete values 122 | * 123 | * @param query the query with placeholders 124 | * @param replacements the replacements 125 | * @return a query with concrete values 126 | */ 127 | private String replacePlaceholders(String query, Map replacements) { 128 | String resultQuery = query; 129 | for (Map.Entry entry : replacements.entrySet()) { 130 | resultQuery = resultQuery.replace(entry.getKey(), entry.getValue()); 131 | } 132 | return resultQuery; 133 | } 134 | 135 | /** 136 | * @param query the origin query 137 | * @param replacements a map for to put in the replacements 138 | * @return a query with placeholders and the matching replacements 139 | * @throws ParseException if the date term could not be parsed 140 | */ 141 | private String markQueryWithPlaceholders(String query, Map replacements) throws ParseException { 142 | String placeHolderQuery = query; 143 | for (int i = 0; i < dateFields.length; i++) { 144 | String dateField = dateFields[i]; 145 | 146 | if (placeHolderQuery.contains(dateField)) { 147 | String dateTerm = getTokenTerm(placeHolderQuery, dateField); 148 | long numberRepresentation = getNumberRepresentation(dateTerm); 149 | String rangeQuery = getDateRangeQuery(numberRepresentation, dateField); 150 | placeHolderQuery = placeHolderQuery.replace(dateField + 
dateTerm, keyPart(i)); 151 | 152 | //add the placeholders 153 | replacements.put(keyPart(i), rangeQuery); 154 | } 155 | } 156 | return placeHolderQuery; 157 | } 158 | 159 | /** 160 | * Important: The end of an term is marked by an " " 161 | * 162 | * @param query the origin query 163 | * @param startToken the start token 164 | * @return the term for the start token 165 | */ 166 | private String getTokenTerm(String query, String startToken) { 167 | int tokenLength = startToken.length(); 168 | int index = query.indexOf(startToken); 169 | int stopIndex = query.indexOf(' ', index); 170 | 171 | if (stopIndex > -1) { 172 | return query.substring(index + tokenLength, stopIndex); 173 | 174 | } 175 | 176 | return query.substring(index + tokenLength); 177 | } 178 | 179 | /** 180 | * Parses the given date query term into a date representation 181 | * 182 | * @param dateQueryTerm the date query term as string 183 | * @return the milliseconds since 1970 of the given dateQueryTerm 184 | * @throws ParseException if the term could not be parsed 185 | */ 186 | private long parseDate(String dateQueryTerm) throws ParseException { 187 | return new DateMathParser().parseMath(dateQueryTerm).getTime(); 188 | } 189 | 190 | /** 191 | * Builds a range query that 192 | * 193 | * @param value - the date value as long 194 | * @param field - the date field (start or end) 195 | * @return a solr range query 196 | */ 197 | private String getDateRangeQuery(long value, String field) { 198 | 199 | if ("start:".equals(field)) { 200 | //We don`t need documents, that have and end before our start 201 | // q = -end[* TO (START-1)] 202 | return "-end:[* TO " + (value - 1) + "]"; 203 | } else { 204 | //We don`t need documents, that have and start after our end 205 | // q = -start[* TO (START-1)] 206 | return "-start:[" + (value - 1) + " TO *]"; 207 | } 208 | 209 | } 210 | 211 | 212 | private String keyPart(int i) { 213 | return "key-" + i; 214 | } 215 | 216 | /** 217 | * Parses a solr date to long representation 218 | * 219 | * @param term the solr date term (NOW + 30 DAYS) 220 | * @return the term as long 221 | * @throws ParseException if the term could not be parsed 222 | */ 223 | private long parseDateTerm(String term) throws ParseException { 224 | String dateTerm = term.replace("NOW", "+0MILLISECOND"); 225 | return parseDate(dateTerm); 226 | } 227 | } -------------------------------------------------------------------------------- /chronix-storage/src/test/groovy/de/qaware/chronix/lucene/client/ChronixLuceneStorageConstantsTest.groovy: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package de.qaware.chronix.lucene.client 17 | 18 | import spock.lang.Specification 19 | 20 | /** 21 | * Unit test for the storage constants 22 | * @author f.lautenschlager 23 | */ 24 | class ChronixLuceneStorageConstantsTest extends Specification { 25 | 26 | def "test private constructor"() { 27 | when: 28 | ChronixLuceneStorageConstants.newInstance() 29 | then: 30 | noExceptionThrown() 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /chronix-storage/src/test/groovy/de/qaware/chronix/lucene/client/ChronixLuceneStorageTest.groovy: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package de.qaware.chronix.lucene.client 17 | 18 | import org.apache.lucene.analysis.standard.StandardAnalyzer 19 | import org.apache.lucene.document.Document 20 | import org.apache.lucene.queryparser.classic.QueryParser 21 | import org.apache.lucene.search.Query 22 | import org.apache.lucene.store.FSDirectory 23 | import spock.lang.Shared 24 | import spock.lang.Specification 25 | 26 | import java.nio.file.Path 27 | import java.nio.file.Paths 28 | import java.util.function.BinaryOperator 29 | import java.util.function.Function 30 | /** 31 | * Unit test for the Chronix Lucene Storage 32 | * @author f.lautenschlager 33 | */ 34 | class ChronixLuceneStorageTest extends Specification { 35 | 36 | @Shared 37 | def analyzer = new StandardAnalyzer() 38 | 39 | @Shared 40 | def group = new Function() { 41 | @Override 42 | Object apply(Object o) { 43 | return o 44 | } 45 | } 46 | @Shared 47 | def reduce = new Function() { 48 | @Override 49 | Document apply(Document indexableFields) { 50 | return indexableFields 51 | } 52 | } as BinaryOperator 53 | 54 | 55 | def "test add and stream document"() { 56 | given: 57 | Path path = Paths.get("build/lucene") 58 | def directory = FSDirectory.open(path) 59 | def luceneIndex = new LuceneIndex(directory, analyzer) 60 | def luceneStorage = new ChronixLuceneStorage<>(200, group, reduce) 61 | def query = createQuery("text") 62 | 63 | luceneIndex.getOpenWriter().deleteAll() 64 | 65 | when: 66 | def documents = createDocument() 67 | luceneStorage.add(new SimpleTimeSeriesConverter(), documents, luceneIndex) 68 | 69 | def stream = luceneStorage.stream(new SimpleTimeSeriesConverter(), luceneIndex, query) 70 | 71 | luceneIndex.getOpenReader().close() 72 | luceneIndex.getDirectory().close() 73 | 74 | then: 75 | stream.count() == 1 76 | } 77 | 78 | Collection createDocument() { 79 | def text = "This is the text to be indexed." 
80 | 81 | def document = new SimpleTimeSeries() 82 | document.add("fieldname", text) 83 | 84 | [document] 85 | } 86 | 87 | 88 | Query createQuery(String searchString) { 89 | QueryParser queryParser = new QueryParser("fieldname", analyzer) 90 | return queryParser.parse(searchString) 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /chronix-storage/src/test/groovy/de/qaware/chronix/lucene/client/LuceneIndexTest.groovy: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package de.qaware.chronix.lucene.client 17 | 18 | import org.apache.lucene.analysis.standard.StandardAnalyzer 19 | import org.apache.lucene.store.RAMDirectory 20 | import spock.lang.Specification 21 | 22 | /** 23 | * Unit test for the lucene index wrapper class 24 | * @author f.lautenschlager 25 | */ 26 | class LuceneIndexTest extends Specification { 27 | 28 | 29 | def "test getSearcher after openWriter"() { 30 | given: 31 | def luceneIndex = new LuceneIndex(new RAMDirectory(), new StandardAnalyzer()) 32 | 33 | when: 34 | luceneIndex.getOpenWriter().commit() 35 | def searcher = luceneIndex.getSearcher() 36 | 37 | then: 38 | searcher != null 39 | luceneIndex.readerOpen() 40 | luceneIndex.writerClosed() 41 | } 42 | 43 | def "test getOpenWriter after openReader"() { 44 | given: 45 | def luceneIndex = new LuceneIndex(new RAMDirectory(), new StandardAnalyzer()) 46 | 47 | when: 48 | luceneIndex.getOpenWriter().commit() 49 | luceneIndex.getOpenReader() 50 | def indexWriter = luceneIndex.getOpenWriter() 51 | 52 | 53 | then: 54 | indexWriter != null 55 | luceneIndex.readerClosed() 56 | luceneIndex.writerOpen() 57 | } 58 | 59 | def "test getOpenReader after openWriter"() { 60 | given: 61 | def luceneIndex = new LuceneIndex(new RAMDirectory(), new StandardAnalyzer()) 62 | 63 | when: 64 | luceneIndex.openWriter.commit() 65 | def indexWriter = luceneIndex.getOpenReader() 66 | 67 | then: 68 | indexWriter != null 69 | luceneIndex.readerOpen() 70 | luceneIndex.writerClosed() 71 | } 72 | 73 | def "test getDirectory"() { 74 | given: 75 | def luceneIndex = new LuceneIndex(new RAMDirectory(), new StandardAnalyzer()) 76 | 77 | when: 78 | def dir = luceneIndex.getDirectory() 79 | 80 | then: 81 | dir != null 82 | dir instanceof RAMDirectory 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /chronix-storage/src/test/groovy/de/qaware/chronix/lucene/client/add/LuceneAddingServiceTest.groovy: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package de.qaware.chronix.lucene.client.add 17 | 18 | import de.qaware.chronix.lucene.client.LuceneIndex 19 | import de.qaware.chronix.lucene.client.SimpleTimeSeries 20 | import de.qaware.chronix.lucene.client.SimpleTimeSeriesConverter 21 | import org.apache.lucene.analysis.standard.StandardAnalyzer 22 | import org.apache.lucene.document.StoredField 23 | import org.apache.lucene.index.IndexableField 24 | import org.apache.lucene.store.RAMDirectory 25 | import spock.lang.Shared 26 | import spock.lang.Specification 27 | 28 | /** 29 | * Unit test for the lucene adding service 30 | * @author f.lautenschlager 31 | */ 32 | class LuceneAddingServiceTest extends Specification { 33 | 34 | @Shared 35 | def expectedFields = new ArrayList(); 36 | 37 | def setup() { 38 | //double array 39 | expectedFields.add(new StoredField("double_array::mv::1", 5.0d)) 40 | expectedFields.add(new StoredField("double_array::mv::2", 3.2)) 41 | expectedFields.add(new StoredField("double_array::mv::3", 4.2d)) 42 | 43 | //Single fields 44 | expectedFields.add(new StoredField("double", 5.0d)) 45 | expectedFields.add(new StoredField("float", 3.2f)) 46 | expectedFields.add(new StoredField("int", 10)) 47 | expectedFields.add(new StoredField("long", 12l)) 48 | 49 | expectedFields.add(new StoredField("string", "hello")) 50 | 51 | //byte field 52 | expectedFields.add(new StoredField("bytes", "chronix rocks".bytes)) 53 | 54 | //String array 55 | expectedFields.add(new StoredField("string_array::mv::1", "one")) 56 | expectedFields.add(new StoredField("string_array::mv::2", "two")) 57 | expectedFields.add(new StoredField("string_array::mv::3", "three")) 58 | 59 | //mixed 60 | expectedFields.add(new StoredField("mixed::mv::1", "hello")) 61 | expectedFields.add(new StoredField("mixed::mv::2", 1.2d)) 62 | expectedFields.add(new StoredField("mixed::mv::3", "chronix")) 63 | 64 | } 65 | 66 | def "test add"() { 67 | given: 68 | def timeSeries = createTimeSeries(1) as Collection 69 | def luceneIndex = new LuceneIndex(new RAMDirectory(), new StandardAnalyzer()) 70 | 71 | when: 72 | def result = LuceneAddingService.add(new SimpleTimeSeriesConverter(), timeSeries, luceneIndex.openWriter) 73 | luceneIndex.openWriter.commit() 74 | def doc = luceneIndex.searcher.doc(0) 75 | 76 | then: 77 | result 78 | def fields = doc.fields 79 | fields.size() == 15 80 | 81 | 82 | checkIfEquals(fields, expectedFields) 83 | } 84 | 85 | def "test private constructor"() { 86 | when: 87 | LuceneAddingService.newInstance() 88 | then: 89 | noExceptionThrown() 90 | } 91 | 92 | def "test with empty or null argument"() { 93 | when: 94 | def returned = LuceneAddingService.add(new SimpleTimeSeriesConverter(), collection, null) 95 | then: 96 | returned 97 | 98 | where: 99 | collection << [null, new ArrayList<>()] 100 | 101 | } 102 | 103 | 104 | boolean checkIfEquals(List returned, ArrayList expected) { 105 | List returnedAsStrings = new ArrayList<>() 106 | List expectedAsStrings = new ArrayList<>() 107 | 108 | returned.each { 109 | returnedAsStrings.add(it.name() + value(it)) 110 | } 111 | 112 | expected.each { 113 | 
expectedAsStrings.add(it.name() + value(it)) 114 | } 115 | 116 | returnedAsStrings.containsAll(expectedAsStrings) 117 | } 118 | 119 | Object value(IndexableField value) { 120 | if (value.numericValue() != null) { 121 | return value.numericValue() 122 | } 123 | if (value.binaryValue() != null) { 124 | return value.binaryValue().bytes 125 | } 126 | if (value.stringValue() != null) { 127 | return value.stringValue(); 128 | } 129 | return "nothing" 130 | } 131 | 132 | 133 | Collection createTimeSeries(int numberOfTimeSeries) { 134 | def result = new ArrayList<>(); 135 | 136 | numberOfTimeSeries.times { 137 | SimpleTimeSeries sts = new SimpleTimeSeries() 138 | sts.add("string", "hello") 139 | 140 | sts.add("double", 5.0d) 141 | sts.add("int", 10i) 142 | sts.add("float", 3.2f) 143 | sts.add("long", 12l) 144 | 145 | sts.add("bytes", "chronix rocks".bytes) 146 | 147 | sts.add("double_array", [5.0d, 3.2d, 4.2d] as Double[]) 148 | sts.add("string_array", ["one", "two", "three"] as String[]) 149 | 150 | sts.add("mixed", ["hello", 1.2d, "chronix"] as ArrayList) 151 | //Not a pojo 152 | sts.add("ignored", new SimpleTimeSeries()) 153 | 154 | 155 | result.add(sts) 156 | } 157 | result 158 | } 159 | 160 | } 161 | -------------------------------------------------------------------------------- /chronix-storage/src/test/groovy/de/qaware/chronix/lucene/client/stream/date/DateQueryParserTest.groovy: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package de.qaware.chronix.lucene.client.stream.date 17 | 18 | import spock.lang.Specification 19 | 20 | import java.text.ParseException 21 | 22 | /** 23 | * Unit test for the date query parser 24 | * @author f.lautenschlager 25 | */ 26 | class DateQueryParserTest extends Specification { 27 | 28 | def "test get numeric query terms"() { 29 | given: 30 | String[] dateFields = ["start:", "end:"] 31 | def dateQueryParser = new DateQueryParser(dateFields) 32 | 33 | when: 34 | def result = dateQueryParser.getNumericQueryTerms("start:1 AND end:0"); 35 | 36 | then: 37 | result.length == 2 38 | result[0] == 1l 39 | result[1] == 0l 40 | 41 | } 42 | 43 | def "test replaceRangeQueryTerms"() { 44 | given: 45 | 46 | String[] dateFields = ["start:", "end:"] 47 | def dateQueryParser = new DateQueryParser(dateFields) 48 | 49 | when: 50 | def modifiedQuery = dateQueryParser.replaceRangeQueryTerms(query) 51 | 52 | then: 53 | modifiedQuery == expected 54 | 55 | where: 56 | query << ["", "metric:\\Load\\start", 57 | "end:47859 AND start:4578965", 58 | "host:laptop OR end:47859 AND start:4578965 AND metric:\\Load\\AVG", 59 | "end:2015-11-25T12:06:57.330Z OR start:2015-12-25T12:00:00.000Z", 60 | "end:NOW/DAY"] 61 | expected << ["", "metric:\\Load\\start", 62 | "-start:[47858 TO *] AND -end:[* TO 4578964]", 63 | "host:laptop OR -start:[47858 TO *] AND -end:[* TO 4578964] AND metric:\\Load\\AVG", 64 | "-start:[1448453217329 TO *] OR -end:[* TO 1451044799999]", 65 | "-start:[${testDateMathHelper("NOW/DAY") - 1} TO *]"] 66 | } 67 | 68 | def "testDateMathHelper"(String term) { 69 | String dateTerm = term.replace("NOW", "+0MILLISECOND"); 70 | return new DateMathParser().parseMath(dateTerm).getTime(); 71 | } 72 | 73 | def "test replace range query with invalid arguments"() { 74 | given: 75 | 76 | String[] dateFields = ["start:", "end:"] 77 | def dateQueryParser = new DateQueryParser(dateFields) 78 | 79 | when: 80 | dateQueryParser.replaceRangeQueryTerms(query) 81 | 82 | then: 83 | thrown ParseException.class 84 | 85 | where: 86 | query << ["start:hallo AND end:stop"] 87 | } 88 | 89 | } -------------------------------------------------------------------------------- /chronix-storage/src/test/java/de/qaware/chronix/lucene/client/SimpleTimeSeries.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package de.qaware.chronix.lucene.client; 17 | 18 | import java.util.HashMap; 19 | import java.util.Map; 20 | 21 | /** 22 | * An simple time series (without points ;-) used for test purposes 23 | * 24 | * @author f.lautenschlager 25 | */ 26 | public class SimpleTimeSeries { 27 | 28 | private Map fields; 29 | 30 | /** 31 | * Constructs a simple time series 32 | */ 33 | public SimpleTimeSeries() { 34 | fields = new HashMap<>(); 35 | } 36 | 37 | /** 38 | * Adds a field for the given name and value. 39 | * Overrides old values. 
40 | * 41 | * @param name the field name 42 | * @param value the field value 43 | */ 44 | public void add(String name, Object value) { 45 | fields.put(name, value); 46 | } 47 | 48 | /** 49 | * @return the fields of the time series 50 | */ 51 | public Map getFields() { 52 | return fields; 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /chronix-storage/src/test/java/de/qaware/chronix/lucene/client/SimpleTimeSeriesConverter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 QAware GmbH 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package de.qaware.chronix.lucene.client; 17 | 18 | import de.qaware.chronix.converter.BinaryTimeSeries; 19 | import de.qaware.chronix.converter.TimeSeriesConverter; 20 | 21 | /** 22 | * A converter used for test purposes. 23 | * Converts a SimpleTimeSeries to a BinaryTimeSeries an back. 24 | * 25 | * @author f.lautenschlager 26 | */ 27 | public class SimpleTimeSeriesConverter implements TimeSeriesConverter { 28 | @Override 29 | public SimpleTimeSeries from(BinaryTimeSeries binaryTimeSeries, long queryStart, long queryEnd) { 30 | SimpleTimeSeries doc = new SimpleTimeSeries(); 31 | binaryTimeSeries.getFields().forEach(doc::add); 32 | 33 | return doc; 34 | } 35 | 36 | @Override 37 | public BinaryTimeSeries to(SimpleTimeSeries document) { 38 | BinaryTimeSeries.Builder binaryTimeSeries = new BinaryTimeSeries.Builder(); 39 | document.getFields().forEach(binaryTimeSeries::field); 40 | return binaryTimeSeries.build(); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /chronix-storage/src/test/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChronixDB/chronix.storage/6a3669d189e3a0b159874b63bf87b3f865f8f108/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Fri Jul 15 16:01:29 CEST 2016 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.2.1-all.zip 7 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script 
for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 10 | DEFAULT_JVM_OPTS="" 11 | 12 | APP_NAME="Gradle" 13 | APP_BASE_NAME=`basename "$0"` 14 | 15 | # Use the maximum available, or set MAX_FD != -1 to use that value. 16 | MAX_FD="maximum" 17 | 18 | warn ( ) { 19 | echo "$*" 20 | } 21 | 22 | die ( ) { 23 | echo 24 | echo "$*" 25 | echo 26 | exit 1 27 | } 28 | 29 | # OS specific support (must be 'true' or 'false'). 30 | cygwin=false 31 | msys=false 32 | darwin=false 33 | case "`uname`" in 34 | CYGWIN* ) 35 | cygwin=true 36 | ;; 37 | Darwin* ) 38 | darwin=true 39 | ;; 40 | MINGW* ) 41 | msys=true 42 | ;; 43 | esac 44 | 45 | # Attempt to set APP_HOME 46 | # Resolve links: $0 may be a link 47 | PRG="$0" 48 | # Need this for relative symlinks. 49 | while [ -h "$PRG" ] ; do 50 | ls=`ls -ld "$PRG"` 51 | link=`expr "$ls" : '.*-> \(.*\)$'` 52 | if expr "$link" : '/.*' > /dev/null; then 53 | PRG="$link" 54 | else 55 | PRG=`dirname "$PRG"`"/$link" 56 | fi 57 | done 58 | SAVED="`pwd`" 59 | cd "`dirname \"$PRG\"`/" >/dev/null 60 | APP_HOME="`pwd -P`" 61 | cd "$SAVED" >/dev/null 62 | 63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 64 | 65 | # Determine the Java command to use to start the JVM. 66 | if [ -n "$JAVA_HOME" ] ; then 67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 68 | # IBM's JDK on AIX uses strange locations for the executables 69 | JAVACMD="$JAVA_HOME/jre/sh/java" 70 | else 71 | JAVACMD="$JAVA_HOME/bin/java" 72 | fi 73 | if [ ! -x "$JAVACMD" ] ; then 74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 75 | 76 | Please set the JAVA_HOME variable in your environment to match the 77 | location of your Java installation." 78 | fi 79 | else 80 | JAVACMD="java" 81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 82 | 83 | Please set the JAVA_HOME variable in your environment to match the 84 | location of your Java installation." 85 | fi 86 | 87 | # Increase the maximum file descriptors if we can. 88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then 89 | MAX_FD_LIMIT=`ulimit -H -n` 90 | if [ $? -eq 0 ] ; then 91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 92 | MAX_FD="$MAX_FD_LIMIT" 93 | fi 94 | ulimit -n $MAX_FD 95 | if [ $? 
-ne 0 ] ; then 96 | warn "Could not set maximum file descriptor limit: $MAX_FD" 97 | fi 98 | else 99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 100 | fi 101 | fi 102 | 103 | # For Darwin, add options to specify how the application appears in the dock 104 | if $darwin; then 105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 106 | fi 107 | 108 | # For Cygwin, switch paths to Windows format before running java 109 | if $cygwin ; then 110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 112 | JAVACMD=`cygpath --unix "$JAVACMD"` 113 | 114 | # We build the pattern for arguments to be converted via cygpath 115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 116 | SEP="" 117 | for dir in $ROOTDIRSRAW ; do 118 | ROOTDIRS="$ROOTDIRS$SEP$dir" 119 | SEP="|" 120 | done 121 | OURCYGPATTERN="(^($ROOTDIRS))" 122 | # Add a user-defined pattern to the cygpath arguments 123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 125 | fi 126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 127 | i=0 128 | for arg in "$@" ; do 129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 131 | 132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 134 | else 135 | eval `echo args$i`="\"$arg\"" 136 | fi 137 | i=$((i+1)) 138 | done 139 | case $i in 140 | (0) set -- ;; 141 | (1) set -- "$args0" ;; 142 | (2) set -- "$args0" "$args1" ;; 143 | (3) set -- "$args0" "$args1" "$args2" ;; 144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 150 | esac 151 | fi 152 | 153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules 154 | function splitJvmOpts() { 155 | JVM_OPTS=("$@") 156 | } 157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS 158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" 159 | 160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" 161 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 12 | set DEFAULT_JVM_OPTS= 13 | 14 | set DIRNAME=%~dp0 15 | if "%DIRNAME%" == "" set DIRNAME=. 
16 | set APP_BASE_NAME=%~n0 17 | set APP_HOME=%DIRNAME% 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windowz variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | if "%@eval[2+2]" == "4" goto 4NT_args 53 | 54 | :win9xME_args 55 | @rem Slurp the command line arguments. 56 | set CMD_LINE_ARGS= 57 | set _SKIP=2 58 | 59 | :win9xME_args_slurp 60 | if "x%~1" == "x" goto execute 61 | 62 | set CMD_LINE_ARGS=%* 63 | goto execute 64 | 65 | :4NT_args 66 | @rem Get arguments from the 4NT Shell from JP Software 67 | set CMD_LINE_ARGS=%$ 68 | 69 | :execute 70 | @rem Setup the command line 71 | 72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if "%ERRORLEVEL%"=="0" goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 85 | exit /b 1 86 | 87 | :mainEnd 88 | if "%OS%"=="Windows_NT" endlocal 89 | 90 | :omega 91 | -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'chronix.storage' 2 | 3 | include 'chronix-storage' 4 | include 'chronix-storage-integration' 5 | 6 | --------------------------------------------------------------------------------