├── src ├── main │ ├── java │ │ ├── com │ │ │ └── everdata │ │ │ │ ├── parser │ │ │ │ ├── jjtree.bat │ │ │ │ ├── jjdoc.bat │ │ │ │ ├── javacc.bat │ │ │ │ ├── Expression.java │ │ │ │ ├── AST_Regex.java │ │ │ │ ├── AST_Delete.java │ │ │ │ ├── AST_OrExpr.java │ │ │ │ ├── AST_AndExpr.java │ │ │ │ ├── AST_EvalExpr.java │ │ │ │ ├── AST_UnaryExpr.java │ │ │ │ ├── AST_PredicateExpression.java │ │ │ │ ├── AST_FieldExpr.java │ │ │ │ ├── AST_TopOption.java │ │ │ │ ├── AST_StatsFunc.java │ │ │ │ ├── AST_SearchOption.java │ │ │ │ ├── AST_ComparisonExpression.java │ │ │ │ ├── AST_TermExpression.java │ │ │ │ ├── AST_Table.java │ │ │ │ ├── AST_IdentList.java │ │ │ │ ├── AST_Start.java │ │ │ │ ├── AST_Sort.java │ │ │ │ ├── AST_Join.java │ │ │ │ ├── Node.java │ │ │ │ ├── CommandParserTreeConstants.java │ │ │ │ ├── AST_ByIdentList.java │ │ │ │ ├── AST_Top.java │ │ │ │ ├── SimpleNode.java │ │ │ │ ├── JJTCommandParserState.java │ │ │ │ ├── Token.java │ │ │ │ ├── TokenMgrError.java │ │ │ │ ├── ParseException.java │ │ │ │ ├── CommandParserConstants.java │ │ │ │ ├── AST_Stats.java │ │ │ │ ├── SimpleCharStream.java │ │ │ │ ├── CommandParser.html │ │ │ │ ├── AST_Search.java │ │ │ │ └── JavaCharStream.java │ │ │ │ ├── command │ │ │ │ ├── ReportResponse.java │ │ │ │ ├── CommandException.java │ │ │ │ ├── Field.java │ │ │ │ ├── Function.java │ │ │ │ ├── Option.java │ │ │ │ ├── Plan.java │ │ │ │ ├── Optimizer.java │ │ │ │ └── JoinQuery.java │ │ │ │ └── xcontent │ │ │ │ ├── CsvXContent.java │ │ │ │ └── CsvXContentGenerator.java │ │ └── org │ │ │ └── elasticsearch │ │ │ └── plugin │ │ │ └── rest │ │ │ ├── CommandRestModule.java │ │ │ ├── CommandRestPlugin.java │ │ │ ├── KafkaStreamRestHandler.java │ │ │ ├── CommandRestHandler.java │ │ │ ├── TaskRestHandler.java │ │ │ └── JobRestHandler.java │ ├── resources │ │ └── es-plugin.properties │ └── assemblies │ │ └── plugin.xml └── test │ ├── commandexample │ ├── searchcommand │ └── java │ └── com │ └── everdata │ └── test │ ├── CommandActionTest.java │ └── HttpHelper.java ├── rest-command-intro.ppt ├── target ├── classes │ └── es-plugin.properties └── maven-archiver │ └── pom.properties ├── .settings ├── org.eclipse.m2e.core.prefs ├── org.eclipse.core.resources.prefs └── org.eclipse.jdt.core.prefs ├── .gitignore ├── .project ├── LICENSE ├── pom.xml └── README.md /src/main/java/com/everdata/parser/jjtree.bat: -------------------------------------------------------------------------------- 1 | java -cp ..\..\..\..\..\..\lib\javacc.jar jjtree %1 %2 -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/jjdoc.bat: -------------------------------------------------------------------------------- 1 | 2 | java -cp ..\..\..\..\..\..\lib\javacc.jar jjdoc %1 %2 -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/javacc.bat: -------------------------------------------------------------------------------- 1 | java -cp ..\..\..\..\..\..\lib\javacc.jar javacc -STATIC=false %1 %2 -------------------------------------------------------------------------------- /rest-command-intro.ppt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/huangchen007/elasticsearch-rest-command/HEAD/rest-command-intro.ppt -------------------------------------------------------------------------------- /target/classes/es-plugin.properties: -------------------------------------------------------------------------------- 1 | 
plugin=org.elasticsearch.plugin.rest.CommandRestPlugin 2 | version=0.2.3 3 | 4 | -------------------------------------------------------------------------------- /src/main/resources/es-plugin.properties: -------------------------------------------------------------------------------- 1 | plugin=org.elasticsearch.plugin.rest.CommandRestPlugin 2 | version=0.2.3 3 | 4 | -------------------------------------------------------------------------------- /.settings/org.eclipse.m2e.core.prefs: -------------------------------------------------------------------------------- 1 | activeProfiles= 2 | eclipse.preferences.version=1 3 | resolveWorkspaceProjects=false 4 | version=1 5 | -------------------------------------------------------------------------------- /target/maven-archiver/pom.properties: -------------------------------------------------------------------------------- 1 | #Generated by Maven 2 | #Tue Dec 16 14:28:32 CST 2014 3 | version=0.7.1 4 | groupId=com.everdata 5 | artifactId=rest-command 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | 3 | # Package Files # 4 | *.jar 5 | *.war 6 | *.ear 7 | *.zip 8 | *.classpath 9 | *.project 10 | *.gitignore 11 | *.prefs 12 | /lib 13 | /bin 14 | /data 15 | -------------------------------------------------------------------------------- /.settings/org.eclipse.core.resources.prefs: -------------------------------------------------------------------------------- 1 | eclipse.preferences.version=1 2 | encoding//src/main/java=UTF-8 3 | encoding//src/test/java=UTF-8 4 | encoding//src/test/searchcommand=UTF-8 5 | encoding/=UTF-8 6 | -------------------------------------------------------------------------------- /src/test/commandexample: -------------------------------------------------------------------------------- 1 | SEARCH INDEX=LOG_20140501,LOG_20140502,LOG_20140503,LOG_20140504,LOG_20140505,LOG_20140506,LOG_20140507,LOG_20140508,LOG_20140509,LOG_20140510,LOG_20140511,LOG_20140512,LOG_20140513,LOG_20140514,LOG_20140515,LOG_20140516 SOURCETYPE=HTTPLOG MSISDN="" STARTDATE>="2014-07-01 00:00:00" ENDDATE<="2014-07-02 21:49:50" | STATS SUM(NTOTALFLOW) BY PID -------------------------------------------------------------------------------- /src/main/java/com/everdata/command/ReportResponse.java: -------------------------------------------------------------------------------- 1 | package com.everdata.command; 2 | 3 | import java.util.ArrayList; 4 | 5 | import org.elasticsearch.action.search.SearchResponse; 6 | 7 | 8 | public class ReportResponse { 9 | 10 | public SearchResponse response = null; 11 | public ArrayList bucketFields = null; 12 | public ArrayList statsFields = null; 13 | //public Function countField = null; 14 | 15 | } 16 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/plugin/rest/CommandRestModule.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.plugin.rest; 2 | 3 | import org.elasticsearch.common.inject.AbstractModule; 4 | 5 | public class CommandRestModule extends AbstractModule { 6 | 7 | @Override 8 | protected void configure() { 9 | bind(CommandRestHandler.class).asEagerSingleton(); 10 | bind(JobRestHandler.class).asEagerSingleton(); 11 | bind(TaskRestHandler.class).asEagerSingleton(); 12 | } 13 | 14 | } 15 | 
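A minimal usage sketch (not a file in this repository): the handlers bound in CommandRestModule above serve the pipeline query language shown in src/test/commandexample, and CommandActionTest/HttpHelper further below submit such queries over HTTP to the /_command endpoint with a q parameter. Assuming a local node with the plugin installed, a single query could be sent as follows; the host address and the query string are illustrative, while HttpHelper.sendGetUrl, the /_command path, and the parameter names are taken from the test code in this repository.

    package com.everdata.test;

    import org.apache.http.client.utils.URIBuilder;

    // Sketch only: sends one pipeline command to the plugin's REST endpoint,
    // mirroring CommandActionTest.multiThread() but for a single request.
    public class CommandClientSketch {
        public static void main(String[] args) throws Exception {
            String host = "http://localhost:9200/_command"; // assumed node address with the plugin installed
            URIBuilder uriBuilder = new URIBuilder();
            // Illustrative command adapted from src/test/commandexample: filter, then aggregate flow per PID.
            uriBuilder.addParameter("q", "search index=log sourcetype=httplog | stats sum(ntotalflow) by pid");
            uriBuilder.addParameter("query", "true"); // same flag the test sends
            String json = HttpHelper.sendGetUrl(host, uriBuilder.build().toString());
            System.out.println(json); // response body returned by CommandRestHandler
        }
    }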
-------------------------------------------------------------------------------- /.settings/org.eclipse.jdt.core.prefs: -------------------------------------------------------------------------------- 1 | #Fri Jul 25 15:31:12 CST 2014 2 | org.eclipse.jdt.core.compiler.problem.enumIdentifier=error 3 | org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning 4 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7 5 | eclipse.preferences.version=1 6 | org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled 7 | org.eclipse.jdt.core.compiler.problem.assertIdentifier=error 8 | org.eclipse.jdt.core.compiler.source=1.7 9 | org.eclipse.jdt.core.compiler.compliance=1.7 10 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/command/CommandException.java: -------------------------------------------------------------------------------- 1 | package com.everdata.command; 2 | 3 | /** 4 | * General exception for query execution issues. 5 | */ 6 | public class CommandException extends Exception { 7 | 8 | /** 9 | * 10 | */ 11 | private static final long serialVersionUID = -916314713696048647L; 12 | 13 | /** 14 | * Constructs a CommandException, given the error message. 15 | */ 16 | public CommandException(String message) { 17 | super(message); 18 | } 19 | 20 | } // public class QueryException extends Exception 21 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/Expression.java: -------------------------------------------------------------------------------- 1 | package com.everdata.parser; 2 | 3 | public class Expression { 4 | // | | | | | 5 | public final static int EQ = 1; 6 | public final static int NEQ = 2; 7 | public final static int GT = 3; 8 | public final static int GTE = 4; 9 | public final static int LT = 5; 10 | public final static int LTE = 6; 11 | 12 | public String field; 13 | public int oper; 14 | public String value; 15 | public int valueType; 16 | 17 | public Expression() { 18 | } 19 | 20 | } -------------------------------------------------------------------------------- /.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | rest-command 4 | Restful pipeline command support plugin for Elasticsearch. NO_M2ECLIPSE_SUPPORT: Project files created with the maven-eclipse-plugin are not supported in M2Eclipse. 5 | 6 | 7 | 8 | org.eclipse.jdt.core.javabuilder 9 | 10 | 11 | 12 | org.eclipse.jdt.core.javanature 13 | 14 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/command/Field.java: -------------------------------------------------------------------------------- 1 | package com.everdata.command; 2 | 3 | 4 | public class Field { 5 | 6 | public final static int SINGLE = 1; 7 | public final static int SCRIPT = 2; 8 | 9 | public static String fieldsToScript(String[] fields){ 10 | 11 | StringBuilder script = new StringBuilder("doc['" + fields[0] + "'].value"); 12 | 13 | 14 | for(int i = 1; i< fields.length; i++){ 15 | script.append("+ '| ' + doc['"); 16 | script.append( fields[i] ); 17 | script.append("'].value"); 18 | } 19 | 20 | return script.toString(); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_Regex.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. 
AST_Regex.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public 6 | class AST_Regex extends SimpleNode { 7 | public AST_Regex(int id) { 8 | super(id); 9 | } 10 | 11 | public AST_Regex(CommandParser p, int id) { 12 | super(p, id); 13 | } 14 | 15 | } 16 | /* JavaCC - OriginalChecksum=ff22ed9791dc1ff157497ab66d6f5836 (do not edit this line) */ 17 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_Delete.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_Delete.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public 6 | class AST_Delete extends SimpleNode { 7 | public AST_Delete(int id) { 8 | super(id); 9 | } 10 | 11 | public AST_Delete(CommandParser p, int id) { 12 | super(p, id); 13 | } 14 | 15 | } 16 | /* JavaCC - OriginalChecksum=86f701e98ac1441bed480141c4f146dc (do not edit this line) */ 17 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_OrExpr.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_OrExpr.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public 6 | class AST_OrExpr extends SimpleNode { 7 | public AST_OrExpr(int id) { 8 | super(id); 9 | } 10 | 11 | public AST_OrExpr(CommandParser p, int id) { 12 | super(p, id); 13 | } 14 | 15 | } 16 | /* JavaCC - OriginalChecksum=f3ec20accd73822cc748d70821a73156 (do not edit this line) */ 17 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_AndExpr.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. 
AST_AndExpr.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public 6 | class AST_AndExpr extends SimpleNode { 7 | public AST_AndExpr(int id) { 8 | super(id); 9 | } 10 | 11 | public AST_AndExpr(CommandParser p, int id) { 12 | super(p, id); 13 | } 14 | 15 | } 16 | /* JavaCC - OriginalChecksum=0ba6f1cd29aaf6f33f24b2f16c1eaaf4 (do not edit this line) */ 17 | -------------------------------------------------------------------------------- /src/main/assemblies/plugin.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | plugin 4 | 5 | zip 6 | 7 | false 8 | 9 | 10 | / 11 | true 12 | true 13 | 14 | org.elasticsearch:elasticsearch 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /src/test/searchcommand: -------------------------------------------------------------------------------- 1 | SEARCH INDEX=IDC_LOG_201403261835,IDC_LOG SOURCETYPE=IDCLOGMAPPING HASCHILD=(SOURCETYPE=DDD DDD=DDD DDD) HASCHILD=(SOURCETYPE=DDD DDD=DDD DDD) HASPARENT=(SOURCETYPE=TTT TTT=TT TT) DDD _DDD="中文" 2 | search index=idc_log_201403261835,idc_log sourcetype=idcLogMapping haschild=(sourcetype=ddd ddd=ddd ddd) haschild=(sourcetype=ddd ddd=ddd ddd) hasparent=(sourcetype=ttt ttt=tt tt) ddd _ddd="中文" 3 | search sourcetype=user http://*? | stats count, sum("asdfasdfasdfasf+[]''"), sum(down_bytes) by hostt | top ddd,ddd by ddd,ddd 4 | search index=log sourcetype=* endtime="2014-04-01 00:00:00" msisdn=15527222196 1starttime>="2014-03-01 00:00:00"| stats sum(ntotalflow) by pid -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_EvalExpr.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_EvalExpr.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public 6 | class AST_EvalExpr extends SimpleNode { 7 | public AST_EvalExpr(int id) { 8 | super(id); 9 | } 10 | 11 | public AST_EvalExpr(CommandParser p, int id) { 12 | super(p, id); 13 | } 14 | 15 | public String script; 16 | 17 | } 18 | /* JavaCC - OriginalChecksum=fd4a3c16c63c691fcc89d660ee575aee (do not edit this line) */ 19 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_UnaryExpr.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. 
AST_UnaryExpr.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public class AST_UnaryExpr extends SimpleNode { 6 | boolean isNot = false; 7 | 8 | public AST_UnaryExpr(int id) { 9 | super(id); 10 | } 11 | 12 | public AST_UnaryExpr(CommandParser p, int id) { 13 | super(p, id); 14 | } 15 | 16 | } 17 | /* 18 | * JavaCC - OriginalChecksum=9c5b36e4a7f4d776543a0f79ea9b64ba (do not edit this 19 | * line) 20 | */ 21 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_PredicateExpression.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_PredicateExpression.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public 6 | class AST_PredicateExpression extends SimpleNode { 7 | public AST_PredicateExpression(int id) { 8 | super(id); 9 | } 10 | 11 | public AST_PredicateExpression(CommandParser p, int id) { 12 | super(p, id); 13 | } 14 | 15 | } 16 | /* JavaCC - OriginalChecksum=a3b8e11723668e712f27e74456a4d45c (do not edit this line) */ 17 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_FieldExpr.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_FieldExpr.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public 6 | class AST_FieldExpr extends SimpleNode { 7 | public AST_FieldExpr(int id) { 8 | super(id); 9 | } 10 | 11 | public AST_FieldExpr(CommandParser p, int id) { 12 | super(p, id); 13 | } 14 | 15 | public int type; 16 | public String field; 17 | 18 | } 19 | /* JavaCC - OriginalChecksum=c162b35dd9b97b3ae37931b754c4a582 (do not edit this line) */ 20 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_TopOption.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_TopOption.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | import com.everdata.command.Option; 6 | 7 | public 8 | class AST_TopOption extends SimpleNode { 9 | public AST_TopOption(int id) { 10 | super(id); 11 | } 12 | 13 | public AST_TopOption(CommandParser p, int id) { 14 | super(p, id); 15 | } 16 | 17 | protected Option opt = new Option(); 18 | 19 | } 20 | /* JavaCC - OriginalChecksum=58f509b50d86058e0576e29411881d35 (do not edit this line) */ 21 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_StatsFunc.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. 
AST_StatsFunc.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | import com.everdata.command.Function; 6 | 7 | public 8 | class AST_StatsFunc extends SimpleNode { 9 | public AST_StatsFunc(int id) { 10 | super(id); 11 | } 12 | 13 | public AST_StatsFunc(CommandParser p, int id) { 14 | super(p, id); 15 | } 16 | 17 | public Function func = new Function(); 18 | 19 | } 20 | /* JavaCC - OriginalChecksum=f227121067e732204b344960e1df54a9 (do not edit this line) */ 21 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_SearchOption.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_SearchOption.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | import com.everdata.command.Option; 6 | 7 | public 8 | class AST_SearchOption extends SimpleNode { 9 | public AST_SearchOption(int id) { 10 | super(id); 11 | } 12 | 13 | public AST_SearchOption(CommandParser p, int id) { 14 | super(p, id); 15 | } 16 | 17 | protected Option opt = new Option(); 18 | 19 | } 20 | /* JavaCC - OriginalChecksum=adb3136c09c654789d18cd772f208abd (do not edit this line) */ 21 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_ComparisonExpression.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_ComparisonExpression.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public 6 | class AST_ComparisonExpression extends SimpleNode { 7 | public AST_ComparisonExpression(int id) { 8 | super(id); 9 | } 10 | 11 | public AST_ComparisonExpression(CommandParser p, int id) { 12 | super(p, id); 13 | } 14 | 15 | 16 | protected Expression expr = new Expression(); 17 | } 18 | /* JavaCC - OriginalChecksum=7eedfcc2765c3247a645bed1085b6187 (do not edit this line) */ 19 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_TermExpression.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. 
AST_TermExpression.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public class AST_TermExpression extends SimpleNode { 6 | 7 | public final static int PHRASE = 1; 8 | public final static int TERM = 2; 9 | 10 | public AST_TermExpression(int id) { 11 | super(id); 12 | } 13 | 14 | public AST_TermExpression(CommandParser p, int id) { 15 | super(p, id); 16 | } 17 | 18 | int type; 19 | String term; 20 | } 21 | /* 22 | * JavaCC - OriginalChecksum=e8f812fdfd78761973e6eff986241cd1 (do not edit this 23 | * line) 24 | */ 25 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_Table.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_Table.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public 6 | class AST_Table extends SimpleNode { 7 | 8 | public AST_Table(int id) { 9 | super(id); 10 | } 11 | 12 | public AST_Table(CommandParser p, int id) { 13 | super(p, id); 14 | } 15 | 16 | public String[] getTables(){ 17 | if( children.length == 0) 18 | return null; 19 | else{ 20 | return ((AST_IdentList) children[0]).getNames(); 21 | } 22 | } 23 | 24 | } 25 | /* JavaCC - OriginalChecksum=9f4141fe2732946f07c2bfd9950441a9 (do not edit this line) */ 26 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/plugin/rest/CommandRestPlugin.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.plugin.rest; 2 | 3 | 4 | 5 | import org.elasticsearch.common.inject.Module; 6 | import org.elasticsearch.plugins.AbstractPlugin; 7 | 8 | import java.util.ArrayList; 9 | import java.util.Collection; 10 | public class CommandRestPlugin extends AbstractPlugin { 11 | 12 | @Override 13 | public String description() { 14 | return "Restful pipeline command support plugin for Elasticsearch"; 15 | } 16 | 17 | @Override 18 | public String name() { 19 | return "rest-command"; 20 | } 21 | 22 | @Override 23 | public Collection<Class<? extends Module>> modules() { 24 | Collection<Class<? extends Module>> modules = new ArrayList<Class<? extends Module>>(); 25 | modules.add(CommandRestModule.class); 26 | return modules; 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_IdentList.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_IdentList.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | import java.util.ArrayList; 6 | 7 | public 8 | class AST_IdentList extends SimpleNode { 9 | public AST_IdentList(int id) { 10 | super(id); 11 | } 12 | 13 | public AST_IdentList(CommandParser p, int id) { 14 | super(p, id); 15 | } 16 | /** Names of the columns/tables. */ 17 | protected ArrayList<String> names = new ArrayList<String>(); 18 | 19 | /** 20 | * Gets the names of the columns/tables. 
21 | */ 22 | public String[] getNames() { 23 | return names.toArray(new String[names.size()]); 24 | } 25 | } 26 | /* JavaCC - OriginalChecksum=20bbc1ecd7baf6197e342c3363d75e6e (do not edit this line) */ 27 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/command/Function.java: -------------------------------------------------------------------------------- 1 | package com.everdata.command; 2 | 3 | public class Function { 4 | public final static int SUM = 1; 5 | public final static int COUNT = 2; 6 | public final static int DC = 3; 7 | public final static int AVG = 4; 8 | public final static int MAX = 5; 9 | public final static int MIN = 6; 10 | 11 | public int type; 12 | public int fieldtype; //SCRIPT or SINGLE 13 | public String field = null; 14 | public String name = null; 15 | public String as = null; 16 | public int order = 0; 17 | public String statsField = null; 18 | 19 | 20 | public Function(){} 21 | 22 | 23 | public static String genStatField(Function func){ 24 | if(func.as != null){ 25 | return func.as; 26 | }else{ 27 | StringBuilder statField = new StringBuilder(func.name); 28 | statField.append("-"); 29 | statField.append(func.field); 30 | 31 | return statField.toString(); 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/command/Option.java: -------------------------------------------------------------------------------- 1 | package com.everdata.command; 2 | 3 | public class Option { 4 | public final static int COUNTFIELD = 1; 5 | public final static int LIMIT = 2; 6 | public final static int OTHERSTR = 3; 7 | public final static int PERCENTFIELD = 4; 8 | public final static int SHOWCOUNT = 5; 9 | public final static int SHOWPERC = 6; 10 | public final static int USEOTHER = 7; 11 | public final static int TIMEFORMAT = 8; 12 | public final static int STARTTIME = 9; 13 | public final static int ENDTIME = 10; 14 | public final static int EARLIEST = 11; 15 | public final static int LATEST = 12; 16 | public final static int SOURCETYPE = 13; 17 | public final static int INDEX = 14; 18 | public final static int HASPARENT = 15; 19 | public final static int HASCHILD = 16; 20 | public final static int MINCOUNT = 17; 21 | 22 | public int type; 23 | public String value; 24 | 25 | public Option() { 26 | } 27 | 28 | } -------------------------------------------------------------------------------- /src/main/java/com/everdata/command/Plan.java: -------------------------------------------------------------------------------- 1 | package com.everdata.command; 2 | 3 | 4 | import org.elasticsearch.action.ActionResponse; 5 | import org.elasticsearch.client.Client; 6 | import org.elasticsearch.common.logging.ESLogger; 7 | 8 | public abstract class Plan { 9 | 10 | protected String inputIndexType; 11 | protected String outputIndexType; 12 | protected Client client; 13 | protected ESLogger logger; 14 | protected int from = 0; 15 | protected int size = -1;//resultset is all 16 | 17 | public Plan(String inputIndexType, String outputIndexType, Client client, ESLogger logger){ 18 | this.inputIndexType = inputIndexType; 19 | this.outputIndexType = outputIndexType; 20 | this.client = client; 21 | this.logger = logger; 22 | } 23 | 24 | public void setResultsetRange(int from, int size){ 25 | this.from = from; 26 | this.size = size; 27 | 28 | } 29 | /** 30 | * Executes the plan and prints applicable output. 
31 | */ 32 | 33 | public abstract ActionResponse execute(); 34 | 35 | } // public abstract class Plan 36 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_Start.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_Start.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | import org.elasticsearch.common.logging.ESLogger; 6 | 7 | public class AST_Start extends SimpleNode { 8 | public AST_Start(int id) { 9 | super(id); 10 | } 11 | 12 | public AST_Start(CommandParser p, int id) { 13 | super(p, id); 14 | } 15 | 16 | public Node[] getChildren() { 17 | return this.children; 18 | } 19 | 20 | static public void dumpWithLogger(ESLogger logger, SimpleNode node, String prefix) { 21 | 22 | logger.debug(prefix + node.toString()); 23 | if (node.children != null) { 24 | for (int i = 0; i < node.children.length; ++i) { 25 | dumpWithLogger(logger, (SimpleNode) node.children[i], prefix + " "); 26 | } 27 | } 28 | } 29 | 30 | } 31 | /* JavaCC - OriginalChecksum=f51fdb2211e0d07be2426f9234f61f97 (do not edit this line) */ 32 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_Sort.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_Sort.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | import java.util.ArrayList; 6 | 7 | public class AST_Sort extends SimpleNode { 8 | public static class SortField{ 9 | public String field; 10 | public boolean desc; 11 | } 12 | 13 | public AST_Sort(int id) { 14 | super(id); 15 | } 16 | 17 | public AST_Sort(CommandParser p, int id) { 18 | super(p, id); 19 | } 20 | 21 | public ArrayList<SortField> sortFields = new ArrayList<SortField>(); 22 | 23 | public void addField(String field){ 24 | SortField sf = new SortField(); 25 | sf.field = field; 26 | sf.desc = true; 27 | sortFields.add(sf); 28 | } 29 | 30 | public void setDesc(boolean desc){ 31 | sortFields.get(sortFields.size()-1).desc = desc; 32 | } 33 | 34 | public int count = 10000; 35 | 36 | } 37 | /* JavaCC - OriginalChecksum=6bc3e0dc0cc9078825f2531aed0fbbf4 (do not edit this line) */ 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This is free and unencumbered software released into the public domain. 2 | 3 | Anyone is free to copy, modify, publish, use, compile, sell, or 4 | distribute this software, either in source code form or as a compiled 5 | binary, for any purpose, commercial or non-commercial, and by any 6 | means. 7 | 8 | In jurisdictions that recognize copyright laws, the author or authors 9 | of this software dedicate any and all copyright interest in the 10 | software to the public domain. We make this dedication for the benefit 11 | of the public at large and to the detriment of our heirs and 12 | successors. 
We intend this dedication to be an overt act of 13 | relinquishment in perpetuity of all present and future rights to this 14 | software under copyright law. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR 20 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 21 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 23 | 24 | For more information, please refer to -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_Join.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_Join.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | import com.everdata.command.JoinQuery.Join; 6 | import com.everdata.command.Option; 7 | 8 | public 9 | class AST_Join extends SimpleNode { 10 | public AST_Join(int id) { 11 | super(id); 12 | } 13 | 14 | public AST_Join(CommandParser p, int id) { 15 | super(p, id); 16 | } 17 | private String[] fields = null; 18 | private AST_Search search = null; 19 | 20 | private Join join = null; 21 | 22 | public Join getJoin(){ 23 | 24 | if(join == null){ 25 | traverseAST(); 26 | join = new Join(fields, search); 27 | } 28 | 29 | return join; 30 | 31 | } 32 | 33 | private void traverseAST() { 34 | 35 | for (Node n : children) { 36 | if (n instanceof AST_IdentList) { 37 | fields = ((AST_IdentList) n).getNames(); 38 | }else if (n instanceof AST_Search) { 39 | search = (AST_Search) n; 40 | } 41 | } 42 | } 43 | 44 | } 45 | /* JavaCC - OriginalChecksum=dd2028b5b56d704556c128ba5dc3cf1c (do not edit this line) */ 46 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/Node.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. Node.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | /* All AST nodes must implement this interface. It provides basic 6 | machinery for constructing the parent and child relationships 7 | between nodes. */ 8 | 9 | public 10 | interface Node { 11 | 12 | /** This method is called after the node has been made the current 13 | node. It indicates that child nodes can now be added to it. */ 14 | public void jjtOpen(); 15 | 16 | /** This method is called after all the child nodes have been 17 | added. */ 18 | public void jjtClose(); 19 | 20 | /** This pair of methods are used to inform the node of its 21 | parent. */ 22 | public void jjtSetParent(Node n); 23 | public Node jjtGetParent(); 24 | 25 | /** This method tells the node to add its argument to the node's 26 | list of children. */ 27 | public void jjtAddChild(Node n, int i); 28 | 29 | /** This method returns a child node. The children are numbered 30 | from zero, left to right. 
*/ 31 | public Node jjtGetChild(int i); 32 | 33 | /** Return the number of children the node has. */ 34 | public int jjtGetNumChildren(); 35 | } 36 | /* JavaCC - OriginalChecksum=96e4198a779a573dc67f45e07e00cf37 (do not edit this line) */ 37 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/CommandParserTreeConstants.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JavaCC: Do not edit this line. CommandParserTreeConstants.java Version 5.0 */ 2 | package com.everdata.parser; 3 | 4 | public interface CommandParserTreeConstants 5 | { 6 | public int JJT_START = 0; 7 | public int JJT_TABLE = 1; 8 | public int JJT_JOIN = 2; 9 | public int JJT_SORT = 3; 10 | public int JJT_REGEX = 4; 11 | public int JJT_STATS = 5; 12 | public int JJT_STATSFUNC = 6; 13 | public int JJT_EVALEXPR = 7; 14 | public int JJT_DELETE = 8; 15 | public int JJT_IDENTLIST = 9; 16 | public int JJT_BYIDENTLIST = 10; 17 | public int JJT_SEARCH = 11; 18 | public int JJT_OREXPR = 12; 19 | public int JJT_ANDEXPR = 13; 20 | public int JJT_UNARYEXPR = 14; 21 | public int JJT_SEARCHOPTION = 15; 22 | public int JJT_PREDICATEEXPRESSION = 16; 23 | public int JJT_TERMEXPRESSION = 17; 24 | public int JJT_COMPARISONEXPRESSION = 18; 25 | 26 | 27 | public String[] jjtNodeName = { 28 | "_Start", 29 | "_Table", 30 | "_Join", 31 | "_Sort", 32 | "_Regex", 33 | "_Stats", 34 | "_StatsFunc", 35 | "_EvalExpr", 36 | "_Delete", 37 | "_IdentList", 38 | "_ByIdentList", 39 | "_Search", 40 | "_OrExpr", 41 | "_AndExpr", 42 | "_UnaryExpr", 43 | "_SearchOption", 44 | "_PredicateExpression", 45 | "_TermExpression", 46 | "_ComparisonExpression", 47 | }; 48 | } 49 | /* JavaCC - OriginalChecksum=ac0ad954e97fc550f507838e612d0e06 (do not edit this line) */ 50 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_ByIdentList.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_ByIdentList.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | import java.util.ArrayList; 6 | import java.util.HashSet; 7 | 8 | public class AST_ByIdentList extends SimpleNode { 9 | 10 | public static class By { 11 | public String as = null; 12 | public String name = null; 13 | public boolean keyorder = true; 14 | public boolean desc = true; 15 | public boolean script = false; 16 | } 17 | 18 | public AST_ByIdentList(int id) { 19 | super(id); 20 | } 21 | 22 | public AST_ByIdentList(CommandParser p, int id) { 23 | super(p, id); 24 | } 25 | 26 | /** Names of the columns/tables. 
*/ 27 | public ArrayList byList = new ArrayList(); 28 | 29 | public void addField(String name) { 30 | By b = new By(); 31 | b.name = name; 32 | byList.add(b); 33 | } 34 | 35 | public void setCountOrder() { 36 | byList.get(byList.size() - 1).keyorder = false; 37 | } 38 | 39 | public void setDesc(boolean desc) { 40 | byList.get(byList.size() - 1).desc = desc; 41 | } 42 | 43 | public void setScript() { 44 | byList.get(byList.size() - 1).script = true; 45 | } 46 | 47 | public void setAs(String as){ 48 | byList.get(byList.size() - 1).as = as; 49 | } 50 | 51 | } 52 | /* 53 | * JavaCC - OriginalChecksum=e20a995b9b9e507db3cad93fb586861e (do not edit this 54 | * line) 55 | */ 56 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_Top.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_Top.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | /*package com.everdata.parser; 4 | 5 | 6 | import java.util.HashMap; 7 | import java.util.Map; 8 | 9 | 10 | 11 | 12 | import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; 13 | 14 | import com.everdata.command.CommandException; 15 | import com.everdata.command.Option; 16 | import com.everdata.command.Search; 17 | 18 | public class AST_Top extends SimpleNode { 19 | 20 | Map options = new HashMap ();; 21 | String[] bucketFields = new String[0]; 22 | String[] topFields = new String[0]; 23 | TermsBuilder top = null; 24 | 25 | public AST_Top(int id) { 26 | super(id); 27 | } 28 | 29 | public AST_Top(CommandParser p, int id) { 30 | super(p, id); 31 | } 32 | 33 | public String[] bucketFields(){ 34 | return bucketFields; 35 | } 36 | 37 | public String[] topFields(){ 38 | return topFields; 39 | } 40 | 41 | // top * [] 42 | 43 | private void traverseAST() { 44 | options.put(Option.LIMIT, "10"); 45 | 46 | for (Node n : children) { 47 | if (n instanceof AST_TopOption) { 48 | options.put(((AST_TopOption) n).opt.type, ((AST_TopOption) n).opt.value); 49 | }else if (n instanceof AST_IdentList) { 50 | topFields = ((AST_IdentList) n).getNames(); 51 | }else if (n instanceof AST_ByIdentList) { 52 | bucketFields = ((AST_ByIdentList) n).getNames(); 53 | } 54 | } 55 | } 56 | 57 | 58 | 59 | 60 | private TermsBuilder genAggregation() throws CommandException{ 61 | 62 | traverseAST(); 63 | 64 | TermsBuilder local = AST_Stats.newTermsBucket("top", Integer.parseInt(options.get(Option.LIMIT)), topFields); 65 | 66 | if(options.get(Option.MINCOUNT) != null){ 67 | local.minDocCount(Long.parseLong(options.get(Option.MINCOUNT))); 68 | } 69 | 70 | if(bucketFields.length > 0){ 71 | local = Search.newTermsBucket("topWithBy", Integer.parseInt(options.get(Option.LIMIT)), bucketFields).subAggregation(local); 72 | } 73 | 74 | return local; 75 | } 76 | 77 | public TermsBuilder getTop() throws CommandException{ 78 | if(top == null) 79 | top = genAggregation(); 80 | 81 | return top; 82 | 83 | } 84 | 85 | } 86 | */ 87 | /* 88 | * JavaCC - OriginalChecksum=472d83b96c6710de8a99d69edca4d23b (do not edit this 89 | * line) 90 | */ 91 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/SimpleNode.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. 
SimpleNode.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | public 6 | class SimpleNode implements Node { 7 | 8 | protected Node parent; 9 | protected Node[] children; 10 | protected int id; 11 | protected Object value; 12 | protected CommandParser parser; 13 | 14 | public SimpleNode(int i) { 15 | id = i; 16 | } 17 | 18 | public SimpleNode(CommandParser p, int i) { 19 | this(i); 20 | parser = p; 21 | } 22 | 23 | public void jjtOpen() { 24 | } 25 | 26 | public void jjtClose() { 27 | } 28 | 29 | public void jjtSetParent(Node n) { parent = n; } 30 | public Node jjtGetParent() { return parent; } 31 | 32 | public void jjtAddChild(Node n, int i) { 33 | if (children == null) { 34 | children = new Node[i + 1]; 35 | } else if (i >= children.length) { 36 | Node c[] = new Node[i + 1]; 37 | System.arraycopy(children, 0, c, 0, children.length); 38 | children = c; 39 | } 40 | children[i] = n; 41 | } 42 | 43 | public Node jjtGetChild(int i) { 44 | return children[i]; 45 | } 46 | 47 | public int jjtGetNumChildren() { 48 | return (children == null) ? 0 : children.length; 49 | } 50 | 51 | public void jjtSetValue(Object value) { this.value = value; } 52 | public Object jjtGetValue() { return value; } 53 | 54 | /* You can override these two methods in subclasses of SimpleNode to 55 | customize the way the node appears when the tree is dumped. If 56 | your output uses more than one line you should override 57 | toString(String), otherwise overriding toString() is probably all 58 | you need to do. */ 59 | 60 | public String toString() { return CommandParserTreeConstants.jjtNodeName[id]; } 61 | public String toString(String prefix) { return prefix + toString(); } 62 | 63 | /* Override this method if you want to customize how the node dumps 64 | out its children. 
*/ 65 | 66 | public void dump(String prefix) { 67 | System.out.println(toString(prefix)); 68 | if (children != null) { 69 | for (int i = 0; i < children.length; ++i) { 70 | SimpleNode n = (SimpleNode)children[i]; 71 | if (n != null) { 72 | n.dump(prefix + " "); 73 | } 74 | } 75 | } 76 | } 77 | } 78 | 79 | /* JavaCC - OriginalChecksum=6a8466f48d514fe85f40621e4d00d267 (do not edit this line) */ 80 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/xcontent/CsvXContent.java: -------------------------------------------------------------------------------- 1 | package com.everdata.xcontent; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.io.OutputStream; 6 | import java.io.Reader; 7 | import java.io.Writer; 8 | 9 | import org.elasticsearch.common.bytes.BytesReference; 10 | import org.elasticsearch.common.xcontent.XContent; 11 | import org.elasticsearch.common.xcontent.XContentGenerator; 12 | import org.elasticsearch.common.xcontent.XContentParser; 13 | import org.elasticsearch.common.xcontent.XContentType; 14 | 15 | import com.fasterxml.jackson.core.JsonEncoding; 16 | 17 | import com.fasterxml.jackson.dataformat.csv.CsvFactory; 18 | 19 | public class CsvXContent implements XContent { 20 | 21 | @Override 22 | public XContentType type() { 23 | // TODO Auto-generated method stub 24 | return null; 25 | } 26 | 27 | @Override 28 | public byte streamSeparator() { 29 | return '\n'; 30 | } 31 | 32 | private final static CsvFactory csvFactory; 33 | public final static CsvXContent csvXContent; 34 | 35 | static { 36 | csvFactory = new CsvFactory(); 37 | 38 | //csvFactory.configure(CsvGenerator.Feature., true); 39 | 40 | csvXContent = new CsvXContent(); 41 | } 42 | 43 | 44 | @Override 45 | public XContentGenerator createGenerator(OutputStream os) 46 | throws IOException { 47 | return new CsvXContentGenerator(csvFactory.createGenerator(os, JsonEncoding.UTF8)); 48 | } 49 | 50 | @Override 51 | public XContentGenerator createGenerator(Writer writer) throws IOException { 52 | return new CsvXContentGenerator(csvFactory.createGenerator(writer)); 53 | } 54 | 55 | @Override 56 | public XContentParser createParser(String content) throws IOException { 57 | throw new IOException("createParser unsupport method,really need this method?"); 58 | } 59 | 60 | @Override 61 | public XContentParser createParser(InputStream is) throws IOException { 62 | throw new IOException("createParser unsupport method,really need this method?"); 63 | } 64 | 65 | @Override 66 | public XContentParser createParser(byte[] data) throws IOException { 67 | throw new IOException("createParser unsupport method,really need this method?"); 68 | } 69 | 70 | @Override 71 | public XContentParser createParser(byte[] data, int offset, int length) 72 | throws IOException { 73 | throw new IOException("createParser unsupport method,really need this method?"); 74 | } 75 | 76 | @Override 77 | public XContentParser createParser(BytesReference bytes) throws IOException { 78 | throw new IOException("createParser unsupport method,really need this method?"); 79 | } 80 | 81 | @Override 82 | public XContentParser createParser(Reader reader) throws IOException { 83 | throw new IOException("createParser unsupport method,really need this method?"); 84 | } 85 | 86 | } 87 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/command/Optimizer.java: 
-------------------------------------------------------------------------------- 1 | package com.everdata.command; 2 | 3 | import java.util.ArrayList; 4 | 5 | import org.elasticsearch.client.Client; 6 | import org.elasticsearch.common.logging.ESLogger; 7 | 8 | import com.everdata.parser.AST_Search; 9 | import com.everdata.parser.AST_Start; 10 | import com.everdata.parser.CommandParserTreeConstants; 11 | import com.everdata.parser.Node; 12 | 13 | public class Optimizer implements CommandParserTreeConstants { 14 | 15 | int showFrom = 0; 16 | int showSize = 10; 17 | 18 | public void setShow(int from, int size){ 19 | this.showFrom = from; 20 | this.showSize = size; 21 | } 22 | 23 | // public void setClient(Client client){ 24 | // this.client = client; 25 | // } 26 | // 27 | // public void setLogger(ESLogger logger){ 28 | // this.logger = logger; 29 | // } 30 | 31 | static public Plan[] evaluate(AST_Start tree, Client client,ESLogger logger) throws CommandException { 32 | 33 | ArrayList al = new ArrayList(); 34 | 35 | for (Node n : tree.getChildren()) { 36 | if (n instanceof AST_Search) { 37 | // al.add(new Search((AST_Search)n, client, logger)); 38 | } 39 | } 40 | 41 | 42 | return al.toArray(new Plan[al.size()]); 43 | 44 | // dispatch optimizing to the individual classes 45 | /* 46 | * for (Node n : ((SimpleNode)tree)) { case JJT_TOP: //return new 47 | * Top((AST_Top) tree.getStmt()); 48 | * 49 | * case JJT_CREATEINDEX: // System.out.println("Creating index..."); 50 | * return new CreateIndex((AST_CreateIndex) tree.getStmt()); 51 | * 52 | * case JJT_CREATETABLE: // System.out.println("Creating table..."); 53 | * return new CreateTable((AST_CreateTable) tree.getStmt()); 54 | * 55 | * case JJT_DROPINDEX: // System.out.println("Dropping Index..."); 56 | * return new DropIndex((AST_DropIndex) tree.getStmt()); 57 | * 58 | * case JJT_DROPTABLE: // System.out.println("Dropping Table..."); 59 | * return new DropTable((AST_DropTable) tree.getStmt()); 60 | * 61 | * case JJT_DESCRIBE: // System.out.println("Describing..."); return new 62 | * Describe((AST_Describe) tree.getStmt()); 63 | * 64 | * case JJT_INSERT: // System.out.println("Inserting..."); return new 65 | * Insert((AST_Insert) tree.getStmt()); 66 | * 67 | * case JJT_SELECT: // System.out.println("Selecting..."); return new 68 | * Select((AST_Select) tree.getStmt()); 69 | * 70 | * case JJT_UPDATE: // System.out.println("Updating..."); return new 71 | * Update((AST_Update) tree.getStmt()); 72 | * 73 | * case JJT_DELETE: // System.out.println("Deleting..."); return new 74 | * Delete((AST_Delete) tree.getStmt()); 75 | * 76 | * default: throw new CommandException("unsupported query type"); 77 | * 78 | * } // switch 79 | */ 80 | } // public static Plan evaluate(AST_Start tree) throws QueryException 81 | 82 | } // public class Optimizer implements EsCmdTreeConstants 83 | -------------------------------------------------------------------------------- /src/test/java/com/everdata/test/CommandActionTest.java: -------------------------------------------------------------------------------- 1 | package com.everdata.test; 2 | 3 | import java.io.DataInputStream; 4 | import java.io.FileInputStream; 5 | import java.io.FileNotFoundException; 6 | import java.net.URISyntaxException; 7 | import java.util.ArrayList; 8 | import java.util.List; 9 | import java.util.Random; 10 | import java.util.concurrent.atomic.AtomicInteger; 11 | 12 | import junit.framework.TestCase; 13 | 14 | import org.apache.http.client.utils.URIBuilder; 15 | import 
org.elasticsearch.action.ActionResponse; 16 | import org.elasticsearch.client.Client; 17 | import org.elasticsearch.client.transport.TransportClient; 18 | import org.elasticsearch.common.settings.ImmutableSettings; 19 | import org.elasticsearch.common.transport.InetSocketTransportAddress; 20 | import org.elasticsearch.node.Node; 21 | import org.elasticsearch.node.NodeBuilder; 22 | import org.junit.After; 23 | import org.junit.Before; 24 | import org.junit.Test; 25 | 26 | import com.everdata.command.CommandException; 27 | import com.everdata.parser.AST_Start; 28 | import com.everdata.parser.CommandParser; 29 | import com.everdata.parser.ParseException; 30 | 31 | import static org.junit.Assert.*; 32 | 33 | 34 | public class CommandActionTest{ 35 | private String host = "http://192.168.200.121:9200/_command"; 36 | 37 | @Before 38 | public void setUp() throws Exception { 39 | 40 | } 41 | 42 | @After 43 | public void tearDown() throws Exception { 44 | 45 | } 46 | 47 | static AtomicInteger success_conn = new AtomicInteger(0); 48 | static AtomicInteger success_recv = new AtomicInteger(0); 49 | 50 | 51 | 52 | @Test 53 | public void multiThread() { 54 | Random rand = new Random(); 55 | 56 | URIBuilder uriBuilder = new URIBuilder(); 57 | uriBuilder.addParameter("q", String.valueOf(rand.nextDouble())); 58 | uriBuilder.addParameter("query", "true"); 59 | final String uri; 60 | try { 61 | uri = uriBuilder.build().toString(); 62 | } catch (URISyntaxException e) { 63 | // TODO Auto-generated catch block 64 | e.printStackTrace(); 65 | return; 66 | } 67 | 68 | List handles = new ArrayList(); 69 | for(int j = 0; j < 10; j++){ 70 | for(int i = 0; i < 1000; i++){ 71 | Thread handle = new Thread(new Runnable(){ 72 | 73 | @Override 74 | public void run() { 75 | 76 | String json = HttpHelper.sendGetUrl(host, uri); 77 | 78 | if(json != null && !json.contains("Exception")){ 79 | success_recv.incrementAndGet(); 80 | } 81 | //System.out.println(json); 82 | } 83 | 84 | }); 85 | 86 | handle.start(); 87 | handles.add(handle); 88 | 89 | 90 | 91 | 92 | } 93 | System.out.println("-----------------------"); 94 | 95 | for(Thread t: handles){ 96 | try { 97 | //System.out.print(t.isAlive()); 98 | t.join(); 99 | } catch (InterruptedException e) { 100 | // TODO Auto-generated catch block 101 | e.printStackTrace(); 102 | } 103 | } 104 | 105 | handles.clear(); 106 | } 107 | 108 | 109 | System.out.println("success_conn:" + success_conn.get()); 110 | System.out.println("success_recv:" + success_recv.get()); 111 | 112 | } 113 | 114 | } 115 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/plugin/rest/KafkaStreamRestHandler.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.plugin.rest; 2 | 3 | import static org.elasticsearch.rest.RestRequest.Method.GET; 4 | 5 | import java.util.HashMap; 6 | import java.util.Map; 7 | import java.util.concurrent.ConcurrentHashMap; 8 | 9 | import org.elasticsearch.client.Client; 10 | import org.elasticsearch.common.inject.Inject; 11 | import org.elasticsearch.common.settings.Settings; 12 | import org.elasticsearch.rest.BaseRestHandler; 13 | import org.elasticsearch.rest.BytesRestResponse; 14 | import org.elasticsearch.rest.RestChannel; 15 | import org.elasticsearch.rest.RestController; 16 | import org.elasticsearch.rest.RestRequest; 17 | import org.elasticsearch.rest.RestStatus; 18 | import org.apache.spark.SparkConf; 19 | import org.apache.spark.storage.StorageLevel; 20 | 
import org.apache.spark.streaming.Duration; 21 | import org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream; 22 | import org.apache.spark.streaming.api.java.JavaStreamingContext; 23 | import org.apache.spark.streaming.kafka.KafkaUtils; 24 | import org.apache.hadoop.io.BytesWritable; 25 | import org.apache.hadoop.io.Text; 26 | import org.apache.hadoop.mapred.JobConf; 27 | import org.apache.hadoop.mapred.SequenceFileOutputFormat; 28 | 29 | public class KafkaStreamRestHandler extends BaseRestHandler { 30 | 31 | static class KafkaStreamSeqOutputFormat extends SequenceFileOutputFormat { 32 | 33 | } 34 | 35 | private ConcurrentHashMap topicContextMap = new ConcurrentHashMap(); 36 | 37 | @Inject 38 | public KafkaStreamRestHandler(Settings settings, 39 | RestController controller, Client client) { 40 | super(settings, controller, client); 41 | controller.registerHandler(GET, "/_datastore", this); 42 | } 43 | 44 | @Override 45 | protected void handleRequest(RestRequest request, RestChannel channel, Client client) 46 | throws Exception { 47 | final String topic = request.param("topic", ""); 48 | final boolean schema = request.paramAsBoolean("schema", false); 49 | final String master = request.param("masterAddress", "local"); 50 | final String hdfs = request.param("hdfs", "hdfs://localhost:50070"); 51 | final String memory = request.param("memory", "2g"); 52 | final String appName = request.param("appName", "appName-"+topic); 53 | final int duration = request.paramAsInt("duration", 1000); 54 | 55 | Thread exec = new Thread(new Runnable(){ 56 | 57 | @Override 58 | public void run() { 59 | 60 | SparkConf sparkConf = new SparkConf().setAppName(appName).setMaster(master).set("spark.executor.memory", memory); 61 | JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, new Duration(duration)); 62 | 63 | Map topicMap = new HashMap(); 64 | topicMap.put(topic, 3); 65 | 66 | JavaPairReceiverInputDStream kafkaStream = KafkaUtils.createStream(jssc, String.class, byte[].class, 67 | kafka.serializer.DefaultDecoder.class, kafka.serializer.DefaultDecoder.class, null, 68 | topicMap, StorageLevel.MEMORY_ONLY()); 69 | 70 | //JobConf confHadoop = new JobConf(); 71 | //confHadoop.set("mapred.output.compress", "true"); 72 | //confHadoop.set("mapred.output.compression.codec", "com.hadoop.compression.lzo.LzopCodec"); 73 | 74 | kafkaStream.saveAsHadoopFiles(hdfs, "seq", Text.class, BytesWritable.class, KafkaStreamSeqOutputFormat.class); 75 | 76 | topicContextMap.put(topic, jssc); 77 | jssc.start(); 78 | jssc.awaitTermination(); 79 | 80 | } 81 | }); 82 | 83 | exec.start(); 84 | 85 | channel.sendResponse(new BytesRestResponse(RestStatus.OK, String.format("{\"topic\":\"%s\"}", topic))); 86 | 87 | 88 | } 89 | 90 | } 91 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/JJTCommandParserState.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JavaCC: Do not edit this line. 
JJTCommandParserState.java Version 5.0 */ 2 | package com.everdata.parser; 3 | 4 | public class JJTCommandParserState { 5 | private java.util.List nodes; 6 | private java.util.List marks; 7 | 8 | private int sp; // number of nodes on stack 9 | private int mk; // current mark 10 | private boolean node_created; 11 | 12 | public JJTCommandParserState() { 13 | nodes = new java.util.ArrayList(); 14 | marks = new java.util.ArrayList(); 15 | sp = 0; 16 | mk = 0; 17 | } 18 | 19 | /* Determines whether the current node was actually closed and 20 | pushed. This should only be called in the final user action of a 21 | node scope. */ 22 | public boolean nodeCreated() { 23 | return node_created; 24 | } 25 | 26 | /* Call this to reinitialize the node stack. It is called 27 | automatically by the parser's ReInit() method. */ 28 | public void reset() { 29 | nodes.clear(); 30 | marks.clear(); 31 | sp = 0; 32 | mk = 0; 33 | } 34 | 35 | /* Returns the root node of the AST. It only makes sense to call 36 | this after a successful parse. */ 37 | public Node rootNode() { 38 | return nodes.get(0); 39 | } 40 | 41 | /* Pushes a node on to the stack. */ 42 | public void pushNode(Node n) { 43 | nodes.add(n); 44 | ++sp; 45 | } 46 | 47 | /* Returns the node on the top of the stack, and remove it from the 48 | stack. */ 49 | public Node popNode() { 50 | if (--sp < mk) { 51 | mk = marks.remove(marks.size()-1); 52 | } 53 | return nodes.remove(nodes.size()-1); 54 | } 55 | 56 | /* Returns the node currently on the top of the stack. */ 57 | public Node peekNode() { 58 | return nodes.get(nodes.size()-1); 59 | } 60 | 61 | /* Returns the number of children on the stack in the current node 62 | scope. */ 63 | public int nodeArity() { 64 | return sp - mk; 65 | } 66 | 67 | 68 | public void clearNodeScope(Node n) { 69 | while (sp > mk) { 70 | popNode(); 71 | } 72 | mk = marks.remove(marks.size()-1); 73 | } 74 | 75 | 76 | public void openNodeScope(Node n) { 77 | marks.add(mk); 78 | mk = sp; 79 | n.jjtOpen(); 80 | } 81 | 82 | 83 | /* A definite node is constructed from a specified number of 84 | children. That number of nodes are popped from the stack and 85 | made the children of the definite node. Then the definite node 86 | is pushed on to the stack. */ 87 | public void closeNodeScope(Node n, int num) { 88 | mk = marks.remove(marks.size()-1); 89 | while (num-- > 0) { 90 | Node c = popNode(); 91 | c.jjtSetParent(n); 92 | n.jjtAddChild(c, num); 93 | } 94 | n.jjtClose(); 95 | pushNode(n); 96 | node_created = true; 97 | } 98 | 99 | 100 | /* A conditional node is constructed if its condition is true. All 101 | the nodes that have been pushed since the node was opened are 102 | made children of the conditional node, which is then pushed 103 | on to the stack. If the condition is false the node is not 104 | constructed and they are left on the stack. 
*/ 105 | public void closeNodeScope(Node n, boolean condition) { 106 | if (condition) { 107 | int a = nodeArity(); 108 | mk = marks.remove(marks.size()-1); 109 | while (a-- > 0) { 110 | Node c = popNode(); 111 | c.jjtSetParent(n); 112 | n.jjtAddChild(c, a); 113 | } 114 | n.jjtClose(); 115 | pushNode(n); 116 | node_created = true; 117 | } else { 118 | mk = marks.remove(marks.size()-1); 119 | node_created = false; 120 | } 121 | } 122 | } 123 | /* JavaCC - OriginalChecksum=835cdfd449437ea603c6ce8a4c2418f4 (do not edit this line) */ 124 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/Token.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JavaCC: Do not edit this line. Token.java Version 5.0 */ 2 | /* JavaCCOptions:TOKEN_EXTENDS=,KEEP_LINE_COL=null,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | /** 6 | * Describes the input token stream. 7 | */ 8 | 9 | public class Token implements java.io.Serializable { 10 | 11 | /** 12 | * The version identifier for this Serializable class. 13 | * Increment only if the serialized form of the 14 | * class changes. 15 | */ 16 | private static final long serialVersionUID = 1L; 17 | 18 | /** 19 | * An integer that describes the kind of this token. This numbering 20 | * system is determined by JavaCCParser, and a table of these numbers is 21 | * stored in the file ...Constants.java. 22 | */ 23 | public int kind; 24 | 25 | /** The line number of the first character of this Token. */ 26 | public int beginLine; 27 | /** The column number of the first character of this Token. */ 28 | public int beginColumn; 29 | /** The line number of the last character of this Token. */ 30 | public int endLine; 31 | /** The column number of the last character of this Token. */ 32 | public int endColumn; 33 | 34 | /** 35 | * The string image of the token. 36 | */ 37 | public String image; 38 | 39 | /** 40 | * A reference to the next regular (non-special) token from the input 41 | * stream. If this is the last token from the input stream, or if the 42 | * token manager has not read tokens beyond this one, this field is 43 | * set to null. This is true only if this token is also a regular 44 | * token. Otherwise, see below for a description of the contents of 45 | * this field. 46 | */ 47 | public Token next; 48 | 49 | /** 50 | * This field is used to access special tokens that occur prior to this 51 | * token, but after the immediately preceding regular (non-special) token. 52 | * If there are no such special tokens, this field is set to null. 53 | * When there are more than one such special token, this field refers 54 | * to the last of these special tokens, which in turn refers to the next 55 | * previous special token through its specialToken field, and so on 56 | * until the first special token (whose specialToken field is null). 57 | * The next fields of special tokens refer to other special tokens that 58 | * immediately follow it (without an intervening regular token). If there 59 | * is no such token, this field is null. 60 | */ 61 | public Token specialToken; 62 | 63 | /** 64 | * An optional attribute value of the Token. 65 | * Tokens which are not used as syntactic sugar will often contain 66 | * meaningful values that will be used later on by the compiler or 67 | * interpreter. This attribute value is often different from the image. 
68 | * Any subclass of Token that actually wants to return a non-null value can 69 | * override this method as appropriate. 70 | */ 71 | public Object getValue() { 72 | return null; 73 | } 74 | 75 | /** 76 | * No-argument constructor 77 | */ 78 | public Token() {} 79 | 80 | /** 81 | * Constructs a new token for the specified Image. 82 | */ 83 | public Token(int kind) 84 | { 85 | this(kind, null); 86 | } 87 | 88 | /** 89 | * Constructs a new token for the specified Image and Kind. 90 | */ 91 | public Token(int kind, String image) 92 | { 93 | this.kind = kind; 94 | this.image = image; 95 | } 96 | 97 | /** 98 | * Returns the image. 99 | */ 100 | public String toString() 101 | { 102 | return image; 103 | } 104 | 105 | /** 106 | * Returns a new Token object, by default. However, if you want, you 107 | * can create and return subclass objects based on the value of ofKind. 108 | * Simply add the cases to the switch for all those special cases. 109 | * For example, if you have a subclass of Token called IDToken that 110 | * you want to create if ofKind is ID, simply add something like : 111 | * 112 | * case MyParserConstants.ID : return new IDToken(ofKind, image); 113 | * 114 | * to the following switch statement. Then you can cast matchedToken 115 | * variable to the appropriate type and use sit in your lexical actions. 116 | */ 117 | public static Token newToken(int ofKind, String image) 118 | { 119 | switch(ofKind) 120 | { 121 | default : return new Token(ofKind, image); 122 | } 123 | } 124 | 125 | public static Token newToken(int ofKind) 126 | { 127 | return newToken(ofKind, null); 128 | } 129 | 130 | } 131 | /* JavaCC - OriginalChecksum=7e0e037d0b0931ee3833b01e1e9cd9bf (do not edit this line) */ 132 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/TokenMgrError.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 5.0 */ 2 | /* JavaCCOptions: */ 3 | package com.everdata.parser; 4 | 5 | /** Token Manager Error. */ 6 | public class TokenMgrError extends Error 7 | { 8 | 9 | /** 10 | * The version identifier for this Serializable class. 11 | * Increment only if the serialized form of the 12 | * class changes. 13 | */ 14 | private static final long serialVersionUID = 1L; 15 | 16 | /* 17 | * Ordinals for various reasons why an Error of this type can be thrown. 18 | */ 19 | 20 | /** 21 | * Lexical error occurred. 22 | */ 23 | static final int LEXICAL_ERROR = 0; 24 | 25 | /** 26 | * An attempt was made to create a second instance of a static token manager. 27 | */ 28 | static final int STATIC_LEXER_ERROR = 1; 29 | 30 | /** 31 | * Tried to change to an invalid lexical state. 32 | */ 33 | static final int INVALID_LEXICAL_STATE = 2; 34 | 35 | /** 36 | * Detected (and bailed out of) an infinite loop in the token manager. 37 | */ 38 | static final int LOOP_DETECTED = 3; 39 | 40 | /** 41 | * Indicates the reason why the exception is thrown. It will have 42 | * one of the above 4 values. 
43 | */ 44 | int errorCode; 45 | 46 | /** 47 | * Replaces unprintable characters by their escaped (or unicode escaped) 48 | * equivalents in the given string 49 | */ 50 | protected static final String addEscapes(String str) { 51 | StringBuffer retval = new StringBuffer(); 52 | char ch; 53 | for (int i = 0; i < str.length(); i++) { 54 | switch (str.charAt(i)) 55 | { 56 | case 0 : 57 | continue; 58 | case '\b': 59 | retval.append("\\b"); 60 | continue; 61 | case '\t': 62 | retval.append("\\t"); 63 | continue; 64 | case '\n': 65 | retval.append("\\n"); 66 | continue; 67 | case '\f': 68 | retval.append("\\f"); 69 | continue; 70 | case '\r': 71 | retval.append("\\r"); 72 | continue; 73 | case '\"': 74 | retval.append("\\\""); 75 | continue; 76 | case '\'': 77 | retval.append("\\\'"); 78 | continue; 79 | case '\\': 80 | retval.append("\\\\"); 81 | continue; 82 | default: 83 | if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { 84 | String s = "0000" + Integer.toString(ch, 16); 85 | retval.append("\\u" + s.substring(s.length() - 4, s.length())); 86 | } else { 87 | retval.append(ch); 88 | } 89 | continue; 90 | } 91 | } 92 | return retval.toString(); 93 | } 94 | 95 | /** 96 | * Returns a detailed message for the Error when it is thrown by the 97 | * token manager to indicate a lexical error. 98 | * Parameters : 99 | * EOFSeen : indicates if EOF caused the lexical error 100 | * curLexState : lexical state in which this error occurred 101 | * errorLine : line number when the error occurred 102 | * errorColumn : column number when the error occurred 103 | * errorAfter : prefix that was seen before this error occurred 104 | * curchar : the offending character 105 | * Note: You can customize the lexical error message by modifying this method. 106 | */ 107 | protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) { 108 | return("Lexical error at line " + 109 | errorLine + ", column " + 110 | errorColumn + ". Encountered: " + 111 | (EOFSeen ? " " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") + 112 | "after : \"" + addEscapes(errorAfter) + "\""); 113 | } 114 | 115 | /** 116 | * You can also modify the body of this method to customize your error messages. 117 | * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not 118 | * of end-users concern, so you can return something like : 119 | * 120 | * "Internal Error : Please file a bug report .... " 121 | * 122 | * from this method for such cases in the release version of your parser. 123 | */ 124 | public String getMessage() { 125 | return super.getMessage(); 126 | } 127 | 128 | /* 129 | * Constructors of various flavors follow. 130 | */ 131 | 132 | /** No arg constructor. */ 133 | public TokenMgrError() { 134 | } 135 | 136 | /** Constructor with message and reason. */ 137 | public TokenMgrError(String message, int reason) { 138 | super(message); 139 | errorCode = reason; 140 | } 141 | 142 | /** Full Constructor. 
*/ 143 | public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) { 144 | this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason); 145 | } 146 | } 147 | /* JavaCC - OriginalChecksum=f8e322204aea25e2a19a3d08e48d0e45 (do not edit this line) */ 148 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | 5 | rest-command 6 | 0.7.1 7 | jar 8 | 9 | elasticsearch-rest-command 10 | Restful pipeline command support plugin for Elasticsearch 11 | http://maven.apache.org 12 | 13 | scm:git@github.com:huangchen007/elasticsearch-rest-command.git 14 | scm:git@github.com:huangchen007/elasticsearch-rest-command.git 15 | https://github.com/huangchen007/elasticsearch-rest-command 16 | 17 | 18 | 19 | org.sonatype.oss 20 | oss-parent 21 | 7 22 | 23 | 24 | 25 | 1.4.0 26 | UTF-8 27 | 4.10.2 28 | 29 | 30 | 31 | 32 | com.google.guava 33 | guava 34 | 18.0 35 | compile 36 | 37 | 38 | junit 39 | junit 40 | 4.10 41 | test 42 | 43 | 44 | 45 | com.fasterxml.jackson.dataformat 46 | jackson-dataformat-csv 47 | 2.3.3 48 | 49 | 50 | 51 | com.fasterxml.jackson.dataformat 52 | jackson-dataformat-csv 53 | 2.3.3 54 | sources 55 | 56 | 57 | org.apache.lucene 58 | lucene-core 59 | ${lucene.maven.version} 60 | compile 61 | 62 | 63 | org.elasticsearch 64 | elasticsearch 65 | ${elasticsearch.version} 66 | compile 67 | 68 | 69 | 70 | org.apache.httpcomponents 71 | httpclient 72 | 4.2.6 73 | 74 | 75 | org.apache.hadoop 76 | hadoop-client 77 | 2.4.1 78 | 79 | 80 | org.apache.spark 81 | spark-core_2.10 82 | 1.1.0 83 | 84 | 85 | org.apache.spark 86 | spark-core_2.10 87 | 1.1.0 88 | sources 89 | 90 | 91 | org.apache.spark 92 | spark-sql_2.10 93 | 1.1.0 94 | 95 | 96 | org.apache.spark 97 | spark-streaming_2.10 98 | 1.1.0 99 | 100 | 101 | org.apache.spark 102 | spark-streaming_2.10 103 | 1.1.0 104 | sources 105 | 106 | 107 | org.apache.spark 108 | spark-sql_2.10 109 | 1.1.0 110 | sources 111 | 112 | 113 | org.elasticsearch 114 | elasticsearch-spark_2.10 115 | 2.1.0.Beta2 116 | 117 | 118 | org.apache.spark 119 | spark-streaming-kafka_2.10 120 | 1.1.1 121 | 122 | 123 | 124 | 125 | 126 | src/main/resources 127 | true 128 | 129 | **/*.properties 130 | 131 | 132 | 133 | 134 | 135 | org.apache.maven.plugins 136 | maven-compiler-plugin 137 | 2.3.2 138 | 139 | 1.7 140 | 1.7 141 | 142 | 143 | 144 | 145 | 146 | maven-assembly-plugin 147 | 2.3 148 | 149 | ${project.build.directory}/releases/ 150 | 151 | ${basedir}/src/main/assemblies/plugin.xml 152 | 153 | 154 | 155 | 156 | package 157 | 158 | single 159 | 160 | 161 | 162 | 163 | 164 | 165 | com.everdata 166 | 167 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ##elasticsearch-rest-command 2 | Restful pipeline command support plugin for Elasticsearch http://www.elasticsearch.org 3 | 4 | ##语法详情请查看rest-command.ppt 5 | 6 | v0.7.0(2014年12月2日) 7 | 增加/_task接口和/_taskstatus/{taskid}接口,目的是可以使用spark引擎和sql语法来执行较长时间的数据计算。 8 | 9 | 常用计算参数有:esTable,parTable,sql,masterAddress,memory 10 | 常用结果集参数有: 11 | 默认结果集存储在es中: 12 | targetIndex:默认值为index-tasks 13 | targetType:默认值为type-XXXXXXXXXX 14 | 或者结果集存储在hdfs上: 15 | targetPar:指定的hdfs地址 16 | 17 | 示例如下: 18 | http://localhost:9200/_task?esTable=SOURCETYPE=cdr234 APN="CMNET" AND CID 
=41553&masterAddress=spark://evercloud134:7077&sql=select APN from esTable&memory=1g 19 | 20 | http://localhost:9200/_task?esTable=SOURCETYPE=cdr234%20APN=%22CMNET%22%20AND%20CID%20=41553&parTable=hdfs://192.168.200.134:9000/evercloud/dnsparquetbig&masterAddress=spark://spark-work2:7077&sql=select%20domain%20from%20parTable&memory=1g 21 | 22 | 23 | 24 | v0.6.2(2014年11月9日) 25 | 增加download2=true,导出的格式更加易于编程 26 | 27 | v0.6.1(2014年11月4日) 28 | 修订了一个bug:当LIMIT=-1时,排序不生效 29 | 30 | v0.6.0(2014年9月18日) 31 | 支持everdata0.5.0以上版本 32 | 为了统一语法,统计命令导出download参数取消 33 | 当LIMIT参数设置为-1时,LIMIT参数的值自动替换成基数估计出来值。 34 | 35 | v0.5.3(2014年8月1日) 36 | 为了统一语法,取消采用(+/-)号来标示排序,启用ASC/DESC关键字来做为排序的标识。 37 | 未特殊指定顺序或者倒序的,默认均为倒序。 38 | 如: 39 | STATS TIMESPAN=NA,1s COUNT(user) BY MSISDN ASC, postDate 40 | SORT user DESC 41 | 42 | 取消KEYORDER关键字,启用COUNTORDER关键字。默认均为按照字段排序,特殊指定COUNTORDER,才采用结果数排序。注意,仅有分组字段才有COUNTORDER这个特性。 43 | 如: 44 | STATS SUM(downflow) [ASC/DESC] BY MSISDN COUNTORDER 45 | 46 | 47 | 48 | v0.5.2(2014年7月28日) 49 | 统计命令支持分页。from和size参数 50 | 51 | 统计命令支持导出。download=true 52 | 当download=true时,LIMIT参数失效,LIMIT参数的值自动替换成基数估计出来值。 53 | 54 | STATS命令中的LIMIT参数生效作用域用逗号分隔 55 | 如:STATS LIMIT=100,200 SUM(downflow) BY MSISDN, IMEI 56 | 表示第一个分组域MSISDN的LIMIT的参数是100,第二个分组域IMEI的LIMIT的参数200 57 | 对于分组域为时间类型和间隔分组,LIMIT使用0作为占位符 58 | 如:STATS LIMIT=0,200 TIMESPAN=1s SUM(downflow) BY postDate, IMEI 59 | 60 | 61 | v0.5.1(2014年7月24日) 62 | 增加可以按照统计字段排序 63 | 如:SOURCETYPE=twitter | STATS SUM(downflow) [ASC/DESC] BY (+/-)MSISDN KEYORDER 64 | 65 | 增加可以按照分组字段排序 66 | 如:SOURCETYPE=twitter | STATS COUNT(user) BY (+/-)MSISDN KEYORDER 67 | +表示正序ASC, -表示倒序(默认)。KEYORDER表示使用MSISDN来排序,默认使用结果数来排序 68 | 69 | 增加可采用间隔作为分组条件,时间字段采用TIMESPAN限制,普通数字字段采用SPAN限制。若间隔字段在后,前面需要使用NA和0来占位。如下所示: 70 | 如:SOURCETYPE=twitter | STATS TIMESPAN=NA,1s COUNT(user) BY (+/-)MSISDN KEYORDER, postDate 71 | 如:SOURCETYPE=twitter | STATS SPAN=0,50 COUNT(user) BY (+/-)MSISDN KEYORDER, age 72 | 73 | 取消对TOP命令的支持,TOP语义可由STATS命令取代 74 | 75 | 76 | v0.4.0(2014年6月30日) 77 | 修改为所有命令都必须大写 78 | 修正返回字段缺少id信息的bug 79 | 80 | v0.3.6(2014年6月26日) 81 | 修正返回字段缺少id信息的bug 82 | 83 | v0.3.5(2014年6月26日) 84 | 修复join命令netty线程死锁bug 85 | 86 | v0.3.4(2014年6月16日) 87 | 修复了新版本中count统计选项不能正确返回结果的bug 88 | 新增table命令,过滤返回字段。 89 | 如:sourcetype=twitter | table user,comment 90 | 字段中支持通配符*来过滤,例如:sourcetype=twitter | table user*,comment* 91 | 92 | v0.3.2(2014年6月13日) 93 | 仅支持everdata-0.4.0 94 | search 支持多个haschild选项,这样支持针对parent表进行多个haschild条件的查询 95 | 新增stats命令去重dc统计选项 96 | 如:sourcetype=twitter | stats dc(msisdn) by domain 97 | 新增join命令 98 | 支持join ( subsearch ) 99 | 例子:index=comment foo | join user,name (search index=user) 100 | 101 | 102 | 修复统计报表中字段展示的bug 103 | 104 | v0.2.4(2014年5月20日) 105 | job endpoint支持timeline 106 | localhost:9200/jobs/299450.5410349557/timeline?interval=1s&timelineField=accessTime, 使用jobid取时间线结果 107 | 108 | v0.2.3(2014年5月8日) 109 | stats命令增加了limit选项 110 | 例如 stats limit=50 sum(upflow) by user 111 | index参数 112 | 过滤不存在的index,不然查询会失败 113 | 如果所有指定的index都不存在,那么将在“所有的index”针对条件进行查询 114 | 115 | v0.2.1(2014年4月29日) 116 | 修改了输出的json格式化,更加易读和简单。 117 | 输出如下: 118 | {"took":293,"total":1,"fields":["postDate","mm","_count"],"rows":[["1258294332000","3","3"]]} 119 | 新增job方式的访问 120 | 第一步:localhost:9200/_commandjob?q=,返回{"jobid":"299450.5410349557"} 121 | 第二步: 122 | localhost:9200/jobs/299450.5410349557/query?from=0&size=50, 使用jobid取查询结果 123 | localhost:9200/jobs/299450.5410349557/report?from=0&size=50, 使用jobid取统计结果 124 | 版本号的发布规定 125 | 当最后一个小版本号为奇数时,该版本为测试版,如v0.2.1 126 | 当最后一个小版本号为偶数时,该版本为稳定版,如v0.2.2 127 | 128 | v0.1.10(2014年4月25日) 
129 | 新增命令选项 130 | Top和Stats均支持minicount选项 131 | 如:sourcetype=twitter | top minicount=3 user by pid 132 | 如:sourcetype=twitter | stats minicount=3 sum(upflow) by user 133 | 134 | V0.1.9 (2014年4月21日) 135 | 新增命令 136 | Sort(http://docs.splunk.com/Documentation/Splunk/6.0.3/SearchReference/Sort) 137 | 如:sourcetype=twitter | sort -user(-代表倒序,+代表正序) 138 | 139 | V0.1.6 (2014年4月15日) 140 | 新增命令 141 | Top(http://docs.splunk.com/Documentation/Splunk/6.0.2/SearchReference/Top) 142 | 如:sourcetype=twitter | top limit=10 user 143 | 144 | V0.1.5(2014年4月10日) 145 | 新增命令 146 | 新增stats命令,如stats count,sum() by fieldlist,完整命令如: sourcetype=twitter | stats count,sum(post_number) by user 147 | 支持简单脚本计算,如: sourcetype=twitter | stats count,sum("doc['upflow'].value+doc['downflow'].value") by user,postDate 148 | 149 | V0.1.1 (2014年4月4日) 150 | 新增命令 151 | 新增has_parent查询命令_command?q=hasparent=(sourcetype= ) 152 | 新增has_child查询命令_command?q=haschild=(sourcetype= ) 153 | 新增基于入库时间戳结果过滤命令_command?q=starttime= endtime= 154 | 155 | 新增查询参数 156 | 分页参数_command?from=&size= 157 | 导出模式参数_command?download= 158 | 159 | 新增command_head搜索UI 160 | 地址为_plugin/command_head 161 | 162 | 163 | V0.1.0 (2014年4月1日) 164 | 新增命令模式进行查询 165 | _command?q= 166 | 的语法可参考http://docs.splunk.com/Documentation/Splunk/6.0.2/SearchReference/Search 167 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/command/JoinQuery.java: -------------------------------------------------------------------------------- 1 | package com.everdata.command; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Arrays; 5 | import java.util.Collection; 6 | import java.util.HashMap; 7 | import java.util.HashSet; 8 | import java.util.Iterator; 9 | import java.util.Map; 10 | import java.util.Set; 11 | 12 | import org.elasticsearch.action.search.SearchRequestBuilder; 13 | import org.elasticsearch.action.search.SearchResponse; 14 | import org.elasticsearch.client.Client; 15 | import org.elasticsearch.common.logging.ESLogger; 16 | import org.elasticsearch.index.query.BoolQueryBuilder; 17 | import org.elasticsearch.index.query.QueryBuilder; 18 | import org.elasticsearch.index.query.QueryBuilders; 19 | import org.elasticsearch.search.SearchHit; 20 | import org.elasticsearch.search.SearchHits; 21 | 22 | import com.everdata.parser.AST_Search; 23 | 24 | public class JoinQuery { 25 | public static class Join { 26 | String[] fromFields; 27 | AST_Search searchTree; 28 | 29 | public Join(String[] fields, AST_Search search) { 30 | this.fromFields = fields; 31 | this.searchTree = search; 32 | } 33 | } 34 | 35 | public static Set getFieldsToBeChangeName(Collection first, Collection second, Collection join){ 36 | HashSet toBeChange = new HashSet(); 37 | //全部改名 38 | //toBeChange.addAll(first); 39 | toBeChange.addAll(second); 40 | 41 | toBeChange.removeAll(join); 42 | 43 | return toBeChange; 44 | } 45 | 46 | public static void changeDuplicateFieldName(Set tobeChangeFieldName, SearchHit joinHit){ 47 | for(String oldField: tobeChangeFieldName){ 48 | 49 | Object obj = joinHit.sourceAsMap().remove(oldField); 50 | joinHit.sourceAsMap().put(String.format("%s.%s", joinHit.type(),oldField), obj); 51 | 52 | } 53 | } 54 | 55 | public static HashMap> distinctRow(ArrayList> fromFieldsValue, String[] fields){ 56 | HashMap> distinct = new HashMap>(); 57 | for(Map row : fromFieldsValue){ 58 | StringBuilder dcKey = new StringBuilder(); 59 | for(String field: fields){ 60 | dcKey.append(row.get(field)); 61 | } 62 | distinct.put(dcKey.toString(), row); 63 | } 
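// Rows are de-duplicated on the plain concatenation of their join-field values, so each
// distinct combination of join keys is looked up (and later queried for) only once.
// Note that concatenation without a delimiter can collide, e.g. ("ab","c") vs ("a","bc").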
64 | return distinct; 65 | } 66 | 67 | 68 | 69 | public static void executeJoin(Join join, int size, 70 | ArrayList> fromFieldsValue, Client client, ESLogger logger) 71 | throws CommandException { 72 | 73 | 74 | if(fromFieldsValue.size() == 0) 75 | throw new CommandException("executeJoin error! fromFieldsValue==null raw resultset is null!"); 76 | 77 | // 生成joinFieldsQuery 78 | HashMap> dcRow = distinctRow(fromFieldsValue, join.fromFields); 79 | 80 | BoolQueryBuilder joinFieldsQuery = QueryBuilders.boolQuery(); 81 | 82 | if(join.fromFields.length == 1){ 83 | for(Map row : dcRow.values()){ 84 | joinFieldsQuery.should(QueryBuilders.termQuery(join.fromFields[0], row.get(join.fromFields[0]))); 85 | } 86 | 87 | }else{ 88 | for(Map row : dcRow.values()){ 89 | BoolQueryBuilder rowQuery = QueryBuilders.boolQuery(); 90 | for(int i = 0; i < join.fromFields.length; i++){ 91 | rowQuery.must(QueryBuilders.termQuery(join.fromFields[i], row.get(join.fromFields[i]))); 92 | } 93 | joinFieldsQuery.should(rowQuery); 94 | } 95 | } 96 | // 生成QueryBuilder 97 | join.searchTree.setJoinFieldsQuery(joinFieldsQuery); 98 | QueryBuilder joinQueryBuilder = join.searchTree.getQueryBuilder(); 99 | 100 | 101 | // 生成 102 | String[] indices = Search.parseIndices(join.searchTree, client); 103 | String[] sourceTypes = Search.parseTypes(join.searchTree); 104 | 105 | SearchRequestBuilder joinSearch = client.prepareSearch(indices).setTypes(sourceTypes).setFrom(0).setSize(size).setQuery(joinQueryBuilder); 106 | 107 | Search.dumpSearchScript(joinSearch, logger); 108 | SearchResponse joinResponse = joinSearch.execute().actionGet(); 109 | SearchHits joinHits = joinResponse.getHits(); 110 | 111 | if( joinHits.getTotalHits() == 0 ){ 112 | logger.info(String.format("join %s resultset is null", Arrays.toString(join.fromFields))); 113 | return; 114 | }else{ 115 | logger.info(String.format("join took %d", joinResponse.getTookInMillis())); 116 | } 117 | 118 | //search hits To hashmap,并改名 119 | //join前,求keyset的交集,求出有重名,但是不属于join范围的key 120 | //将后面的key改名,保证不会在join时被覆盖 121 | Set needToChangeName = getFieldsToBeChangeName(fromFieldsValue.get(0).keySet(), joinHits.getAt(0).sourceAsMap().keySet(), Arrays.asList(join.fromFields)); 122 | 123 | HashMap joinMap = new HashMap(); 124 | 125 | Iterator iterator = joinHits.iterator(); 126 | while(iterator.hasNext()){ 127 | SearchHit _hit = iterator.next(); 128 | 129 | StringBuilder key = new StringBuilder(); 130 | for(String field : join.fromFields){ 131 | if( field.equals("_id") ) 132 | key.append(_hit.getId()); 133 | else if(field.equals("_index")) 134 | key.append(_hit.index()); 135 | else if(field.equals("_type")) 136 | key.append(_hit.type()); 137 | else 138 | key.append(_hit.sourceAsMap().get(field)); 139 | } 140 | 141 | changeDuplicateFieldName(needToChangeName, _hit); 142 | joinMap.put(key.toString(), _hit); 143 | } 144 | 145 | logger.info(String.format("joinMap size %d", joinMap.size())); 146 | 147 | for(Map row : fromFieldsValue){ 148 | StringBuilder key = new StringBuilder(); 149 | for(String field : join.fromFields){ 150 | key.append(row.get(field)); 151 | } 152 | 153 | //join动作,求合集 154 | SearchHit joinHit = joinMap.get(key.toString()); 155 | if(joinHit != null) 156 | row.putAll(joinHit.sourceAsMap()); 157 | else 158 | logger.debug(String.format("joinMap.get(key.toString()) key = %s", key.toString())); 159 | } 160 | 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/ParseException.java: 
-------------------------------------------------------------------------------- 1 | /* Generated By:JavaCC: Do not edit this line. ParseException.java Version 5.0 */ 2 | /* JavaCCOptions:KEEP_LINE_COL=null */ 3 | package com.everdata.parser; 4 | 5 | /** 6 | * This exception is thrown when parse errors are encountered. 7 | * You can explicitly create objects of this exception type by 8 | * calling the method generateParseException in the generated 9 | * parser. 10 | * 11 | * You can modify this class to customize your error reporting 12 | * mechanisms so long as you retain the public fields. 13 | */ 14 | public class ParseException extends Exception { 15 | 16 | /** 17 | * The version identifier for this Serializable class. 18 | * Increment only if the serialized form of the 19 | * class changes. 20 | */ 21 | private static final long serialVersionUID = 1L; 22 | 23 | /** 24 | * This constructor is used by the method "generateParseException" 25 | * in the generated parser. Calling this constructor generates 26 | * a new object of this type with the fields "currentToken", 27 | * "expectedTokenSequences", and "tokenImage" set. 28 | */ 29 | public ParseException(Token currentTokenVal, 30 | int[][] expectedTokenSequencesVal, 31 | String[] tokenImageVal 32 | ) 33 | { 34 | super(initialise(currentTokenVal, expectedTokenSequencesVal, tokenImageVal)); 35 | currentToken = currentTokenVal; 36 | expectedTokenSequences = expectedTokenSequencesVal; 37 | tokenImage = tokenImageVal; 38 | } 39 | 40 | /** 41 | * The following constructors are for use by you for whatever 42 | * purpose you can think of. Constructing the exception in this 43 | * manner makes the exception behave in the normal way - i.e., as 44 | * documented in the class "Throwable". The fields "errorToken", 45 | * "expectedTokenSequences", and "tokenImage" do not contain 46 | * relevant information. The JavaCC generated code does not use 47 | * these constructors. 48 | */ 49 | 50 | public ParseException() { 51 | super(); 52 | } 53 | 54 | /** Constructor with message. */ 55 | public ParseException(String message) { 56 | super(message); 57 | } 58 | 59 | 60 | /** 61 | * This is the last token that has been consumed successfully. If 62 | * this object has been created due to a parse error, the token 63 | * followng this token will (therefore) be the first error token. 64 | */ 65 | public Token currentToken; 66 | 67 | /** 68 | * Each entry in this array is an array of integers. Each array 69 | * of integers represents a sequence of tokens (by their ordinal 70 | * values) that is expected at this point of the parse. 71 | */ 72 | public int[][] expectedTokenSequences; 73 | 74 | /** 75 | * This is a reference to the "tokenImage" array of the generated 76 | * parser within which the parse error occurred. This array is 77 | * defined in the generated ...Constants interface. 78 | */ 79 | public String[] tokenImage; 80 | 81 | /** 82 | * It uses "currentToken" and "expectedTokenSequences" to generate a parse 83 | * error message and returns it. If this object has been created 84 | * due to a parse error, and you do not catch it (it gets thrown 85 | * from the parser) the correct error message 86 | * gets displayed. 
87 | */ 88 | private static String initialise(Token currentToken, 89 | int[][] expectedTokenSequences, 90 | String[] tokenImage) { 91 | String eol = System.getProperty("line.separator", "\n"); 92 | StringBuffer expected = new StringBuffer(); 93 | int maxSize = 0; 94 | for (int i = 0; i < expectedTokenSequences.length; i++) { 95 | if (maxSize < expectedTokenSequences[i].length) { 96 | maxSize = expectedTokenSequences[i].length; 97 | } 98 | for (int j = 0; j < expectedTokenSequences[i].length; j++) { 99 | expected.append(tokenImage[expectedTokenSequences[i][j]]).append(' '); 100 | } 101 | if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) { 102 | expected.append("..."); 103 | } 104 | expected.append(eol).append(" "); 105 | } 106 | String retval = "Encountered \""; 107 | Token tok = currentToken.next; 108 | for (int i = 0; i < maxSize; i++) { 109 | if (i != 0) retval += " "; 110 | if (tok.kind == 0) { 111 | retval += tokenImage[0]; 112 | break; 113 | } 114 | retval += " " + tokenImage[tok.kind]; 115 | retval += " \""; 116 | retval += add_escapes(tok.image); 117 | retval += " \""; 118 | tok = tok.next; 119 | } 120 | retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn; 121 | retval += "." + eol; 122 | if (expectedTokenSequences.length == 1) { 123 | retval += "Was expecting:" + eol + " "; 124 | } else { 125 | retval += "Was expecting one of:" + eol + " "; 126 | } 127 | retval += expected.toString(); 128 | return retval; 129 | } 130 | 131 | /** 132 | * The end of line string for this machine. 133 | */ 134 | protected String eol = System.getProperty("line.separator", "\n"); 135 | 136 | /** 137 | * Used to convert raw characters to their escaped version 138 | * when these raw version cannot be used as part of an ASCII 139 | * string literal. 
140 | */ 141 | static String add_escapes(String str) { 142 | StringBuffer retval = new StringBuffer(); 143 | char ch; 144 | for (int i = 0; i < str.length(); i++) { 145 | switch (str.charAt(i)) 146 | { 147 | case 0 : 148 | continue; 149 | case '\b': 150 | retval.append("\\b"); 151 | continue; 152 | case '\t': 153 | retval.append("\\t"); 154 | continue; 155 | case '\n': 156 | retval.append("\\n"); 157 | continue; 158 | case '\f': 159 | retval.append("\\f"); 160 | continue; 161 | case '\r': 162 | retval.append("\\r"); 163 | continue; 164 | case '\"': 165 | retval.append("\\\""); 166 | continue; 167 | case '\'': 168 | retval.append("\\\'"); 169 | continue; 170 | case '\\': 171 | retval.append("\\\\"); 172 | continue; 173 | default: 174 | if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { 175 | String s = "0000" + Integer.toString(ch, 16); 176 | retval.append("\\u" + s.substring(s.length() - 4, s.length())); 177 | } else { 178 | retval.append(ch); 179 | } 180 | continue; 181 | } 182 | } 183 | return retval.toString(); 184 | } 185 | 186 | } 187 | /* JavaCC - OriginalChecksum=c0712842013a5065df9502328ef96935 (do not edit this line) */ 188 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/plugin/rest/CommandRestHandler.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.plugin.rest; 2 | 3 | import java.io.IOException; 4 | import java.io.OutputStream; 5 | import java.util.HashMap; 6 | 7 | import org.elasticsearch.rest.BytesRestResponse; 8 | import org.elasticsearch.rest.RestChannel; 9 | import org.elasticsearch.rest.RestController; 10 | import org.elasticsearch.rest.RestRequest; 11 | import org.elasticsearch.rest.RestResponse; 12 | import org.elasticsearch.rest.RestStatus; 13 | import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse; 14 | import org.elasticsearch.action.search.SearchResponse; 15 | import org.elasticsearch.client.Client; 16 | import org.elasticsearch.common.inject.Inject; 17 | import org.elasticsearch.common.settings.Settings; 18 | import org.elasticsearch.common.xcontent.XContent; 19 | import org.elasticsearch.common.xcontent.XContentBuilder; 20 | import org.elasticsearch.common.xcontent.XContentType; 21 | 22 | import static org.elasticsearch.rest.RestRequest.Method.GET; 23 | import static org.elasticsearch.rest.RestRequest.Method.POST; 24 | 25 | import org.elasticsearch.rest.BaseRestHandler; 26 | import org.elasticsearch.rest.action.support.RestBuilderListener; 27 | import org.elasticsearch.rest.support.RestUtils; 28 | 29 | import com.everdata.command.CommandException; 30 | import com.everdata.command.ReportResponse; 31 | import com.everdata.command.Search; 32 | import com.everdata.command.Search.QueryResponse; 33 | import com.everdata.parser.AST_Start; 34 | import com.everdata.parser.CommandParser; 35 | import com.everdata.parser.ParseException; 36 | import com.everdata.xcontent.CsvXContent; 37 | 38 | public class CommandRestHandler extends BaseRestHandler { 39 | @Inject 40 | public CommandRestHandler(Settings settings, Client client, 41 | RestController controller) { 42 | super(settings, controller, client); 43 | controller.registerHandler(GET, "/_command", this); 44 | controller.registerHandler(POST, "/_command", this); 45 | } 46 | 47 | @Override 48 | public void handleRequest(RestRequest request, final RestChannel channel, Client client) { 49 | 50 | String command = ""; 51 | 52 | if (request.method() == RestRequest.Method.GET) 53 | 
command = request.param("q", ""); 54 | else { 55 | HashMap post = new HashMap(); 56 | RestUtils.decodeQueryString(request.content().toUtf8(), 0, post); 57 | if (post.containsKey("q")) { 58 | command = post.get("q"); 59 | } 60 | } 61 | 62 | if (command.length() == 0) { 63 | SendFailure(request, channel, new CommandException("命令为空")); 64 | return; 65 | } else { 66 | if (!command.startsWith(Search.PREFIX_SEARCH_STRING)) 67 | command = Search.PREFIX_SEARCH_STRING + " " + command; 68 | } 69 | logger.info(command); 70 | 71 | final int from = request.paramAsInt("from", 0); 72 | final int size = request.paramAsInt("size", 10); 73 | final String format = request.param("format", "json"); 74 | final boolean download = request.paramAsBoolean("download", false); 75 | final boolean download2 = request.paramAsBoolean("download2", false); 76 | final boolean showMeta = request.paramAsBoolean("showMeta", true); 77 | 78 | XContent xContent = XContentType.JSON.xContent(); 79 | 80 | if (format.equalsIgnoreCase("csv")) 81 | xContent = CsvXContent.csvXContent; 82 | 83 | final Search search; 84 | 85 | try { 86 | 87 | CommandParser parser = new CommandParser(command); 88 | 89 | AST_Start.dumpWithLogger(logger, parser.getInnerTree(), ""); 90 | 91 | search = new Search(parser, client, logger); 92 | 93 | } catch (CommandException e2) { 94 | SendFailure(request, channel, e2); 95 | return; 96 | } catch (ParseException e1) { 97 | SendFailure(request, channel, e1); 98 | return; 99 | } catch (IOException e) { 100 | SendFailure(request, channel, e); 101 | return; 102 | } 103 | 104 | if (request.paramAsBoolean("delete", false)) { 105 | search.executeDelete(new RestBuilderListener( 106 | channel) { 107 | @Override 108 | public RestResponse buildResponse(DeleteByQueryResponse result, 109 | XContentBuilder builder) throws Exception { 110 | search.buildDelete(builder, result); 111 | return new BytesRestResponse(result.status(), builder); 112 | } 113 | }); 114 | return; 115 | } 116 | 117 | if (request.paramAsBoolean("query", true)) { 118 | if (download || download2) { 119 | 120 | search.executeDownload(new OutputStream() { 121 | byte[] innerBuffer = new byte[1200]; 122 | int idx = 0; 123 | 124 | @Override 125 | public void write(int b) throws IOException { 126 | innerBuffer[idx++] = (byte) b; 127 | if (idx == innerBuffer.length) { 128 | /* 129 | channel.sendContinuousBytes(innerBuffer, 0, idx, 130 | false); 131 | */ 132 | idx = 0; 133 | } 134 | } 135 | 136 | @Override 137 | public void close() throws IOException { 138 | /* 139 | if (idx > 0) 140 | channel.sendContinuousBytes(innerBuffer, 0, idx, true); 141 | else 142 | channel.sendContinuousBytes(null, 0, 0, true); 143 | */ 144 | } 145 | 146 | }, xContent, download2); 147 | 148 | } else if (search.joinSearchs.size() > 0) { 149 | 150 | search.executeQuery(new RestBuilderListener( 151 | channel) { 152 | @Override 153 | public RestResponse buildResponse(QueryResponse result, 154 | XContentBuilder builder) throws Exception { 155 | Search.buildQuery(from, builder, result, logger, search.tableFieldNames, showMeta); 156 | return new BytesRestResponse(RestStatus.OK, builder); 157 | } 158 | }, from, size, new String[0]); 159 | } else { 160 | search.executeQueryWithNonJoin( 161 | new RestBuilderListener(channel) { 162 | @Override 163 | public RestResponse buildResponse( 164 | SearchResponse result, 165 | XContentBuilder builder) throws Exception { 166 | Search.buildQuery(from, builder, result, logger, search.tableFieldNames, showMeta); 167 | return new BytesRestResponse(RestStatus.OK, 168 
| builder); 169 | } 170 | }, from, size, new String[0]); 171 | } 172 | } else { 173 | final ReportResponse result = new ReportResponse(); 174 | result.bucketFields = search.bucketFields; 175 | result.statsFields = search.statsFields; 176 | 177 | search.executeReport(new RestBuilderListener( 178 | channel) { 179 | @Override 180 | public RestResponse buildResponse(SearchResponse response, 181 | XContentBuilder builder) throws Exception { 182 | result.response = response; 183 | Search.buildReport(from, size, builder, result, logger); 184 | return new BytesRestResponse(response.status(), builder); 185 | } 186 | 187 | }, from, size); 188 | } 189 | 190 | } 191 | 192 | public void SendFailure(RestRequest request, RestChannel channel, 193 | Throwable e) { 194 | try { 195 | channel.sendResponse(new BytesRestResponse(channel, e)); 196 | } catch (IOException e1) { 197 | logger.error("Failed to send failure response", e1); 198 | } 199 | } 200 | 201 | } 202 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/CommandParserConstants.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree&JavaCC: Do not edit this line. CommandParserConstants.java */ 2 | package com.everdata.parser; 3 | 4 | 5 | /** 6 | * Token literal values and constants. 7 | * Generated by org.javacc.parser.OtherFilesGen#start() 8 | */ 9 | public interface CommandParserConstants { 10 | 11 | /** End of File. */ 12 | int EOF = 0; 13 | /** RegularExpression Id. */ 14 | int K_AND = 6; 15 | /** RegularExpression Id. */ 16 | int K_SEARCH = 7; 17 | /** RegularExpression Id. */ 18 | int K_COUNTFIELD = 8; 19 | /** RegularExpression Id. */ 20 | int K_LIMIT = 9; 21 | /** RegularExpression Id. */ 22 | int K_OTHERSTR = 10; 23 | /** RegularExpression Id. */ 24 | int K_PERCENTFIELD = 11; 25 | /** RegularExpression Id. */ 26 | int K_SHOWCOUNT = 12; 27 | /** RegularExpression Id. */ 28 | int K_SHOWPERC = 13; 29 | /** RegularExpression Id. */ 30 | int K_USEOTHER = 14; 31 | /** RegularExpression Id. */ 32 | int K_TIMEFORMAT = 15; 33 | /** RegularExpression Id. */ 34 | int K_STARTTIME = 16; 35 | /** RegularExpression Id. */ 36 | int K_ENDTIME = 17; 37 | /** RegularExpression Id. */ 38 | int K_EARLIEST = 18; 39 | /** RegularExpression Id. */ 40 | int K_LATEST = 19; 41 | /** RegularExpression Id. */ 42 | int K_MINCOUNT = 20; 43 | /** RegularExpression Id. */ 44 | int K_SOURCETYPE = 21; 45 | /** RegularExpression Id. */ 46 | int K_TOP = 22; 47 | /** RegularExpression Id. */ 48 | int K_BY = 23; 49 | /** RegularExpression Id. */ 50 | int K_CREATE = 24; 51 | /** RegularExpression Id. */ 52 | int K_DELETE = 25; 53 | /** RegularExpression Id. */ 54 | int K_DESC = 26; 55 | /** RegularExpression Id. */ 56 | int K_ASC = 27; 57 | /** RegularExpression Id. */ 58 | int K_DESCRIBE = 28; 59 | /** RegularExpression Id. */ 60 | int K_DISTINCT = 29; 61 | /** RegularExpression Id. */ 62 | int K_DROP = 30; 63 | /** RegularExpression Id. */ 64 | int K_EXPLAIN = 31; 65 | /** RegularExpression Id. */ 66 | int K_HELP = 32; 67 | /** RegularExpression Id. */ 68 | int K_FLOAT = 33; 69 | /** RegularExpression Id. */ 70 | int K_FROM = 34; 71 | /** RegularExpression Id. */ 72 | int K_INDEX = 35; 73 | /** RegularExpression Id. */ 74 | int K_INSERT = 36; 75 | /** RegularExpression Id. */ 76 | int K_INTEGER = 37; 77 | /** RegularExpression Id. */ 78 | int K_INTO = 38; 79 | /** RegularExpression Id. */ 80 | int K_ON = 39; 81 | /** RegularExpression Id. 
*/ 82 | int K_OR = 40; 83 | /** RegularExpression Id. */ 84 | int K_NOT = 41; 85 | /** RegularExpression Id. */ 86 | int K_ORDER = 42; 87 | /** RegularExpression Id. */ 88 | int K_QUIT = 43; 89 | /** RegularExpression Id. */ 90 | int K_SELECT = 44; 91 | /** RegularExpression Id. */ 92 | int K_SET = 45; 93 | /** RegularExpression Id. */ 94 | int K_STRING = 46; 95 | /** RegularExpression Id. */ 96 | int K_STATS = 47; 97 | /** RegularExpression Id. */ 98 | int K_TABLE = 48; 99 | /** RegularExpression Id. */ 100 | int K_UPDATE = 49; 101 | /** RegularExpression Id. */ 102 | int K_VALUES = 50; 103 | /** RegularExpression Id. */ 104 | int K_WHERE = 51; 105 | /** RegularExpression Id. */ 106 | int K_HASCHILD = 52; 107 | /** RegularExpression Id. */ 108 | int K_HASPARENT = 53; 109 | /** RegularExpression Id. */ 110 | int K_SUM = 54; 111 | /** RegularExpression Id. */ 112 | int K_DC = 55; 113 | /** RegularExpression Id. */ 114 | int K_MIN = 56; 115 | /** RegularExpression Id. */ 116 | int K_MAX = 57; 117 | /** RegularExpression Id. */ 118 | int K_AVG = 58; 119 | /** RegularExpression Id. */ 120 | int K_EVAL = 59; 121 | /** RegularExpression Id. */ 122 | int K_COUNT = 60; 123 | /** RegularExpression Id. */ 124 | int K_AS = 61; 125 | /** RegularExpression Id. */ 126 | int K_REGEX = 62; 127 | /** RegularExpression Id. */ 128 | int K_SORT = 63; 129 | /** RegularExpression Id. */ 130 | int K_JOIN = 64; 131 | /** RegularExpression Id. */ 132 | int K_SPAN = 65; 133 | /** RegularExpression Id. */ 134 | int K_TIMESPAN = 66; 135 | /** RegularExpression Id. */ 136 | int K_COUNTORDER = 67; 137 | /** RegularExpression Id. */ 138 | int K_CARD = 68; 139 | /** RegularExpression Id. */ 140 | int O_EQ = 69; 141 | /** RegularExpression Id. */ 142 | int O_NEQ = 70; 143 | /** RegularExpression Id. */ 144 | int O_GT = 71; 145 | /** RegularExpression Id. */ 146 | int O_GTE = 72; 147 | /** RegularExpression Id. */ 148 | int O_LT = 73; 149 | /** RegularExpression Id. */ 150 | int O_LTE = 74; 151 | /** RegularExpression Id. */ 152 | int O_LPAREN = 75; 153 | /** RegularExpression Id. */ 154 | int O_RPAREN = 76; 155 | /** RegularExpression Id. */ 156 | int O_COMMA = 77; 157 | /** RegularExpression Id. */ 158 | int O_SEMI = 78; 159 | /** RegularExpression Id. */ 160 | int O_QUOTE = 79; 161 | /** RegularExpression Id. */ 162 | int O_VERTICAL = 80; 163 | /** RegularExpression Id. */ 164 | int S_INTEGER = 81; 165 | /** RegularExpression Id. */ 166 | int DIGIT = 82; 167 | /** RegularExpression Id. */ 168 | int S_FLOAT = 83; 169 | /** RegularExpression Id. */ 170 | int S_IDENTIFIER = 84; 171 | /** RegularExpression Id. */ 172 | int LETTER = 85; 173 | /** RegularExpression Id. */ 174 | int SPECIAL_CHAR = 86; 175 | /** RegularExpression Id. */ 176 | int CJK = 87; 177 | /** RegularExpression Id. */ 178 | int S_QUOTED_STRING = 88; 179 | /** RegularExpression Id. */ 180 | int S_SINGLE_QUOTED_STRING = 89; 181 | /** RegularExpression Id. */ 182 | int LINE_COMMENT = 90; 183 | /** RegularExpression Id. */ 184 | int MULTI_LINE_COMMENT = 91; 185 | 186 | /** Lexical state. */ 187 | int DEFAULT = 0; 188 | 189 | /** Literal token values. 
*/ 190 | String[] tokenImage = { 191 | "", 192 | "\" \"", 193 | "\"\\f\"", 194 | "\"\\t\"", 195 | "\"\\r\"", 196 | "\"\\n\"", 197 | "\"AND\"", 198 | "\"SEARCH\"", 199 | "\"COUNTFIELD\"", 200 | "\"LIMIT\"", 201 | "\"OTHERSTR\"", 202 | "\"PERCENTFIELD\"", 203 | "\"SHOWCOUNT\"", 204 | "\"SHOWPERC\"", 205 | "\"USEROTHER\"", 206 | "\"TIMEFORMAT\"", 207 | "\"STARTTIME\"", 208 | "\"ENDTIME\"", 209 | "\"EARLIEST\"", 210 | "\"LATEST\"", 211 | "\"MINCOUNT\"", 212 | "\"SOURCETYPE\"", 213 | "\"TOP\"", 214 | "\"BY\"", 215 | "\"CREATE\"", 216 | "\"DELETE\"", 217 | "\"DESC\"", 218 | "\"ASC\"", 219 | "\"DESCRIBE\"", 220 | "\"DISTINCT\"", 221 | "\"DROP\"", 222 | "\"EXPLAIN\"", 223 | "\"HELP\"", 224 | "\"FLOAT\"", 225 | "\"FROM\"", 226 | "\"INDEX\"", 227 | "\"INSERT\"", 228 | "\"INTEGER\"", 229 | "\"INTO\"", 230 | "\"ON\"", 231 | "\"OR\"", 232 | "\"NOT\"", 233 | "\"ORDER\"", 234 | "\"QUIT\"", 235 | "\"SELECT\"", 236 | "\"SET\"", 237 | "\"STRING\"", 238 | "\"STATS\"", 239 | "\"TABLE\"", 240 | "\"UPDATE\"", 241 | "\"VALUES\"", 242 | "\"WHERE\"", 243 | "\"HASCHILD\"", 244 | "\"HASPARENT\"", 245 | "\"SUM\"", 246 | "\"DC\"", 247 | "\"MIN\"", 248 | "\"MAX\"", 249 | "\"AVG\"", 250 | "\"EVAL\"", 251 | "\"COUNT\"", 252 | "\"AS\"", 253 | "\"REGEX\"", 254 | "\"SORT\"", 255 | "\"JOIN\"", 256 | "\"SPAN\"", 257 | "\"TIMESPAN\"", 258 | "\"COUNTORDER\"", 259 | "\"CARD\"", 260 | "\"=\"", 261 | "\"!=\"", 262 | "\">\"", 263 | "\">=\"", 264 | "\"<\"", 265 | "\"<=\"", 266 | "\"(\"", 267 | "\")\"", 268 | "\",\"", 269 | "\";\"", 270 | "\"\\\"\"", 271 | "\"|\"", 272 | "", 273 | "", 274 | "", 275 | "", 276 | "", 277 | "", 278 | "", 279 | "", 280 | "", 281 | "", 282 | "", 283 | }; 284 | 285 | } 286 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/plugin/rest/TaskRestHandler.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.plugin.rest; 2 | 3 | import static org.elasticsearch.rest.RestRequest.Method.GET; 4 | import static org.elasticsearch.rest.RestRequest.Method.POST; 5 | 6 | import java.io.IOException; 7 | import java.security.InvalidParameterException; 8 | import java.util.ArrayList; 9 | 10 | 11 | 12 | import java.util.HashMap; 13 | import java.util.List; 14 | import java.util.Map; 15 | import java.util.concurrent.TimeUnit; 16 | 17 | import org.apache.spark.SparkConf; 18 | import org.apache.spark.api.java.JavaSparkContext; 19 | import org.apache.spark.sql.SchemaRDD; 20 | import org.apache.spark.sql.api.java.JavaSQLContext; 21 | import org.apache.spark.sql.api.java.JavaSchemaRDD; 22 | import org.apache.spark.storage.StorageLevel; 23 | import org.elasticsearch.client.Client; 24 | import org.elasticsearch.cluster.ClusterState; 25 | import org.elasticsearch.cluster.node.DiscoveryNode; 26 | import org.elasticsearch.common.cache.Cache; 27 | import org.elasticsearch.common.cache.CacheBuilder; 28 | import org.elasticsearch.common.inject.Inject; 29 | import org.elasticsearch.common.settings.Settings; 30 | import org.elasticsearch.hadoop.util.StringUtils; 31 | import org.elasticsearch.rest.BaseRestHandler; 32 | import org.elasticsearch.rest.BytesRestResponse; 33 | import org.elasticsearch.rest.RestChannel; 34 | import org.elasticsearch.rest.RestController; 35 | import org.elasticsearch.rest.RestRequest; 36 | import org.elasticsearch.rest.RestStatus; 37 | import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 38 | 39 | import com.everdata.command.CommandException; 40 | import com.everdata.command.Search; 
41 | import com.everdata.parser.CommandParser; 42 | import com.everdata.parser.ParseException; 43 | 44 | 45 | public class TaskRestHandler extends BaseRestHandler { 46 | static enum TaskStatus{ 47 | RUNNING,ERROR,DONE 48 | } 49 | 50 | static class TaskResponse{ 51 | 52 | public TaskStatus status = TaskStatus.RUNNING; 53 | public Exception e = null; 54 | public String task; 55 | } 56 | 57 | @Inject 58 | public TaskRestHandler(Settings settings, Client client, 59 | RestController controller) { 60 | super(settings, controller, client); 61 | this.client = client; 62 | controller.registerHandler(GET, "/_task", this); 63 | controller.registerHandler(POST, "/_task", this); 64 | controller.registerHandler(GET, "/_taskstatus/{taskid}", this); 65 | } 66 | 67 | private Client client; 68 | private static Cache taskResponse = CacheBuilder.newBuilder() 69 | .maximumSize(200) 70 | .expireAfterAccess(20, TimeUnit.MINUTES) 71 | .build(); 72 | 73 | @Override 74 | public void handleRequest(RestRequest request, RestChannel channel, Client arg2) 75 | throws Exception { 76 | 77 | 78 | final String taskid; 79 | 80 | if( request.param("taskid") != null){ 81 | taskid = request.param("taskid"); 82 | BytesRestResponse bytesRestResponse; 83 | if(taskResponse.getIfPresent(taskid) == null){ 84 | bytesRestResponse = new BytesRestResponse(RestStatus.OK, String.format("{\"status\":\"%s\",\"message\":\"%s\"}", "error", "taskid is not exist")); 85 | }else if(taskResponse.getIfPresent(taskid).status == TaskStatus.ERROR){ 86 | bytesRestResponse = new BytesRestResponse(RestStatus.OK, String.format("{\"status\":\"%s\",\"message\":\"%s\"}", "error", taskResponse.getIfPresent(taskid).e.toString())); 87 | }else if(taskResponse.getIfPresent(taskid).status == TaskStatus.RUNNING){ 88 | bytesRestResponse = new BytesRestResponse(RestStatus.OK, String.format("{\"status\":\"%s\"}", "running")); 89 | }else{ 90 | bytesRestResponse = new BytesRestResponse(RestStatus.OK, String.format("{\"status\":\"%s\",\"message\":\"%s\"}", "done", taskResponse.getIfPresent(taskid).task)); 91 | } 92 | 93 | channel.sendResponse(bytesRestResponse); 94 | return; 95 | } 96 | 97 | taskid = java.util.UUID.randomUUID().toString(); 98 | 99 | final String esTable = request.param("esTable", ""); 100 | final String esTable2 = request.param("esTable2", ""); 101 | 102 | final String parTable = request.param("parTable", ""); 103 | final String parTable2 = request.param("parTable2", ""); 104 | 105 | final String resultIndex = request.param("targetIndex", "index-tasks"); 106 | final String resultType = request.param("targetType", "type-"+taskid); 107 | 108 | final String targetPar = request.param("targetPar"); 109 | 110 | final String appName = request.param("appName", "appName-"+taskid); 111 | 112 | final String master = request.param("masterAddress", "local"); 113 | 114 | final String sql = request.param("sql", ""); 115 | 116 | final String memory = request.param("memory", "2g"); 117 | final String resultTable = String.format("%s/%s", resultIndex, resultType); 118 | //ES_RESOURCE_READ -> resource, ES_QUERY -> query 119 | final HashMap cfg = new HashMap(); 120 | 121 | cfg.put(org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_RESOURCE_WRITE, resultTable); 122 | ClusterState cs = client.admin().cluster().prepareState().execute() 123 | .actionGet().getState(); 124 | ArrayList nodesAddr = new ArrayList(); 125 | for(DiscoveryNode node :cs.nodes()){ 126 | nodesAddr.add(node.getHostAddress()); 127 | } 128 | String esNodes = StringUtils.concatenate( nodesAddr,","); 129 | 
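// The host addresses collected above from the cluster state are joined into esNodes and
// passed to es-hadoop through ES_NODES (next line), so the Spark job launched below can
// write its result set back into this Elasticsearch cluster (resultTable = targetIndex/targetType).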
logger.info(esNodes); 130 | cfg.put(org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_NODES, esNodes); 131 | 132 | taskResponse.put(taskid, new TaskResponse()); 133 | 134 | Thread exec = new Thread(new Runnable(){ 135 | 136 | @Override 137 | public void run() { 138 | try{ 139 | executeSparkSql( sql, esTable, esTable2, 140 | parTable, parTable2, 141 | targetPar, resultTable, appName, master , memory, cfg 142 | ); 143 | 144 | taskResponse.getIfPresent(taskid).status = TaskStatus.DONE; 145 | if(targetPar == null) 146 | taskResponse.getIfPresent(taskid).task = resultTable; 147 | else 148 | taskResponse.getIfPresent(taskid).task = targetPar; 149 | 150 | }catch(Exception e){ 151 | taskResponse.getIfPresent(taskid).status = TaskStatus.ERROR; 152 | taskResponse.getIfPresent(taskid).e = e; 153 | logger.error("executeSparkSql", e); 154 | 155 | } 156 | 157 | } 158 | 159 | }); 160 | 161 | exec.start(); 162 | 163 | 164 | channel.sendResponse(new BytesRestResponse(RestStatus.OK, String.format("{\"taskid\":\"%s\"}", taskid))); 165 | 166 | 167 | } 168 | 169 | static private void registerAndUnionTable(List rdds, String tableName ){ 170 | SchemaRDD wholeRdd = rdds.get(0); 171 | 172 | for(int idx = 1; idx < rdds.size(); idx++){ 173 | wholeRdd = wholeRdd.unionAll(rdds.get(idx)); 174 | } 175 | 176 | wholeRdd.registerTempTable(tableName); 177 | } 178 | 179 | final static private String COMMA = ";"; 180 | 181 | private void executeSparkSql(String sql, String esTable, String esTable2, 182 | String parTable, String parTable2, 183 | String targetPar, String resultTable, String appName, String master, String memory, Map cfg 184 | ) throws ParseException, CommandException, IOException{ 185 | 186 | if( sql.length() <= 0 ) 187 | throw new InvalidParameterException("sql String is null"); 188 | 189 | SparkConf sparkConf = new SparkConf().setAppName(appName).setMaster(master).set("spark.executor.memory", memory); 190 | 191 | JavaSparkContext ctx = new JavaSparkContext(sparkConf); 192 | 193 | try{ 194 | JavaSQLContext sqlContext = new JavaSQLContext(ctx); 195 | 196 | if(parTable.length() > 0){ 197 | String[] parTables = parTable.split(COMMA); 198 | 199 | List rdds = new ArrayList(); 200 | 201 | for(String parTablePath: parTables){ 202 | rdds.add(sqlContext.parquetFile(parTablePath).baseSchemaRDD()); 203 | } 204 | 205 | registerAndUnionTable(rdds, "parTable"); 206 | } 207 | 208 | if(parTable2.length() > 0){ 209 | String[] parTables = parTable.split(COMMA); 210 | 211 | List rdds = new ArrayList(); 212 | 213 | for(String parTablePath: parTables){ 214 | rdds.add(sqlContext.parquetFile(parTablePath).baseSchemaRDD()); 215 | } 216 | registerAndUnionTable(rdds, "parTable2"); 217 | } 218 | 219 | if(esTable.length() > 0){ 220 | getSchemaRDD(sqlContext, Search.PREFIX_SEARCH_STRING + " " + esTable).registerTempTable("esTable"); 221 | } 222 | 223 | if(esTable2.length() > 0){ 224 | getSchemaRDD(sqlContext, Search.PREFIX_SEARCH_STRING + " " + esTable2).registerTempTable("esTable2"); } 225 | 226 | JavaSchemaRDD results = sqlContext.sql(sql); 227 | 228 | if(targetPar == null) 229 | JavaEsSparkSQL.saveToEs(results, cfg); 230 | else 231 | results.saveAsParquetFile(targetPar); 232 | 233 | }catch(Exception e){ 234 | logger.error("error", e); 235 | throw e; 236 | }finally{ 237 | ctx.stop(); 238 | } 239 | 240 | 241 | } 242 | 243 | private JavaSchemaRDD getSchemaRDD(JavaSQLContext sqlContext, String command) throws ParseException, CommandException, IOException{ 244 | CommandParser parser = new CommandParser(command); 245 | 246 | Search search 
= new Search(parser, client, logger); 247 | 248 | if(search.indices.length == 0 || search.indices.length > 1) 249 | throw new InvalidParameterException(String.format("indices.length = %d", search.indices.length)); 250 | 251 | if(search.sourceTypes.length == 0 || search.sourceTypes.length > 1) 252 | throw new InvalidParameterException(String.format("sourceTypes.length = %d", search.sourceTypes.length)); 253 | 254 | String query = search.querySearch.toString(); 255 | 256 | return JavaEsSparkSQL.esRDD(sqlContext, String.format("%s/%s", search.indices[0], search.sourceTypes[0]), query); 257 | } 258 | 259 | } 260 | -------------------------------------------------------------------------------- /src/test/java/com/everdata/test/HttpHelper.java: -------------------------------------------------------------------------------- 1 | package com.everdata.test; 2 | 3 | 4 | import org.apache.http.HttpEntity; 5 | import org.apache.http.HttpResponse; 6 | import org.apache.http.HttpStatus; 7 | import org.apache.http.client.methods.HttpGet; 8 | import org.apache.http.client.methods.HttpPost; 9 | import org.apache.http.entity.StringEntity; 10 | import org.apache.http.impl.client.AbstractHttpClient; 11 | import org.apache.http.impl.client.DefaultHttpClient; 12 | 13 | import java.io.*; 14 | import java.net.HttpURLConnection; 15 | import java.net.MalformedURLException; 16 | import java.net.SocketException; 17 | import java.net.URL; 18 | import java.net.URLConnection; 19 | 20 | 21 | public class HttpHelper { 22 | 23 | public static String sendPostUrl(String url, String content) { 24 | try { 25 | DefaultHttpClient httpclient = new DefaultHttpClient(); 26 | HttpPost httppost = new HttpPost(url); 27 | // content = content.replaceAll("__", "_"); 28 | StringEntity reqEntity = new StringEntity(content, "UTF-8"); 29 | httppost.setEntity(reqEntity); 30 | 31 | HttpResponse response = httpclient.execute(httppost); 32 | 33 | StringBuffer buffer = new StringBuffer(); 34 | try { 35 | BufferedReader br = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), "UTF-8")); 36 | String temp; 37 | while ((temp = br.readLine()) != null) { 38 | buffer.append(temp); 39 | } 40 | } catch (Exception e) { 41 | e.printStackTrace(); 42 | } 43 | return buffer.toString(); 44 | 45 | } catch (Exception e) { 46 | e.printStackTrace(); 47 | } 48 | return ""; 49 | } 50 | 51 | public static String getHttpDownload(String url) { 52 | int nStartPos = 0; 53 | int nRead = 0; 54 | String sName = url.substring(url.lastIndexOf("/") + 1, url.length()); 55 | // String sPath = EsConfigUtil.getBasethPath() + File.separator + "down" + File.separator; 56 | String sPath = null; 57 | File file = new File(sPath); 58 | if (!file.exists()) { 59 | file.mkdirs(); 60 | } 61 | try { 62 | URL httpurl = new URL(url); 63 | //打开连接 64 | HttpURLConnection httpConnection = (HttpURLConnection) httpurl 65 | .openConnection(); 66 | //获得文件长度 67 | long nEndPos = getFileSize(url); 68 | RandomAccessFile oSavedFile = new RandomAccessFile(sPath + sName + "", "rw"); 69 | System.out.println("===" + sPath + sName); 70 | httpConnection.setRequestProperty("User-Agent", "Internet Explorer"); 71 | String sProperty = "bytes=" + nStartPos + "-"; 72 | //告诉服务器book.rar这个文件从nStartPos字节开始传 73 | httpConnection.setRequestProperty("RANGE", sProperty); 74 | System.out.println(sProperty); 75 | InputStream input = httpConnection.getInputStream(); 76 | byte[] b = new byte[1024]; 77 | //读取网络文件,写入指定的文件中 78 | while ((nRead = input.read(b, 0, 1024)) > 0 79 | && nStartPos < nEndPos) { 80 | 
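// Copy the response body into the local RandomAccessFile in 1 KB chunks; nStartPos tracks
// how many bytes have been written so the loop stops once the reported file size is reached.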
oSavedFile.write(b, 0, nRead); 81 | nStartPos += nRead; 82 | } 83 | httpConnection.disconnect(); 84 | } catch (Exception e) { 85 | e.printStackTrace(); 86 | return ""; 87 | } 88 | return sName; 89 | } 90 | 91 | /** 92 | * 读取文件内容 93 | */ 94 | public static String getHttpDownload(String url, String baseFolder) { 95 | // TODO Auto-generated method stub 96 | byte buffer[] = null; 97 | 98 | int byteread = 0; 99 | int bytesum = 0; 100 | 101 | StringBuffer sb = new StringBuffer(); 102 | try { 103 | 104 | AbstractHttpClient httpclient = new DefaultHttpClient();// 创建一个客户端,类似打开一个浏览器 105 | httpclient.getParams().setParameter("http.connection.timeout", 100000); 106 | HttpGet get = new HttpGet(url);// 创建一个get方法,类似在浏览器地址栏中输入一个地址 107 | 108 | get.setHeader("Accept", "Accept text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"); 109 | get.setHeader("Accept-Language", "zh-cn,zh;q=0.5"); 110 | get.setHeader("Connection", "keep-alive"); 111 | get.setHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.66 Safari/537.36"); 112 | long t = System.currentTimeMillis(); 113 | HttpResponse httpResponse = httpclient.execute(get); 114 | System.out.println(url + "请求结束:" + (System.currentTimeMillis() - t)); 115 | int statusCode = httpResponse.getStatusLine() 116 | .getStatusCode(); 117 | if (HttpStatus.SC_OK == statusCode) { 118 | HttpEntity ent = httpResponse.getEntity(); 119 | buffer = new byte[1024]; 120 | InputStream in = ent.getContent(); 121 | // fo=new FileOutputStream(new File(baseFolder+File.separator+filename),true); 122 | while ((byteread = in.read(buffer)) != -1) { 123 | bytesum += byteread; 124 | sb.append(new String(buffer, 0, byteread, "UTF-8")); 125 | // fo.write(buffer, 0, byteread); 126 | } 127 | // fo.close(); 128 | 129 | 130 | } 131 | 132 | } catch (Exception e) { 133 | // TODO: handle exception 134 | e.printStackTrace(); 135 | } 136 | return sb.toString(); 137 | } 138 | 139 | /** 140 | * 下载文件 141 | */ 142 | public static String getHttpDownloadFile(String url, String baseFolder) { 143 | // TODO Auto-generated method stub 144 | byte buffer[] = null; 145 | 146 | int byteread = 0; 147 | int bytesum = 0; 148 | FileOutputStream fo = null; 149 | StringBuffer sb = new StringBuffer(); 150 | String fileName = System.currentTimeMillis() + ".json"; 151 | try { 152 | 153 | AbstractHttpClient httpclient = new DefaultHttpClient();// 创建一个客户端,类似打开一个浏览器 154 | httpclient.getParams().setParameter("http.connection.timeout", 100000); 155 | HttpGet get = new HttpGet(url);// 创建一个get方法,类似在浏览器地址栏中输入一个地址 156 | 157 | get.setHeader("Accept", "Accept text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"); 158 | get.setHeader("Accept-Language", "zh-cn,zh;q=0.5"); 159 | get.setHeader("Connection", "keep-alive"); 160 | get.setHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.66 Safari/537.36"); 161 | long t = System.currentTimeMillis(); 162 | HttpResponse httpResponse = httpclient.execute(get); 163 | System.out.println(url + "请求结束:" + (System.currentTimeMillis() - t)); 164 | int statusCode = httpResponse.getStatusLine() 165 | .getStatusCode(); 166 | if (HttpStatus.SC_OK == statusCode) { 167 | HttpEntity ent = httpResponse.getEntity(); 168 | buffer = new byte[1024]; 169 | InputStream in = ent.getContent(); 170 | fo = new FileOutputStream(new File(baseFolder + File.separator + fileName), true); 171 | while ((byteread = in.read(buffer)) != -1) { 172 | bytesum += byteread; 173 | // sb.append(new 
String(buffer, 0, byteread,"UTF-8")); 174 | fo.write(buffer, 0, byteread); 175 | } 176 | fo.close(); 177 | 178 | 179 | } 180 | 181 | } catch (Exception e) { 182 | // TODO: handle exception 183 | e.printStackTrace(); 184 | } 185 | return sb.toString(); 186 | } 187 | 188 | public static long getFileSize(String sURL) { 189 | int nFileLength = -1; 190 | try { 191 | URL url = new URL(sURL); 192 | HttpURLConnection httpConnection = (HttpURLConnection) url 193 | .openConnection(); 194 | // httpConnection.setRequestProperty("User-Agent", "Internet Explorer"); 195 | int responseCode = httpConnection.getResponseCode(); 196 | if (responseCode >= 400) { 197 | System.err.println("Error Code : " + responseCode); 198 | return -2; // -2 represent access is error 199 | } 200 | String sHeader; 201 | for (int i = 1; ; i++) { 202 | sHeader = httpConnection.getHeaderFieldKey(i); 203 | if (sHeader != null) { 204 | if (sHeader.equals("Content-Length")) { 205 | nFileLength = Integer.parseInt(httpConnection 206 | .getHeaderField(sHeader)); 207 | break; 208 | } 209 | } else 210 | break; 211 | } 212 | } catch (IOException e) { 213 | e.printStackTrace(); 214 | } catch (Exception e) { 215 | e.printStackTrace(); 216 | } 217 | System.out.println(nFileLength); 218 | return nFileLength; 219 | } 220 | 221 | public static String sendGetUrl(String host, String uri) { 222 | 223 | try { 224 | StringBuffer html = new StringBuffer(); 225 | URL url = new URL(host+uri); 226 | HttpURLConnection conn = (HttpURLConnection) url.openConnection(); 227 | 228 | 229 | conn.connect(); 230 | conn.setReadTimeout(500); 231 | 232 | //success_conn++ 233 | CommandActionTest.success_conn.incrementAndGet(); 234 | 235 | 236 | InputStreamReader isr = new InputStreamReader(conn.getInputStream(), "utf-8"); 237 | BufferedReader br = new BufferedReader(isr); 238 | String temp; 239 | while ((temp = br.readLine()) != null) { 240 | html.append(temp).append("\n"); 241 | } 242 | br.close(); 243 | isr.close(); 244 | 245 | conn.disconnect(); 246 | return html.toString(); 247 | } catch (SocketException e) { 248 | //System.out.println(); 249 | } catch (MalformedURLException e) { 250 | // TODO Auto-generated catch block 251 | e.printStackTrace(); 252 | } catch (IOException e) { 253 | // TODO Auto-generated catch block 254 | e.printStackTrace(); 255 | } 256 | return null; 257 | 258 | } 259 | 260 | public static String getRequest(String host) { 261 | return null; 262 | } 263 | } 264 | 265 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_Stats.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. 
AST_Stats.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | import java.util.ArrayList; 6 | 7 | import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; 8 | import org.elasticsearch.search.aggregations.AggregationBuilder; 9 | import org.elasticsearch.search.aggregations.AggregationBuilders; 10 | import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram; 11 | import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram.Interval; 12 | import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramBuilder; 13 | import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; 14 | import org.elasticsearch.search.aggregations.bucket.histogram.HistogramBuilder; 15 | import org.elasticsearch.search.aggregations.bucket.terms.Terms; 16 | import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; 17 | import org.elasticsearch.search.aggregations.metrics.avg.AvgBuilder; 18 | import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityBuilder; 19 | import org.elasticsearch.search.aggregations.metrics.max.MaxBuilder; 20 | import org.elasticsearch.search.aggregations.metrics.min.MinBuilder; 21 | import org.elasticsearch.search.aggregations.metrics.sum.SumBuilder; 22 | import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountBuilder; 23 | 24 | 25 | import com.everdata.command.CommandException; 26 | import com.everdata.command.Field; 27 | import com.everdata.command.Function; 28 | 29 | public class AST_Stats extends SimpleNode { 30 | public static class Bucket{ 31 | public String bucketField; 32 | public boolean desc = true; 33 | public boolean keyOrder = false; 34 | public boolean script = false; 35 | 36 | public int type = 0; 37 | public static final int TERM = 0; 38 | public static final int TERMWITHCARD = 3; 39 | public static final int HISTOGRAM = 1; 40 | public static final int DATEHISTOGRAM = 2; 41 | } 42 | 43 | private ArrayList> buckets = null; 44 | AbstractAggregationBuilder[] internalReport = null; 45 | ArrayList bucketFields = new ArrayList(); 46 | ArrayList funcs = new ArrayList(); 47 | ArrayList mincounts = new ArrayList(); 48 | ArrayList limits = new ArrayList(); 49 | ArrayList timespans = new ArrayList(); 50 | ArrayList spans = new ArrayList(); 51 | //ArrayList statsFields = new ArrayList(); 52 | 53 | 54 | 55 | public ArrayList bucketFields(){ 56 | return bucketFields; 57 | } 58 | 59 | public ArrayList statsFields(){ 60 | return funcs; 61 | } 62 | 63 | public AST_Stats(int id) { 64 | super(id); 65 | } 66 | 67 | public AST_Stats(CommandParser p, int id) { 68 | super(p, id); 69 | } 70 | 71 | public static AbstractAggregationBuilder newCount(Function func) { 72 | ValueCountBuilder count; 73 | count = AggregationBuilders.count(Function.genStatField(func)) 74 | .field(func.field); 75 | 76 | return count; 77 | } 78 | 79 | public static AbstractAggregationBuilder newSum(Function func) { 80 | SumBuilder sum; 81 | if (func.fieldtype == Field.SCRIPT) 82 | sum = AggregationBuilders.sum(Function.genStatField(func)) 83 | .script(func.field); 84 | else 85 | sum = AggregationBuilders.sum(Function.genStatField(func)) 86 | .field(func.field); 87 | 88 | return sum; 89 | } 90 | 91 | public static AbstractAggregationBuilder newAvg(Function func) { 92 | AvgBuilder avg; 93 | if (func.fieldtype == Field.SCRIPT) 94 | avg 
= AggregationBuilders.avg(Function.genStatField(func)) 95 | .script(func.field); 96 | else 97 | avg = AggregationBuilders.avg(Function.genStatField(func)) 98 | .field(func.field); 99 | 100 | return avg; 101 | } 102 | 103 | public static AbstractAggregationBuilder newMin(Function func) { 104 | MinBuilder min; 105 | if (func.fieldtype == Field.SCRIPT) 106 | min = AggregationBuilders.min(Function.genStatField(func)) 107 | .script(func.field); 108 | else 109 | min = AggregationBuilders.min(Function.genStatField(func)) 110 | .field(func.field); 111 | return min; 112 | } 113 | 114 | public static AbstractAggregationBuilder newMax(Function func) { 115 | MaxBuilder max; 116 | if (func.fieldtype == Field.SCRIPT) 117 | max = AggregationBuilders.max(Function.genStatField(func)) 118 | .script(func.field); 119 | else 120 | max = AggregationBuilders.max(Function.genStatField(func)) 121 | .field(func.field); 122 | return max; 123 | } 124 | 125 | public static AbstractAggregationBuilder newCard(Function func) { 126 | CardinalityBuilder max; 127 | if (func.fieldtype == Field.SCRIPT) 128 | max = AggregationBuilders.cardinality(Function.genStatField(func)) 129 | .script(func.field); 130 | else 131 | max = AggregationBuilders.cardinality(Function.genStatField(func)) 132 | .field(func.field); 133 | return max; 134 | } 135 | 136 | public static TermsBuilder newTermsBucket(String name, int limit, String field, int mincount, boolean script) { 137 | TermsBuilder d = AggregationBuilders.terms(name).size(limit).minDocCount(mincount); 138 | 139 | return script? d.script(field): d.field(field); 140 | } 141 | 142 | public static DateHistogramBuilder newDateHistogram(String name, String field, String interval, int mincount, boolean script) { 143 | DateHistogramBuilder d = AggregationBuilders.dateHistogram(name).interval(new Interval(interval)).minDocCount(mincount); 144 | 145 | return script? d.script(field): d.field(field); 146 | } 147 | 148 | public static HistogramBuilder newHistogram(String name, String field, int interval, int mincount, boolean script) { 149 | HistogramBuilder d = AggregationBuilders.histogram(name).field(field).interval(interval).minDocCount(mincount); 150 | 151 | return script? 
d.script(field): d.field(field); 152 | } 153 | 154 | private void traverseAST() { 155 | 156 | for (Node n : children) { 157 | if (n instanceof AST_ByIdentList) { 158 | AST_ByIdentList byStmt = ((AST_ByIdentList) n); 159 | 160 | for(int idx = 0; idx < byStmt.byList.size(); idx++ ){ 161 | 162 | 163 | Bucket b = new Bucket(); 164 | 165 | 166 | b.bucketField = byStmt.byList.get(idx).name; 167 | b.desc = byStmt.byList.get(idx).desc; 168 | b.keyOrder = byStmt.byList.get(idx).keyorder; 169 | b.script = byStmt.byList.get(idx).script; 170 | bucketFields.add(b); 171 | } 172 | 173 | } else if (n instanceof AST_StatsFunc) { 174 | 175 | ((AST_StatsFunc) n).func.statsField = Function.genStatField( ((AST_StatsFunc) n).func ) ; 176 | funcs.add(((AST_StatsFunc) n).func); 177 | } 178 | } 179 | } 180 | 181 | public void setBucketLimit(int idx, long limit){ 182 | if(buckets.get(idx) instanceof TermsBuilder) 183 | ((TermsBuilder)buckets.get(idx)).size((int) limit); 184 | } 185 | 186 | public static final int DEFAULT_LIMIT = 50; 187 | public static final int DEFAULT_LIMIT_CARD = -1; 188 | public static final int DEFAULT_MINICOUNT = 1; 189 | 190 | private AbstractAggregationBuilder[] genAggregation() throws CommandException { 191 | 192 | traverseAST(); 193 | 194 | 195 | 196 | //生成bucket列表 197 | buckets = new ArrayList>(); 198 | 199 | for(int i = 0 ; i < bucketFields.size(); i++){ 200 | AggregationBuilder bucket = null; 201 | int mincount = DEFAULT_MINICOUNT; 202 | int limit = DEFAULT_LIMIT; 203 | if(limits.size() > i) 204 | limit = limits.get(i); 205 | if(mincounts.size() > i) 206 | mincount = mincounts.get(i); 207 | 208 | if( spans.size() > i && spans.get(i) != 0 ){ 209 | 210 | Histogram.Order order; 211 | 212 | order = (bucketFields.get(i).desc)?( 213 | bucketFields.get(i).keyOrder?Histogram.Order.KEY_DESC: Histogram.Order.COUNT_DESC 214 | ):( 215 | bucketFields.get(i).keyOrder?Histogram.Order.KEY_ASC:Histogram.Order.COUNT_ASC 216 | ); 217 | 218 | bucketFields.get(i).type = Bucket.DATEHISTOGRAM; 219 | 220 | bucket = newHistogram("statsWithBy", bucketFields.get(i).bucketField, spans.get(i), mincount,bucketFields.get(i).script).order(order); 221 | }else if( timespans.size() > i && !timespans.get(i).equals("NA") ){ 222 | 223 | DateHistogram.Order order; 224 | 225 | order =(bucketFields.get(i).desc)?( 226 | bucketFields.get(i).keyOrder?DateHistogram.Order.KEY_DESC:DateHistogram.Order.COUNT_DESC 227 | ):( 228 | bucketFields.get(i).keyOrder?DateHistogram.Order.KEY_ASC: DateHistogram.Order.COUNT_ASC 229 | ); 230 | bucketFields.get(i).type = Bucket.DATEHISTOGRAM; 231 | bucket = newDateHistogram("statsWithBy", bucketFields.get(i).bucketField, timespans.get(i), mincount, bucketFields.get(i).script).order(order); 232 | 233 | }else{ 234 | Terms.Order order; 235 | 236 | order = (bucketFields.get(i).desc)?( 237 | bucketFields.get(i).keyOrder? 
Terms.Order.term(false):Terms.Order.count(false) 238 | ):( 239 | bucketFields.get(i).keyOrder?Terms.Order.term(true):Terms.Order.count(true) 240 | ); 241 | 242 | 243 | bucketFields.get(i).type = (limit == DEFAULT_LIMIT_CARD)?Bucket.TERMWITHCARD:Bucket.TERM; 244 | 245 | bucket = newTermsBucket("statsWithBy", (limit == DEFAULT_LIMIT_CARD)?DEFAULT_LIMIT:limit, bucketFields.get(i).bucketField, mincount, bucketFields.get(i).script).order(order); 246 | } 247 | 248 | buckets.add(bucket); 249 | 250 | } 251 | ArrayList stats = new ArrayList(); 252 | 253 | //生成functions列表 254 | for (Function func : funcs) { 255 | AbstractAggregationBuilder function = null; 256 | switch (func.type) { 257 | case Function.COUNT: 258 | function = newCount(func); 259 | break; 260 | case Function.SUM: 261 | function = newSum(func); 262 | break; 263 | case Function.AVG: 264 | function = newAvg(func); 265 | break; 266 | case Function.MIN: 267 | function = newMin(func); 268 | break; 269 | case Function.MAX: 270 | function = newMax(func); 271 | break; 272 | case Function.DC: 273 | function = newCard(func); 274 | break; 275 | } 276 | stats.add(function); 277 | 278 | //需要根据统计字段排序 279 | if(func.order != 0 && buckets.size() > 0){ 280 | 281 | boolean asc = func.order == 1 ? true:false; 282 | 283 | switch(bucketFields.get(0).type){ 284 | case Bucket.TERM : 285 | case Bucket.TERMWITHCARD: 286 | ((TermsBuilder)buckets.get(0)).order(Terms.Order.aggregation(Function.genStatField(func), asc)); 287 | break; 288 | case Bucket.HISTOGRAM : 289 | ((HistogramBuilder)buckets.get(0)).order(Histogram.Order.aggregation(Function.genStatField(func), asc)); 290 | break; 291 | case Bucket.DATEHISTOGRAM : 292 | ((DateHistogramBuilder)buckets.get(0)).order(DateHistogram.Order.aggregation(Function.genStatField(func), asc)); 293 | break; 294 | } 295 | } 296 | } 297 | 298 | if(buckets.size() == 0){ 299 | return stats.toArray(new AbstractAggregationBuilder[stats.size()]); 300 | }else{ 301 | 302 | AggregationBuilder prevBucket = null; 303 | 304 | for(AggregationBuilder bucket: buckets){ 305 | if(prevBucket == null){ 306 | for(AbstractAggregationBuilder func: stats){ 307 | bucket.subAggregation(func); 308 | } 309 | }else{ 310 | bucket.subAggregation(prevBucket); 311 | } 312 | prevBucket = bucket; 313 | } 314 | 315 | AbstractAggregationBuilder[] bucketArray = new AbstractAggregationBuilder[1]; 316 | bucketArray[0] = prevBucket; 317 | 318 | return bucketArray; 319 | } 320 | 321 | 322 | } 323 | 324 | public AbstractAggregationBuilder[] getStats() throws CommandException { 325 | 326 | if (internalReport == null) 327 | internalReport = genAggregation(); 328 | 329 | return internalReport; 330 | } 331 | 332 | } 333 | /* 334 | * JavaCC - OriginalChecksum=663713222972f6d3f2e7821e2216d7f2 (do not edit this 335 | * line) 336 | */ 337 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/xcontent/CsvXContentGenerator.java: -------------------------------------------------------------------------------- 1 | package com.everdata.xcontent; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.io.OutputStream; 6 | 7 | import org.elasticsearch.common.bytes.BytesReference; 8 | import org.elasticsearch.common.io.Streams; 9 | import org.elasticsearch.common.xcontent.XContentFactory; 10 | import org.elasticsearch.common.xcontent.XContentGenerator; 11 | import org.elasticsearch.common.xcontent.XContentParser; 12 | import org.elasticsearch.common.xcontent.XContentString; 13 | import 
org.elasticsearch.common.xcontent.XContentType; 14 | 15 | import com.fasterxml.jackson.core.base.GeneratorBase; 16 | import com.fasterxml.jackson.core.io.SerializedString; 17 | import com.fasterxml.jackson.dataformat.csv.CsvGenerator; 18 | 19 | public class CsvXContentGenerator implements XContentGenerator { 20 | 21 | protected final CsvGenerator generator; 22 | private boolean writeLineFeedAtEnd; 23 | private final GeneratorBase base; 24 | 25 | public CsvXContentGenerator(CsvGenerator generator) { 26 | this.generator = generator; 27 | if (generator instanceof GeneratorBase) { 28 | base = (GeneratorBase) generator; 29 | } else { 30 | base = null; 31 | } 32 | 33 | } 34 | 35 | 36 | @Override 37 | public XContentType contentType() { 38 | 39 | return null; 40 | } 41 | 42 | @Override 43 | public void usePrettyPrint() { 44 | generator.useDefaultPrettyPrinter(); 45 | } 46 | 47 | @Override 48 | public void usePrintLineFeedAtEnd() { 49 | writeLineFeedAtEnd = true; 50 | } 51 | 52 | @Override 53 | public void writeStartArray() throws IOException { 54 | generator.writeStartArray(); 55 | } 56 | 57 | @Override 58 | public void writeEndArray() throws IOException { 59 | generator.writeEndArray(); 60 | } 61 | 62 | @Override 63 | public void writeStartObject() throws IOException { 64 | generator.writeStartObject(); 65 | } 66 | 67 | @Override 68 | public void writeEndObject() throws IOException { 69 | generator.writeEndObject(); 70 | } 71 | 72 | @Override 73 | public void writeFieldName(String name) throws IOException { 74 | generator.writeFieldName(name); 75 | } 76 | 77 | @Override 78 | public void writeFieldName(XContentString name) throws IOException { 79 | generator.writeFieldName(name.getValue()); 80 | } 81 | 82 | @Override 83 | public void writeString(String text) throws IOException { 84 | generator.writeString(text); 85 | } 86 | 87 | @Override 88 | public void writeString(char[] text, int offset, int len) throws IOException { 89 | generator.writeString(text, offset, len); 90 | } 91 | 92 | @Override 93 | public void writeUTF8String(byte[] text, int offset, int length) throws IOException { 94 | generator.writeUTF8String(text, offset, length); 95 | } 96 | 97 | @Override 98 | public void writeBinary(byte[] data, int offset, int len) throws IOException { 99 | generator.writeBinary(data, offset, len); 100 | } 101 | 102 | @Override 103 | public void writeBinary(byte[] data) throws IOException { 104 | generator.writeBinary(data); 105 | } 106 | 107 | @Override 108 | public void writeNumber(int v) throws IOException { 109 | generator.writeNumber(v); 110 | } 111 | 112 | @Override 113 | public void writeNumber(long v) throws IOException { 114 | generator.writeNumber(v); 115 | } 116 | 117 | @Override 118 | public void writeNumber(double d) throws IOException { 119 | generator.writeNumber(d); 120 | } 121 | 122 | @Override 123 | public void writeNumber(float f) throws IOException { 124 | generator.writeNumber(f); 125 | } 126 | 127 | @Override 128 | public void writeBoolean(boolean state) throws IOException { 129 | generator.writeBoolean(state); 130 | } 131 | 132 | @Override 133 | public void writeNull() throws IOException { 134 | generator.writeNull(); 135 | } 136 | 137 | @Override 138 | public void writeStringField(String fieldName, String value) throws IOException { 139 | generator.writeStringField(fieldName, value); 140 | } 141 | 142 | @Override 143 | public void writeStringField(XContentString fieldName, String value) throws IOException { 144 | generator.writeFieldName(fieldName.getValue()); 145 | 
generator.writeString(value); 146 | } 147 | 148 | @Override 149 | public void writeBooleanField(String fieldName, boolean value) throws IOException { 150 | generator.writeBooleanField(fieldName, value); 151 | } 152 | 153 | @Override 154 | public void writeBooleanField(XContentString fieldName, boolean value) throws IOException { 155 | generator.writeFieldName(fieldName.getValue()); 156 | generator.writeBoolean(value); 157 | } 158 | 159 | @Override 160 | public void writeNullField(String fieldName) throws IOException { 161 | generator.writeNullField(fieldName); 162 | } 163 | 164 | @Override 165 | public void writeNullField(XContentString fieldName) throws IOException { 166 | generator.writeFieldName(fieldName.getValue()); 167 | generator.writeNull(); 168 | } 169 | 170 | @Override 171 | public void writeNumberField(String fieldName, int value) throws IOException { 172 | generator.writeNumberField(fieldName, value); 173 | } 174 | 175 | @Override 176 | public void writeNumberField(XContentString fieldName, int value) throws IOException { 177 | generator.writeFieldName(fieldName.getValue()); 178 | generator.writeNumber(value); 179 | } 180 | 181 | @Override 182 | public void writeNumberField(String fieldName, long value) throws IOException { 183 | generator.writeNumberField(fieldName, value); 184 | } 185 | 186 | @Override 187 | public void writeNumberField(XContentString fieldName, long value) throws IOException { 188 | generator.writeFieldName(fieldName.getValue()); 189 | generator.writeNumber(value); 190 | } 191 | 192 | @Override 193 | public void writeNumberField(String fieldName, double value) throws IOException { 194 | generator.writeNumberField(fieldName, value); 195 | } 196 | 197 | @Override 198 | public void writeNumberField(XContentString fieldName, double value) throws IOException { 199 | generator.writeFieldName(fieldName.getValue()); 200 | generator.writeNumber(value); 201 | } 202 | 203 | @Override 204 | public void writeNumberField(String fieldName, float value) throws IOException { 205 | generator.writeNumberField(fieldName, value); 206 | } 207 | 208 | @Override 209 | public void writeNumberField(XContentString fieldName, float value) throws IOException { 210 | generator.writeFieldName(fieldName.getValue()); 211 | generator.writeNumber(value); 212 | } 213 | 214 | @Override 215 | public void writeBinaryField(String fieldName, byte[] data) throws IOException { 216 | generator.writeBinaryField(fieldName, data); 217 | } 218 | 219 | @Override 220 | public void writeBinaryField(XContentString fieldName, byte[] value) throws IOException { 221 | generator.writeFieldName(fieldName.getValue()); 222 | generator.writeBinary(value); 223 | } 224 | 225 | @Override 226 | public void writeArrayFieldStart(String fieldName) throws IOException { 227 | generator.writeArrayFieldStart(fieldName); 228 | } 229 | 230 | @Override 231 | public void writeArrayFieldStart(XContentString fieldName) throws IOException { 232 | generator.writeFieldName(fieldName.getValue()); 233 | generator.writeStartArray(); 234 | } 235 | 236 | @Override 237 | public void writeObjectFieldStart(String fieldName) throws IOException { 238 | generator.writeObjectFieldStart(fieldName); 239 | } 240 | 241 | @Override 242 | public void writeObjectFieldStart(XContentString fieldName) throws IOException { 243 | generator.writeFieldName(fieldName.getValue()); 244 | generator.writeStartObject(); 245 | } 246 | 247 | @Override 248 | public void writeRawField(String fieldName, byte[] content, OutputStream bos) throws IOException { 249 | 
generator.writeFieldName(fieldName); 250 | generator.writeRaw(':'); 251 | flush(); 252 | bos.write(content); 253 | finishWriteRaw(); 254 | } 255 | 256 | @Override 257 | public void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream bos) throws IOException { 258 | generator.writeFieldName(fieldName); 259 | generator.writeRaw(':'); 260 | flush(); 261 | bos.write(content, offset, length); 262 | finishWriteRaw(); 263 | } 264 | 265 | @Override 266 | public void writeRawField(String fieldName, InputStream content, OutputStream bos) throws IOException { 267 | generator.writeFieldName(fieldName); 268 | generator.writeRaw(':'); 269 | flush(); 270 | Streams.copy(content, bos); 271 | finishWriteRaw(); 272 | } 273 | 274 | @Override 275 | public final void writeRawField(String fieldName, BytesReference content, OutputStream bos) throws IOException { 276 | XContentType contentType = XContentFactory.xContentType(content); 277 | if (contentType != null) { 278 | writeObjectRaw(fieldName, content, bos); 279 | } else { 280 | writeFieldName(fieldName); 281 | // we could potentially optimize this to not rely on exception logic... 282 | String sValue = content.toUtf8(); 283 | try { 284 | writeNumber(Long.parseLong(sValue)); 285 | } catch (NumberFormatException e) { 286 | try { 287 | writeNumber(Double.parseDouble(sValue)); 288 | } catch (NumberFormatException e1) { 289 | writeString(sValue); 290 | } 291 | } 292 | } 293 | } 294 | 295 | protected void writeObjectRaw(String fieldName, BytesReference content, OutputStream bos) throws IOException { 296 | generator.writeFieldName(fieldName); 297 | generator.writeRaw(':'); 298 | flush(); 299 | content.writeTo(bos); 300 | finishWriteRaw(); 301 | } 302 | 303 | private void finishWriteRaw() { 304 | assert base != null : "CsvGenerator should be of instance GeneratorBase but was: " + generator.getClass(); 305 | if (base != null) { 306 | base.getOutputContext().writeValue(); 307 | } 308 | } 309 | 310 | @Override 311 | public void copyCurrentStructure(XContentParser parser) throws IOException { 312 | 313 | throw new IOException("copyCurrentStructure unsupport method,really need this method?"); 314 | // the start of the parser 315 | /* 316 | if (parser.currentToken() == null) { 317 | parser.nextToken(); 318 | } 319 | if (parser instanceof CsvXContentParser) { 320 | generator.copyCurrentStructure(((CsvXContentParser) parser).parser); 321 | } else { 322 | XContentHelper.copyCurrentStructure(this, parser); 323 | } 324 | */ 325 | } 326 | 327 | @Override 328 | public void flush() throws IOException { 329 | generator.flush(); 330 | } 331 | 332 | @Override 333 | public void close() throws IOException { 334 | if (generator.isClosed()) { 335 | return; 336 | } 337 | if (writeLineFeedAtEnd) { 338 | flush(); 339 | generator.writeRaw(LF); 340 | } 341 | generator.close(); 342 | } 343 | 344 | private static final SerializedString LF = new SerializedString("\n"); 345 | 346 | } 347 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/plugin/rest/JobRestHandler.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.plugin.rest; 2 | 3 | 4 | import java.io.IOException; 5 | import java.util.HashMap; 6 | import java.util.List; 7 | import java.util.Random; 8 | import java.util.concurrent.TimeUnit; 9 | 10 | import org.elasticsearch.rest.BytesRestResponse; 11 | import org.elasticsearch.rest.RestChannel; 12 | import 
org.elasticsearch.rest.RestController; 13 | import org.elasticsearch.rest.RestRequest; 14 | import org.elasticsearch.rest.RestResponse; 15 | import org.elasticsearch.rest.RestStatus; 16 | import org.elasticsearch.action.ActionListener; 17 | import org.elasticsearch.action.search.SearchResponse; 18 | import org.elasticsearch.client.Client; 19 | import org.elasticsearch.common.cache.Cache; 20 | import org.elasticsearch.common.cache.CacheBuilder; 21 | import org.elasticsearch.common.inject.Inject; 22 | import org.elasticsearch.common.settings.Settings; 23 | import org.elasticsearch.common.xcontent.XContentBuilder; 24 | 25 | import static org.elasticsearch.rest.RestRequest.Method.GET; 26 | import static org.elasticsearch.rest.RestRequest.Method.POST; 27 | 28 | import org.elasticsearch.rest.BaseRestHandler; 29 | import org.elasticsearch.rest.action.support.RestBuilderListener; 30 | import org.elasticsearch.rest.support.RestUtils; 31 | 32 | import com.everdata.command.CommandException; 33 | import com.everdata.command.ReportResponse; 34 | import com.everdata.command.Search; 35 | import com.everdata.command.Search.QueryResponse; 36 | import com.everdata.parser.AST_Start; 37 | import com.everdata.parser.CommandParser; 38 | 39 | public class JobRestHandler extends BaseRestHandler { 40 | 41 | //jobid -> Search 42 | //jobid+from+size -> Query SearchResponse 43 | //jobid+from+size -> Report SearchResponse 44 | 45 | Random ran = new Random(); 46 | 47 | static Cache commandCache = CacheBuilder.newBuilder() 48 | .maximumSize(200) 49 | .expireAfterAccess(20, TimeUnit.MINUTES) 50 | .build(); 51 | 52 | static Cache queryResultCache = CacheBuilder.newBuilder() 53 | .maximumSize(2000) 54 | .expireAfterAccess(20, TimeUnit.MINUTES) 55 | .build(); 56 | 57 | static Cache reportResultCache = CacheBuilder.newBuilder() 58 | .maximumSize(2000) 59 | .expireAfterAccess(20, TimeUnit.MINUTES) 60 | .build(); 61 | 62 | static Cache timelineResultCache = CacheBuilder.newBuilder() 63 | .maximumSize(2000) 64 | .expireAfterAccess(20, TimeUnit.MINUTES) 65 | .build(); 66 | 67 | private String genJobId(){ 68 | return Double.toString(ran.nextDouble() * 1000000); 69 | } 70 | 71 | static class Id{ 72 | private StringBuffer id = new StringBuffer(); 73 | 74 | public Id append(String a){ 75 | id.append("-").append(a); 76 | return this; 77 | } 78 | 79 | public Id append(int a){ 80 | id.append("-").append(Integer.toString(a)); 81 | return this; 82 | } 83 | 84 | public String toId(){ 85 | return id.toString(); 86 | } 87 | } 88 | 89 | static enum ResponseType{ 90 | SearchResponse,QueryResponse,ReportResponse 91 | } 92 | 93 | static class Response{ 94 | ResponseType type; 95 | SearchResponse search; 96 | QueryResponse query; 97 | ReportResponse report; 98 | List fieldNames = null; 99 | } 100 | 101 | 102 | @Inject 103 | public JobRestHandler(Settings settings, Client client, 104 | RestController controller) { 105 | super(settings, controller, client); 106 | controller.registerHandler(GET, "/_commandjob", this); 107 | controller.registerHandler(POST, "/_commandjob", this); 108 | controller.registerHandler(GET, "/jobs/{jobid}/{type}", this); 109 | controller.registerHandler(POST, "/jobs/{jobid}/{type}", this); 110 | } 111 | 112 | private String getCommandStringFromRestRequest(final RestRequest request) throws CommandException{ 113 | String command = ""; 114 | if(request.method() == RestRequest.Method.GET) 115 | command = request.param("q", ""); 116 | else{ 117 | HashMap post = new HashMap(); 118 | 
RestUtils.decodeQueryString(request.content().toUtf8(), 0, post); 119 | if(post.containsKey("q")){ 120 | command = post.get("q"); 121 | } 122 | } 123 | 124 | if (command.length() == 0) { 125 | throw new CommandException("命令为空"); 126 | 127 | }else{ 128 | if( ! command.startsWith(Search.PREFIX_SEARCH_STRING)) 129 | command = Search.PREFIX_SEARCH_STRING+" "+command; 130 | } 131 | 132 | logger.info(command); 133 | 134 | return command; 135 | } 136 | 137 | private Search newSearchFromCommandString(String command, Client client, RestChannel channel, RestRequest request){ 138 | 139 | try{ 140 | CommandParser parser = new CommandParser(command); 141 | 142 | AST_Start.dumpWithLogger(logger, parser.getInnerTree(), ""); 143 | 144 | return new Search(parser, client, logger); 145 | } catch (Exception e1) { 146 | sendFailure(request, channel, e1); 147 | return null; 148 | } 149 | } 150 | 151 | public static final int DEFAULT_SIZE = 10; 152 | @Override 153 | public void handleRequest(final RestRequest request, 154 | final RestChannel channel, Client client) { 155 | 156 | //根据查询命令生成jobid,存储原始的command命令到cache 157 | if(request.param("jobid") == null ){ 158 | //command支持从GET和POST中获取 159 | String command; 160 | 161 | try { 162 | command = getCommandStringFromRestRequest(request); 163 | 164 | } catch (CommandException e2) { 165 | sendFailure(request, channel, e2); 166 | return; 167 | } 168 | 169 | String jobId = genJobId(); 170 | commandCache.put(jobId, command); 171 | channel.sendResponse(new BytesRestResponse(RestStatus.OK, "{\"jobid\":\"" + jobId +"\"}")); 172 | return; 173 | } 174 | 175 | if(request.param("type") == null){ 176 | sendFailure(request, channel, new CommandException("report or query endpoint 需要提供")); 177 | return; 178 | } 179 | String jobid = request.param("jobid"); 180 | String type = request.param("type"); 181 | final int from = request.paramAsInt("from", 0); 182 | final int size = request.paramAsInt("size", DEFAULT_SIZE); 183 | String sortPara = request.param("sortField"); 184 | String interval = request.param("interval"); 185 | String timelineField = request.param("timelineField"); 186 | final boolean showMeta = request.paramAsBoolean("showMeta", false); 187 | 188 | Id id = new Id(); 189 | final String retId = id.append(jobid).append(sortPara).append(interval).append(from) 190 | .append(size).append(timelineField).toId(); 191 | String[] sortFields = new String[0]; 192 | if(sortPara != null) 193 | sortFields = sortPara.split(","); 194 | 195 | if(type.equalsIgnoreCase("query")){ 196 | 197 | 198 | Response response = queryResultCache.getIfPresent(retId); 199 | 200 | if(response == null){ 201 | 202 | final Search search = newSearchFromCommandString(commandCache.getIfPresent(jobid), client, channel, request); 203 | 204 | if (search.joinSearchs.size() > 0) { 205 | 206 | search.executeQuery( 207 | new ActionListener() { 208 | @Override 209 | public void onResponse(QueryResponse response) { 210 | Response resp = new Response(); 211 | resp.type = ResponseType.QueryResponse; 212 | resp.query = response; 213 | resp.fieldNames = search.tableFieldNames; 214 | 215 | queryResultCache.put(retId, resp); 216 | sendQuery(from, request, channel, resp.query, resp.fieldNames, showMeta); 217 | } 218 | 219 | @Override 220 | public void onFailure(Throwable e) { 221 | sendFailure(request, channel, e); 222 | } 223 | }, from, size, sortFields); 224 | 225 | } else { 226 | 227 | search.executeQueryWithNonJoin( 228 | new RestBuilderListener(channel) { 229 | 230 | @Override 231 | public RestResponse 
buildResponse(SearchResponse result, XContentBuilder builder) throws Exception { 232 | 233 | Response resp = new Response(); 234 | resp.type = ResponseType.SearchResponse; 235 | resp.search = result; 236 | resp.fieldNames = search.tableFieldNames; 237 | 238 | queryResultCache.put(retId, resp); 239 | Search.buildQuery(from, builder, result, logger, search.tableFieldNames, showMeta); 240 | return new BytesRestResponse(RestStatus.OK, builder); 241 | } 242 | 243 | 244 | 245 | }, from, size, sortFields); 246 | 247 | } 248 | 249 | 250 | }else if(response.type == ResponseType.QueryResponse){ 251 | sendQuery(from, request, channel, response.query, response.fieldNames, showMeta); 252 | }else if(response.type == ResponseType.SearchResponse){ 253 | sendQuery(from, request, channel, response.search, response.fieldNames, showMeta); 254 | } 255 | 256 | 257 | 258 | 259 | }else if(type.equalsIgnoreCase("report")){ 260 | 261 | Response cacheResult = reportResultCache.getIfPresent(retId); 262 | 263 | if(cacheResult != null ){ 264 | sendReport(from, size, request, channel, cacheResult.report); 265 | } else { 266 | final Search search = newSearchFromCommandString(commandCache.getIfPresent(jobid), client, channel, request); 267 | 268 | 269 | final ReportResponse result = new ReportResponse(); 270 | result.bucketFields = search.bucketFields; 271 | result.statsFields = search.statsFields; 272 | 273 | search.executeReport( 274 | new ActionListener() { 275 | @Override 276 | public void onResponse(SearchResponse response) { 277 | result.response = response; 278 | 279 | Response resp = new Response(); 280 | resp.type = ResponseType.ReportResponse; 281 | resp.report = result; 282 | reportResultCache.put(retId, resp); 283 | sendReport(from, size, request, channel, result); 284 | } 285 | 286 | @Override 287 | public void onFailure(Throwable e) { 288 | sendFailure(request, channel, e); 289 | } 290 | }, from, size); 291 | } 292 | 293 | }else if(type.equalsIgnoreCase("timeline")){ 294 | 295 | Response cacheResult = timelineResultCache.getIfPresent(retId); 296 | if(cacheResult != null){ 297 | sendTimeline( request, channel, cacheResult.search); 298 | } else { 299 | Search search = newSearchFromCommandString(commandCache.getIfPresent(jobid), client, channel, request); 300 | 301 | //快速失败模式,load key 302 | 303 | search.executeTimeline( 304 | new ActionListener() { 305 | @Override 306 | public void onResponse(SearchResponse response) { 307 | Response resp = new Response(); 308 | resp.search = response; 309 | 310 | timelineResultCache.put(retId, resp); 311 | sendTimeline(request, channel, response); 312 | } 313 | 314 | @Override 315 | public void onFailure(Throwable e) { 316 | sendFailure(request, channel, e); 317 | } 318 | }, interval, timelineField); 319 | } 320 | } 321 | 322 | } 323 | 324 | private void sendQuery(int from, final RestRequest request,final RestChannel channel, SearchResponse response, List fieldNames, boolean showMeta){ 325 | XContentBuilder builder; 326 | try { 327 | builder = channel.newBuilder(); 328 | Search.buildQuery(from, builder, response, logger, fieldNames, showMeta); 329 | channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); 330 | } catch (IOException e) { 331 | sendFailure(request, channel, e); 332 | } 333 | } 334 | 335 | private void sendQuery(int from, final RestRequest request,final RestChannel channel, QueryResponse response, List fieldNames, boolean showMeta){ 336 | XContentBuilder builder; 337 | try { 338 | builder = channel.newBuilder(); 339 | Search.buildQuery(from, builder, 
response, logger, fieldNames, showMeta); 340 | channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); 341 | } catch (IOException e) { 342 | sendFailure(request, channel, e); 343 | } 344 | } 345 | 346 | 347 | 348 | private void sendReport(int from, int size, final RestRequest request,final RestChannel channel, ReportResponse response){ 349 | XContentBuilder builder; 350 | try { 351 | builder = channel.newBuilder(); 352 | Search.buildReport(from, size, builder, response, logger); 353 | channel.sendResponse(new BytesRestResponse(response.response.status(), builder)); 354 | } catch (IOException e) { 355 | sendFailure(request, channel, e); 356 | } catch (CommandException e) { 357 | sendFailure(request, channel, e); 358 | } 359 | } 360 | 361 | private void sendTimeline(final RestRequest request,final RestChannel channel, SearchResponse response){ 362 | XContentBuilder builder; 363 | try { 364 | builder = channel.newBuilder(); 365 | Search.buildTimeline(builder, response, logger); 366 | channel.sendResponse(new BytesRestResponse(response.status(), builder)); 367 | } catch (IOException e) { 368 | sendFailure(request, channel, e); 369 | } 370 | } 371 | 372 | public void sendFailure(RestRequest request, RestChannel channel, Throwable e) { 373 | try { 374 | channel.sendResponse(new BytesRestResponse(channel, e)); 375 | } catch (IOException e1) { 376 | logger.error("Failed to send failure response", e1); 377 | } 378 | } 379 | 380 | 381 | } 382 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/SimpleCharStream.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 5.0 */ 2 | /* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | /** 6 | * An implementation of interface CharStream, where the stream is assumed to 7 | * contain only ASCII characters (without unicode processing). 8 | */ 9 | 10 | public class SimpleCharStream 11 | { 12 | /** Whether parser is static. */ 13 | public static final boolean staticFlag = false; 14 | int bufsize; 15 | int available; 16 | int tokenBegin; 17 | /** Position in buffer. 
*/ 18 | public int bufpos = -1; 19 | protected int bufline[]; 20 | protected int bufcolumn[]; 21 | 22 | protected int column = 0; 23 | protected int line = 1; 24 | 25 | protected boolean prevCharIsCR = false; 26 | protected boolean prevCharIsLF = false; 27 | 28 | protected java.io.Reader inputStream; 29 | 30 | protected char[] buffer; 31 | protected int maxNextCharInd = 0; 32 | protected int inBuf = 0; 33 | protected int tabSize = 8; 34 | 35 | protected void setTabSize(int i) { tabSize = i; } 36 | protected int getTabSize(int i) { return tabSize; } 37 | 38 | 39 | protected void ExpandBuff(boolean wrapAround) 40 | { 41 | char[] newbuffer = new char[bufsize + 2048]; 42 | int newbufline[] = new int[bufsize + 2048]; 43 | int newbufcolumn[] = new int[bufsize + 2048]; 44 | 45 | try 46 | { 47 | if (wrapAround) 48 | { 49 | System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); 50 | System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos); 51 | buffer = newbuffer; 52 | 53 | System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); 54 | System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); 55 | bufline = newbufline; 56 | 57 | System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); 58 | System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); 59 | bufcolumn = newbufcolumn; 60 | 61 | maxNextCharInd = (bufpos += (bufsize - tokenBegin)); 62 | } 63 | else 64 | { 65 | System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); 66 | buffer = newbuffer; 67 | 68 | System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); 69 | bufline = newbufline; 70 | 71 | System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); 72 | bufcolumn = newbufcolumn; 73 | 74 | maxNextCharInd = (bufpos -= tokenBegin); 75 | } 76 | } 77 | catch (Throwable t) 78 | { 79 | throw new Error(t.getMessage()); 80 | } 81 | 82 | 83 | bufsize += 2048; 84 | available = bufsize; 85 | tokenBegin = 0; 86 | } 87 | 88 | protected void FillBuff() throws java.io.IOException 89 | { 90 | if (maxNextCharInd == available) 91 | { 92 | if (available == bufsize) 93 | { 94 | if (tokenBegin > 2048) 95 | { 96 | bufpos = maxNextCharInd = 0; 97 | available = tokenBegin; 98 | } 99 | else if (tokenBegin < 0) 100 | bufpos = maxNextCharInd = 0; 101 | else 102 | ExpandBuff(false); 103 | } 104 | else if (available > tokenBegin) 105 | available = bufsize; 106 | else if ((tokenBegin - available) < 2048) 107 | ExpandBuff(true); 108 | else 109 | available = tokenBegin; 110 | } 111 | 112 | int i; 113 | try { 114 | if ((i = inputStream.read(buffer, maxNextCharInd, available - maxNextCharInd)) == -1) 115 | { 116 | inputStream.close(); 117 | throw new java.io.IOException(); 118 | } 119 | else 120 | maxNextCharInd += i; 121 | return; 122 | } 123 | catch(java.io.IOException e) { 124 | --bufpos; 125 | backup(0); 126 | if (tokenBegin == -1) 127 | tokenBegin = bufpos; 128 | throw e; 129 | } 130 | } 131 | 132 | /** Start. 
*/ 133 | public char BeginToken() throws java.io.IOException 134 | { 135 | tokenBegin = -1; 136 | char c = readChar(); 137 | tokenBegin = bufpos; 138 | 139 | return c; 140 | } 141 | 142 | protected void UpdateLineColumn(char c) 143 | { 144 | column++; 145 | 146 | if (prevCharIsLF) 147 | { 148 | prevCharIsLF = false; 149 | line += (column = 1); 150 | } 151 | else if (prevCharIsCR) 152 | { 153 | prevCharIsCR = false; 154 | if (c == '\n') 155 | { 156 | prevCharIsLF = true; 157 | } 158 | else 159 | line += (column = 1); 160 | } 161 | 162 | switch (c) 163 | { 164 | case '\r' : 165 | prevCharIsCR = true; 166 | break; 167 | case '\n' : 168 | prevCharIsLF = true; 169 | break; 170 | case '\t' : 171 | column--; 172 | column += (tabSize - (column % tabSize)); 173 | break; 174 | default : 175 | break; 176 | } 177 | 178 | bufline[bufpos] = line; 179 | bufcolumn[bufpos] = column; 180 | } 181 | 182 | /** Read a character. */ 183 | public char readChar() throws java.io.IOException 184 | { 185 | if (inBuf > 0) 186 | { 187 | --inBuf; 188 | 189 | if (++bufpos == bufsize) 190 | bufpos = 0; 191 | 192 | return buffer[bufpos]; 193 | } 194 | 195 | if (++bufpos >= maxNextCharInd) 196 | FillBuff(); 197 | 198 | char c = buffer[bufpos]; 199 | 200 | UpdateLineColumn(c); 201 | return c; 202 | } 203 | 204 | @Deprecated 205 | /** 206 | * @deprecated 207 | * @see #getEndColumn 208 | */ 209 | 210 | public int getColumn() { 211 | return bufcolumn[bufpos]; 212 | } 213 | 214 | @Deprecated 215 | /** 216 | * @deprecated 217 | * @see #getEndLine 218 | */ 219 | 220 | public int getLine() { 221 | return bufline[bufpos]; 222 | } 223 | 224 | /** Get token end column number. */ 225 | public int getEndColumn() { 226 | return bufcolumn[bufpos]; 227 | } 228 | 229 | /** Get token end line number. */ 230 | public int getEndLine() { 231 | return bufline[bufpos]; 232 | } 233 | 234 | /** Get token beginning column number. */ 235 | public int getBeginColumn() { 236 | return bufcolumn[tokenBegin]; 237 | } 238 | 239 | /** Get token beginning line number. */ 240 | public int getBeginLine() { 241 | return bufline[tokenBegin]; 242 | } 243 | 244 | /** Backup a number of characters. */ 245 | public void backup(int amount) { 246 | 247 | inBuf += amount; 248 | if ((bufpos -= amount) < 0) 249 | bufpos += bufsize; 250 | } 251 | 252 | /** Constructor. */ 253 | public SimpleCharStream(java.io.Reader dstream, int startline, 254 | int startcolumn, int buffersize) 255 | { 256 | inputStream = dstream; 257 | line = startline; 258 | column = startcolumn - 1; 259 | 260 | available = bufsize = buffersize; 261 | buffer = new char[buffersize]; 262 | bufline = new int[buffersize]; 263 | bufcolumn = new int[buffersize]; 264 | } 265 | 266 | /** Constructor. */ 267 | public SimpleCharStream(java.io.Reader dstream, int startline, 268 | int startcolumn) 269 | { 270 | this(dstream, startline, startcolumn, 4096); 271 | } 272 | 273 | /** Constructor. */ 274 | public SimpleCharStream(java.io.Reader dstream) 275 | { 276 | this(dstream, 1, 1, 4096); 277 | } 278 | 279 | /** Reinitialise. 
*/ 280 | public void ReInit(java.io.Reader dstream, int startline, 281 | int startcolumn, int buffersize) 282 | { 283 | inputStream = dstream; 284 | line = startline; 285 | column = startcolumn - 1; 286 | 287 | if (buffer == null || buffersize != buffer.length) 288 | { 289 | available = bufsize = buffersize; 290 | buffer = new char[buffersize]; 291 | bufline = new int[buffersize]; 292 | bufcolumn = new int[buffersize]; 293 | } 294 | prevCharIsLF = prevCharIsCR = false; 295 | tokenBegin = inBuf = maxNextCharInd = 0; 296 | bufpos = -1; 297 | } 298 | 299 | /** Reinitialise. */ 300 | public void ReInit(java.io.Reader dstream, int startline, 301 | int startcolumn) 302 | { 303 | ReInit(dstream, startline, startcolumn, 4096); 304 | } 305 | 306 | /** Reinitialise. */ 307 | public void ReInit(java.io.Reader dstream) 308 | { 309 | ReInit(dstream, 1, 1, 4096); 310 | } 311 | /** Constructor. */ 312 | public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, 313 | int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException 314 | { 315 | this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); 316 | } 317 | 318 | /** Constructor. */ 319 | public SimpleCharStream(java.io.InputStream dstream, int startline, 320 | int startcolumn, int buffersize) 321 | { 322 | this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); 323 | } 324 | 325 | /** Constructor. */ 326 | public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, 327 | int startcolumn) throws java.io.UnsupportedEncodingException 328 | { 329 | this(dstream, encoding, startline, startcolumn, 4096); 330 | } 331 | 332 | /** Constructor. */ 333 | public SimpleCharStream(java.io.InputStream dstream, int startline, 334 | int startcolumn) 335 | { 336 | this(dstream, startline, startcolumn, 4096); 337 | } 338 | 339 | /** Constructor. */ 340 | public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException 341 | { 342 | this(dstream, encoding, 1, 1, 4096); 343 | } 344 | 345 | /** Constructor. */ 346 | public SimpleCharStream(java.io.InputStream dstream) 347 | { 348 | this(dstream, 1, 1, 4096); 349 | } 350 | 351 | /** Reinitialise. */ 352 | public void ReInit(java.io.InputStream dstream, String encoding, int startline, 353 | int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException 354 | { 355 | ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); 356 | } 357 | 358 | /** Reinitialise. */ 359 | public void ReInit(java.io.InputStream dstream, int startline, 360 | int startcolumn, int buffersize) 361 | { 362 | ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); 363 | } 364 | 365 | /** Reinitialise. */ 366 | public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException 367 | { 368 | ReInit(dstream, encoding, 1, 1, 4096); 369 | } 370 | 371 | /** Reinitialise. */ 372 | public void ReInit(java.io.InputStream dstream) 373 | { 374 | ReInit(dstream, 1, 1, 4096); 375 | } 376 | /** Reinitialise. 
*/ 377 | public void ReInit(java.io.InputStream dstream, String encoding, int startline, 378 | int startcolumn) throws java.io.UnsupportedEncodingException 379 | { 380 | ReInit(dstream, encoding, startline, startcolumn, 4096); 381 | } 382 | /** Reinitialise. */ 383 | public void ReInit(java.io.InputStream dstream, int startline, 384 | int startcolumn) 385 | { 386 | ReInit(dstream, startline, startcolumn, 4096); 387 | } 388 | /** Get token literal value. */ 389 | public String GetImage() 390 | { 391 | if (bufpos >= tokenBegin) 392 | return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); 393 | else 394 | return new String(buffer, tokenBegin, bufsize - tokenBegin) + 395 | new String(buffer, 0, bufpos + 1); 396 | } 397 | 398 | /** Get the suffix. */ 399 | public char[] GetSuffix(int len) 400 | { 401 | char[] ret = new char[len]; 402 | 403 | if ((bufpos + 1) >= len) 404 | System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); 405 | else 406 | { 407 | System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, 408 | len - bufpos - 1); 409 | System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); 410 | } 411 | 412 | return ret; 413 | } 414 | 415 | /** Reset buffer when finished. */ 416 | public void Done() 417 | { 418 | buffer = null; 419 | bufline = null; 420 | bufcolumn = null; 421 | } 422 | 423 | /** 424 | * Method to adjust line and column numbers for the start of a token. 425 | */ 426 | public void adjustBeginLineColumn(int newLine, int newCol) 427 | { 428 | int start = tokenBegin; 429 | int len; 430 | 431 | if (bufpos >= tokenBegin) 432 | { 433 | len = bufpos - tokenBegin + inBuf + 1; 434 | } 435 | else 436 | { 437 | len = bufsize - tokenBegin + bufpos + 1 + inBuf; 438 | } 439 | 440 | int i = 0, j = 0, k = 0; 441 | int nextColDiff = 0, columnDiff = 0; 442 | 443 | while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) 444 | { 445 | bufline[j] = newLine; 446 | nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; 447 | bufcolumn[j] = newCol + columnDiff; 448 | columnDiff = nextColDiff; 449 | i++; 450 | } 451 | 452 | if (i < len) 453 | { 454 | bufline[j] = newLine++; 455 | bufcolumn[j] = newCol + columnDiff; 456 | 457 | while (i++ < len) 458 | { 459 | if (bufline[j = start % bufsize] != bufline[++start % bufsize]) 460 | bufline[j] = newLine++; 461 | else 462 | bufline[j] = newLine; 463 | } 464 | } 465 | 466 | line = bufline[j]; 467 | column = bufcolumn[j]; 468 | } 469 | 470 | } 471 | /* JavaCC - OriginalChecksum=59670aa6e6e7f69c154bf9afa152aed5 (do not edit this line) */ 472 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/CommandParser.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | BNF for CommandParser.jj 5 | 6 | 7 |

BNF for CommandParser.jj

8 |

TOKENS

13 |
 14 | /*****************************************************************************\
 15 |  * Lexical Analysis
 16 | \*****************************************************************************/
17 |
22 |
 23 | <DEFAULT> SKIP : {
 24 | " "
 25 | | "\f"
 26 | | "\t"
 27 | | "\r"
 28 | | "\n"
 29 | }
 30 | 
 31 |    
32 |
37 |
 38 | /*
 39 |  *  Prefix   Meaning
 40 |  * ---------------------
 41 |  *  K_       Keyword
 42 |  *  O_       Operator
 43 |  *  S_       Substitute
 44 |  */
45 |
50 |
 51 | <DEFAULT> TOKEN : {
 52 | <K_AND: "AND">
 53 | | <K_SEARCH: "SEARCH">
 54 | | <K_COUNTFIELD: "COUNTFIELD">
 55 | | <K_LIMIT: "LIMIT">
 56 | | <K_OTHERSTR: "OTHERSTR">
 57 | | <K_PERCENTFIELD: "PERCENTFIELD">
 58 | | <K_SHOWCOUNT: "SHOWCOUNT">
 59 | | <K_SHOWPERC: "SHOWPERC">
 60 | | <K_USEOTHER: "USEROTHER">
 61 | | <K_TIMEFORMAT: "TIMEFORMAT">
 62 | | <K_STARTTIME: "STARTTIME">
 63 | | <K_ENDTIME: "ENDTIME">
 64 | | <K_EARLIEST: "EARLIEST">
 65 | | <K_LATEST: "LATEST">
 66 | | <K_MINCOUNT: "MINCOUNT">
 67 | | <K_SOURCETYPE: "SOURCETYPE">
 68 | | <K_TOP: "TOP">
 69 | | <K_BY: "BY">
 70 | | <K_CREATE: "CREATE">
 71 | | <K_DELETE: "DELETE">
 72 | | <K_DESC: "DESC">
 73 | | <K_ASC: "ASC">
 74 | | <K_DESCRIBE: "DESCRIBE">
 75 | | <K_DISTINCT: "DISTINCT">
 76 | | <K_DROP: "DROP">
 77 | | <K_EXPLAIN: "EXPLAIN">
 78 | | <K_HELP: "HELP">
 79 | | <K_FLOAT: "FLOAT">
 80 | | <K_FROM: "FROM">
 81 | | <K_INDEX: "INDEX">
 82 | | <K_INSERT: "INSERT">
 83 | | <K_INTEGER: "INTEGER">
 84 | | <K_INTO: "INTO">
 85 | | <K_ON: "ON">
 86 | | <K_OR: "OR">
 87 | | <K_NOT: "NOT">
 88 | | <K_ORDER: "ORDER">
 89 | | <K_QUIT: "QUIT">
 90 | | <K_SELECT: "SELECT">
 91 | | <K_SET: "SET">
 92 | | <K_STRING: "STRING">
 93 | | <K_STATS: "STATS">
 94 | | <K_TABLE: "TABLE">
 95 | | <K_UPDATE: "UPDATE">
 96 | | <K_VALUES: "VALUES">
 97 | | <K_WHERE: "WHERE">
 98 | | <K_HASCHILD: "HASCHILD">
 99 | | <K_HASPARENT: "HASPARENT">
100 | | <K_SUM: "SUM">
101 | | <K_DC: "DC">
102 | | <K_MIN: "MIN">
103 | | <K_MAX: "MAX">
104 | | <K_AVG: "AVG">
105 | | <K_EVAL: "EVAL">
106 | | <K_COUNT: "COUNT">
107 | | <K_AS: "AS">
108 | | <K_REGEX: "REGEX">
109 | | <K_SORT: "SORT">
110 | | <K_JOIN: "JOIN">
111 | | <K_SPAN: "SPAN">
112 | | <K_TIMESPAN: "TIMESPAN">
113 | | <K_COUNTORDER: "COUNTORDER">
114 | }
115 | 
116 |    
117 |
122 |
123 | <DEFAULT> TOKEN : {
124 | <O_EQ: "=">
125 | | <O_NEQ: "!=">
126 | | <O_GT: ">">
127 | | <O_GTE: ">=">
128 | | <O_LT: "<">
129 | | <O_LTE: "<=">
130 | | <O_LPAREN: "(">
131 | | <O_RPAREN: ")">
132 | | <O_COMMA: ",">
133 | | <O_SEMI: ";">
134 | | <O_QUOTE: "\"">
135 | | <O_VERTICAL: "|">
136 | }
137 | 
138 |    
145 | <DEFAULT> TOKEN : {
146 | <S_CHINESE: (<CJK>)+ (<CJK>)*>
147 | | <#CJK: ["\u3040"-"\u318f","\u3300"-"\u337f","\u3400"-"\u3d2d","\u4e00"-"\u9fff","\uf900"-"\ufaff"]>
148 | }
149 | 
150 |    
157 | <DEFAULT> TOKEN : {
158 | <S_INTEGER: ("-")? (<DIGIT>)+>
159 | | <#DIGIT: ["0"-"9"]>
160 | | <S_FLOAT: ("-")? (<S_INTEGER>)? "." <S_INTEGER>>
161 | }
162 | 
163 |    
170 | <DEFAULT> TOKEN : {
171 | <S_IDENTIFIER: (<DIGIT> | <LETTER> | <SPECIAL_CHAR>)+>
172 | | <#LETTER: ["a"-"z","A"-"Z"]>
173 | | <#SPECIAL_CHAR: "$" | "_" | "/" | "+" | "-" | "*" | "?" | ":" | ".">
174 | | <S_QUOTED_STRING: "\"" (~["\""])* "\"">
175 | | <S_SINGLE_QUOTED_STRING: "\'" (~["\'"])* "\'">
176 | }
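For illustration (the literals below are invented, not taken from the grammar file): user_id, 10.0.0.1 and /api/v1/items all lex as <S_IDENTIFIER>, since ".", "/", "_" and "-" appear in <SPECIAL_CHAR>, whereas any value containing a space or another excluded character, e.g. "error 500 page", must be written as an <S_QUOTED_STRING> (or <S_SINGLE_QUOTED_STRING>).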
177 | 
178 |    
185 | <DEFAULT> SPECIAL : {
186 | <LINE_COMMENT: "--" (~["\r","\n"])*>
187 | | <MULTI_LINE_COMMENT: "/*" (~["*"])* "*" ("*" | ~["*","/"] (~["*"])* "*")* "/">
188 | }
189 | 
190 |    

NON-TERMINALS

200 | /*****************************************************************************\
201 |  * Top-Level Statements
202 | \*****************************************************************************/
203 |
Start::=SearchStatement ( <O_VERTICAL> ( JoinStatement | StatsStatement | SortStatement | TableStatement ) )* <EOF>
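As a sketch (index and field names here are hypothetical), a pipeline accepted by this production could look like:

SEARCH INDEX=weblog SOURCETYPE=HTTPLOG STATUS=404 | STATS SUM(BYTES) AS TOTAL BY HOST | SORT 10 TOTAL DESC | TABLE HOST,TOTAL

i.e. one SearchStatement followed by any number of "|"-separated JOIN, STATS, SORT or TABLE stages before <EOF>.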
214 | /* table  */
TableStatement::=<K_TABLE> FieldList
226 | /*join  [ subsearch ]*/
JoinStatement::=<K_JOIN> FieldList <O_LPAREN> SearchStatement <O_RPAREN>
238 | /*sort [<count>] (<field>)+ [desc]*/
SortStatement::=<K_SORT> ( <S_INTEGER> )? <S_IDENTIFIER> ( <K_DESC> | <K_ASC> )? ( <O_COMMA> <S_IDENTIFIER> ( <K_DESC> | <K_ASC> )? )*
RegexStatement::=<K_REGEX> ( ( <S_IDENTIFIER> <O_EQ> <S_QUOTED_STRING> ) | <S_QUOTED_STRING> )*
StatsStatement::=<K_STATS> ( <K_MINCOUNT> <O_EQ> <S_INTEGER> ( <O_COMMA> <S_INTEGER> )* | <K_LIMIT> <O_EQ> <S_INTEGER> ( <O_COMMA> <S_INTEGER> )* | <K_SPAN> <O_EQ> <S_INTEGER> ( <O_COMMA> <S_INTEGER> )* | <K_TIMESPAN> <O_EQ> <S_IDENTIFIER> ( <O_COMMA> <S_IDENTIFIER> )* )? StatsFunction ( <O_COMMA> StatsFunction )* ( ByClause )?
StatsFunction::=( <K_COUNT> | <K_SUM> | <K_DC> | <K_MAX> | <K_MIN> | <K_AVG> ) <O_LPAREN> ( <S_IDENTIFIER> | <S_QUOTED_STRING> ) <O_RPAREN> ( <K_AS> <S_IDENTIFIER> )? ( <K_DESC> | <K_ASC> )?
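A hypothetical stats stage combining these two productions (option and field names invented) might read:

STATS TIMESPAN=1d SUM(BYTES) AS TOTAL, COUNT(URL) BY HOST COUNTORDER DESC

where at most one of MINCOUNT, LIMIT, SPAN or TIMESPAN may be given, followed by one or more StatsFunction terms and an optional ByClause.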
EvalExpression::=<K_EVAL> <O_LPAREN> <S_QUOTED_STRING> <O_RPAREN>
DeleteStatement::=<K_DELETE>
FieldList::=<S_IDENTIFIER> ( <O_COMMA> <S_IDENTIFIER> )*
280 | /*
281 | void TopStatement() #_Top : {}
282 | {
283 |    (LOOKAHEAD(2)TopOption())* FieldList() [ ByClause() ]
284 | }
285 | void TopOption() #_TopOption : {}
286 | {
287 | 
288 |      {jjtThis.opt.type = Option.COUNTFIELD; jjtThis.opt.value = token.image;}
289 |   | 
290 |       {jjtThis.opt.type = Option.LIMIT; jjtThis.opt.value = token.image;}
291 |   | 
292 |       {jjtThis.opt.type = Option.OTHERSTR; jjtThis.opt.value = token.image;}
293 |   | 
294 |       {jjtThis.opt.type = Option.PERCENTFIELD; jjtThis.opt.value = token.image;}
295 |   | 
296 |       {jjtThis.opt.type = Option.SHOWCOUNT; jjtThis.opt.value = token.image;}
297 |   | 
298 |       {jjtThis.opt.type = Option.SHOWPERC; jjtThis.opt.value = token.image;}
299 |   | 
300 |      {jjtThis.opt.type = Option.USEOTHER; jjtThis.opt.value = token.image;}
301 |   |
302 |      {jjtThis.opt.type = Option.MINCOUNT; jjtThis.opt.value = token.image;}
303 |   
304 | }
305 | */
ByClause::=<K_BY> ( <K_EVAL> <O_LPAREN> <S_QUOTED_STRING> <O_RPAREN> | <S_IDENTIFIER> ) ( <K_COUNTORDER> )? ( <K_DESC> | <K_ASC> )? ( <O_COMMA> ( <K_EVAL> <O_LPAREN> <S_QUOTED_STRING> <O_RPAREN> | <S_IDENTIFIER> ) ( <K_COUNTORDER> )? ( <K_DESC> | <K_ASC> )? )*
SearchStatement::=<K_SEARCH> ( SearchOption )* ( BooleanExpression )?
BooleanExpression::=AndExpression ( <K_OR> AndExpression )*
AndExpression::=UnaryExpression ( <K_AND> UnaryExpression )*
UnaryExpression::=( <K_NOT> )? ( <O_LPAREN> BooleanExpression <O_RPAREN> | PredicateExpression )
SearchOption::=( <K_HASCHILD> <O_EQ> <O_LPAREN> <K_SOURCETYPE> <O_EQ> <S_IDENTIFIER> BooleanExpression <O_RPAREN> | <K_HASPARENT> <O_EQ> <O_LPAREN> <K_SOURCETYPE> <O_EQ> <S_IDENTIFIER> BooleanExpression <O_RPAREN> | <K_TIMEFORMAT> <O_EQ> <S_IDENTIFIER> | <K_STARTTIME> <O_EQ> ( <S_IDENTIFIER> | <S_QUOTED_STRING> ) | <K_ENDTIME> <O_EQ> ( <S_IDENTIFIER> | <S_QUOTED_STRING> ) | <K_EARLIEST> <O_EQ> <S_IDENTIFIER> | <K_LATEST> <O_EQ> <S_IDENTIFIER> | <K_SOURCETYPE> <O_EQ> <S_IDENTIFIER> ( <O_COMMA> <S_IDENTIFIER> )* | <K_INDEX> <O_EQ> <S_IDENTIFIER> ( <O_COMMA> <S_IDENTIFIER> )* )
PredicateExpression::=( ComparisonExpression ( ComparisonExpression | TermExpression )* | TermExpression ( ComparisonExpression | TermExpression )* )
TermExpression::=( ( <S_INTEGER> | <S_FLOAT> | <S_IDENTIFIER> ) | <S_QUOTED_STRING> )
ComparisonExpression::=<S_IDENTIFIER> ( <O_EQ> | <O_NEQ> | <O_GT> | <O_GTE> | <O_LT> | <O_LTE> ) ( ( <S_INTEGER> | <S_FLOAT> | <S_IDENTIFIER> ) | <S_QUOTED_STRING> )
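Reading the search-side productions together, a hypothetical statement (all option values invented) would be:

SEARCH INDEX=applog,syslog SOURCETYPE=NGINX EARLIEST=now-7d NOT (STATUS=500 OR STATUS=404) AND RESPONSETIME>=1000

INDEX, SOURCETYPE and EARLIEST are consumed as SearchOptions; the remainder parses as a BooleanExpression of AND/OR-joined UnaryExpressions, with ComparisonExpression covering the =, !=, <, <=, > and >= operators.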
354 | 355 | 356 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/AST_Search.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JJTree: Do not edit this line. AST_Search.java Version 4.3 */ 2 | /* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | 6 | import org.elasticsearch.index.query.BoolQueryBuilder; 7 | import org.elasticsearch.index.query.FilterBuilder; 8 | import org.elasticsearch.index.query.QueryBuilder; 9 | import org.elasticsearch.index.query.FilterBuilders; 10 | import org.elasticsearch.index.query.QueryBuilders; 11 | import org.elasticsearch.index.query.RangeQueryBuilder; 12 | import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; 13 | import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; 14 | 15 | import com.everdata.command.CommandException; 16 | import com.everdata.command.Option; 17 | import com.everdata.command.Search.CommandArgv; 18 | 19 | import java.util.ArrayList; 20 | import java.util.HashMap; 21 | import java.util.List; 22 | 23 | public class AST_Search extends SimpleNode { 24 | 25 | HashMap options = null; 26 | FilterBuilder filterBuilder = null; 27 | QueryBuilder queryBuilder = null; 28 | BoolQueryBuilder joinFieldsQuery = null; 29 | List childExpressions = new ArrayList(); 30 | AST_OrExpr parentExpression = null; 31 | 32 | 33 | public AST_Search(int id) { 34 | super(id); 35 | } 36 | 37 | public AST_Search(CommandParser p, int id) { 38 | super(p, id); 39 | } 40 | 41 | public boolean hasCondition(){ 42 | 43 | for (Node n : children) { 44 | if (n instanceof AST_OrExpr) { 45 | return true; 46 | } 47 | } 48 | 49 | return false; 50 | } 51 | 52 | public Object getOption(int optionType) { 53 | 54 | if (options != null) return options.get(optionType); 55 | 56 | options = new HashMap(); 57 | //options.put(Option.INDEX, "_all"); 58 | 59 | ArrayList hasChildTypes = new ArrayList(); 60 | 61 | for (Node n : children) { 62 | if (n instanceof AST_SearchOption) { 63 | 64 | if(((AST_SearchOption) n).opt.type == Option.HASCHILD){ 65 | childExpressions.add((AST_OrExpr) ((AST_SearchOption) n).children[0]); 66 | hasChildTypes.add(((AST_SearchOption) n).opt.value); 67 | options.put(((AST_SearchOption) n).opt.type, hasChildTypes); 68 | 69 | }else if(((AST_SearchOption) n).opt.type == Option.HASPARENT){ 70 | parentExpression = (AST_OrExpr) ((AST_SearchOption) n).children[0]; 71 | options.put(((AST_SearchOption) n).opt.type, ((AST_SearchOption) n).opt.value); 72 | }else{ 73 | options.put(((AST_SearchOption) n).opt.type, ((AST_SearchOption) n).opt.value); 74 | } 75 | 76 | 77 | } 78 | } 79 | 80 | return options.get(optionType); 81 | 82 | /* 83 | * options.put(Option, arg1); options.put(arg0, arg1); options.put(arg0, 84 | * arg1); 85 | */ 86 | 87 | } 88 | 89 | private static Object convert(String value){ 90 | try{ 91 | return Integer.parseInt(value); 92 | }catch( NumberFormatException e){ 93 | try{ return Long.parseLong(value); }catch( NumberFormatException e0){ 94 | try{ return Float.parseFloat(value); }catch( NumberFormatException e1){ 95 | try{ return Float.parseFloat(value); }catch( NumberFormatException e2){ 96 | return null; 97 | } 98 | } 99 | } 100 | } 101 | } 102 | 103 | private static QueryBuilder fromValueTypeQ(String field, String value, int valueType){ 104 | 
if(value.contains("*") || value.contains("?")){ 105 | if( value.length() > 1 && value.indexOf('*') == (value.length()-1)) 106 | return QueryBuilders.prefixQuery(field, value.substring(0, value.length()-1)); 107 | else 108 | return QueryBuilders.wildcardQuery(field, value); 109 | }else if(value.equalsIgnoreCase("")){ 110 | 111 | return QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), FilterBuilders.boolFilter() 112 | .should(FilterBuilders.scriptFilter("doc['"+field+"'].value.size() == 0")) 113 | .should(FilterBuilders.missingFilter(field).nullValue(true).existence(true))); 114 | } 115 | //全部使用对短语进行分词后再搜索 116 | return QueryBuilders.matchPhraseQuery(field, value); 117 | 118 | 119 | /* 120 | switch(valueType){ 121 | 122 | case AST_TermExpression.TERM: 123 | return QueryBuilders.termQuery(field, value); 124 | case AST_TermExpression.PHRASE: 125 | return QueryBuilders.matchPhraseQuery(field, value); 126 | } 127 | 128 | 129 | return null; 130 | */ 131 | } 132 | 133 | private static FilterBuilder fromValueType(String field, String value, int valueType){ 134 | if(value.contains("*") || value.contains("?")){ 135 | if( value.length() > 1 && value.indexOf('*') == (value.length()-1)) 136 | return FilterBuilders.prefixFilter(field, value.substring(0, value.length()-1)); 137 | else 138 | return FilterBuilders.queryFilter(QueryBuilders.wildcardQuery(field, value)); 139 | } 140 | 141 | switch(valueType){ 142 | case AST_TermExpression.TERM: 143 | return FilterBuilders.termFilter(field, value); 144 | case AST_TermExpression.PHRASE: 145 | /*for(byte b: value.getBytes()){ 146 | System.out.printf("0x%02X ", b); 147 | }*/ 148 | return FilterBuilders.queryFilter(QueryBuilders.matchPhraseQuery(field, value)); 149 | } 150 | return null; 151 | } 152 | /* 153 | private static FilterBuilder genFilterBuilder(SimpleNode tree) throws CommandException{ 154 | 155 | 156 | //logic expression 157 | 158 | 159 | switch(tree.id){ 160 | case CommandParserTreeConstants.JJT_TERMEXPRESSION: 161 | AST_TermExpression t = (AST_TermExpression)tree; 162 | return fromValueType("_all", t.term, t.type); 163 | 164 | case CommandParserTreeConstants.JJT_COMPARISONEXPRESSION: 165 | Expression expr = ((AST_ComparisonExpression)tree).expr; 166 | switch(expr.oper){ 167 | case Expression.EQ: 168 | return fromValueType(expr.field,expr.value, expr.valueType); 169 | case Expression.NEQ: 170 | return FilterBuilders.notFilter(fromValueType(expr.field,expr.value, expr.valueType)); 171 | default: 172 | //Object number = convert(expr.value); 173 | //if( number == null ) 174 | // throw new CommandException("不支持针对非数字类型的值做Range类型的查询"); 175 | if( expr.oper == Expression.GT) 176 | return FilterBuilders.rangeFilter(expr.field).gt(expr.value); 177 | else if( expr.oper == Expression.GTE) 178 | return FilterBuilders.rangeFilter(expr.field).gte(expr.value); 179 | else if( expr.oper == Expression.LT) 180 | return FilterBuilders.rangeFilter(expr.field).lt(expr.value); 181 | else if( expr.oper == Expression.LTE) 182 | return FilterBuilders.rangeFilter(expr.field).lte(expr.value); 183 | 184 | } 185 | 186 | case CommandParserTreeConstants.JJT_PREDICATEEXPRESSION: 187 | if(tree.children.length > 1){ 188 | AndFilterBuilder fb = FilterBuilders.andFilter(); 189 | for(Node n: tree.children){ 190 | fb.add(genFilterBuilder((SimpleNode)n)); 191 | } 192 | 193 | return fb; 194 | }else 195 | return genFilterBuilder((SimpleNode)tree.children[0]); 196 | case CommandParserTreeConstants.JJT_OREXPR: 197 | if(tree.children.length > 1){ 198 | OrFilterBuilder fb = 
FilterBuilders.orFilter(); 199 | for(Node n: tree.children){ 200 | fb.add(genFilterBuilder((SimpleNode)n)); 201 | } 202 | 203 | return fb; 204 | }else 205 | return genFilterBuilder((SimpleNode)tree.children[0]); 206 | 207 | 208 | case CommandParserTreeConstants.JJT_ANDEXPR: 209 | if(tree.children.length > 1){ 210 | AndFilterBuilder fb = FilterBuilders.andFilter(); 211 | for(Node n: tree.children){ 212 | fb.add(genFilterBuilder((SimpleNode)n)); 213 | } 214 | 215 | return fb; 216 | }else 217 | return genFilterBuilder((SimpleNode)tree.children[0]); 218 | 219 | case CommandParserTreeConstants.JJT_UNARYEXPR: 220 | if(((AST_UnaryExpr)tree).isNot){ 221 | NotFilterBuilder fb = FilterBuilders.notFilter(genFilterBuilder((SimpleNode)tree.children[0])); 222 | return fb; 223 | }else 224 | return genFilterBuilder((SimpleNode)tree.children[0]); 225 | 226 | 227 | } 228 | 229 | return genFilterBuilder((SimpleNode)tree.children[0]); 230 | 231 | } 232 | 233 | 234 | 235 | private FilterBuilder getInternalFilter() throws CommandException { 236 | 237 | 238 | if (filterBuilder != null) 239 | return filterBuilder; 240 | 241 | ArrayList allFilters = new ArrayList(); 242 | 243 | for (Node n : children) { 244 | if (n instanceof AST_OrExpr) { 245 | allFilters.add(genFilterBuilder((SimpleNode)n)); 246 | break; 247 | } 248 | } 249 | 250 | ArrayList childTypes = (ArrayList)getOption(Option.HASCHILD); 251 | String parentType = (String)getOption(Option.HASPARENT); 252 | 253 | //FilterBuilder parent_child = null; 254 | if(childTypes != null){ 255 | for(int i = 0; i < childTypes.size(); i++) 256 | allFilters.add(FilterBuilders.hasChildFilter(childTypes.get(i), genFilterBuilder(childExpressions.get(i)))); 257 | 258 | }else if(parentType != null){ 259 | 260 | allFilters.add(FilterBuilders.hasParentFilter(parentType, genFilterBuilder(parentExpression))); 261 | 262 | } 263 | 264 | String starttime = (String) getOption(Option.STARTTIME); 265 | String endtime = (String) getOption(Option.ENDTIME); 266 | 267 | if(starttime != null | endtime !=null){ 268 | RangeFilterBuilder timeFilter = FilterBuilders.rangeFilter("_timestamp").from(starttime).to(endtime); 269 | allFilters.add(timeFilter); 270 | } 271 | 272 | if( allFilters.size() == 0) 273 | filterBuilder = null; 274 | else if( allFilters.size() == 1) 275 | filterBuilder = allFilters.get(0); 276 | else 277 | filterBuilder = FilterBuilders.andFilter(allFilters.toArray(new FilterBuilder[allFilters.size()])); 278 | 279 | return filterBuilder; 280 | 281 | 282 | } 283 | */ 284 | 285 | private static QueryBuilder boostWrapMatchAllWithFunctionScore(QueryBuilder qb, String term, String boostFields){ 286 | 287 | String[] fields = boostFields.split(","); 288 | BoolQueryBuilder query = QueryBuilders.boolQuery(); 289 | for(String field : fields){ 290 | query.should(QueryBuilders.matchQuery(field, term)); 291 | } 292 | 293 | FunctionScoreQueryBuilder fsqb = QueryBuilders.functionScoreQuery(query).add(ScoreFunctionBuilders.weightFactorFunction(10)); 294 | 295 | return QueryBuilders.boolQuery().should(qb).should(fsqb); 296 | } 297 | 298 | private static QueryBuilder genQueryBuilder(SimpleNode tree, String boostFields) throws CommandException{ 299 | 300 | 301 | //logic expression 302 | 303 | 304 | switch(tree.id){ 305 | case CommandParserTreeConstants.JJT_TERMEXPRESSION: 306 | AST_TermExpression t = (AST_TermExpression)tree; 307 | QueryBuilder rawQb = fromValueTypeQ("_all", t.term, t.type); 308 | if(boostFields != null) 309 | return boostWrapMatchAllWithFunctionScore(rawQb, t.term, boostFields); 
310 | else 311 | return rawQb; 312 | case CommandParserTreeConstants.JJT_COMPARISONEXPRESSION: 313 | Expression expr = ((AST_ComparisonExpression)tree).expr; 314 | switch(expr.oper){ 315 | case Expression.EQ: 316 | return fromValueTypeQ(expr.field,expr.value, expr.valueType); 317 | case Expression.NEQ: 318 | return QueryBuilders.boolQuery().mustNot(fromValueTypeQ(expr.field,expr.value, expr.valueType)); 319 | default: 320 | //Object number = convert(expr.value); 321 | //if( number == null ) 322 | // throw new CommandException("不支持针对非数字类型的值做Range类型的查询"); 323 | if( expr.oper == Expression.GT) 324 | return QueryBuilders.rangeQuery(expr.field).gt(expr.value); 325 | else if( expr.oper == Expression.GTE) 326 | return QueryBuilders.rangeQuery(expr.field).gte(expr.value); 327 | else if( expr.oper == Expression.LT) 328 | return QueryBuilders.rangeQuery(expr.field).lt(expr.value); 329 | else if( expr.oper == Expression.LTE) 330 | return QueryBuilders.rangeQuery(expr.field).lte(expr.value); 331 | 332 | } 333 | 334 | case CommandParserTreeConstants.JJT_PREDICATEEXPRESSION: 335 | if(tree.children.length > 1){ 336 | BoolQueryBuilder fb = QueryBuilders.boolQuery(); 337 | for(Node n: tree.children){ 338 | fb.must(genQueryBuilder((SimpleNode)n, boostFields)); 339 | } 340 | 341 | return fb; 342 | }else 343 | return genQueryBuilder((SimpleNode)tree.children[0], boostFields); 344 | case CommandParserTreeConstants.JJT_OREXPR: 345 | if(tree.children.length > 1){ 346 | BoolQueryBuilder fb = QueryBuilders.boolQuery(); 347 | for(Node n: tree.children){ 348 | fb.should(genQueryBuilder((SimpleNode)n, boostFields)); 349 | } 350 | 351 | return fb; 352 | }else 353 | return genQueryBuilder((SimpleNode)tree.children[0], boostFields); 354 | 355 | 356 | case CommandParserTreeConstants.JJT_ANDEXPR: 357 | if(tree.children.length > 1){ 358 | BoolQueryBuilder fb = QueryBuilders.boolQuery(); 359 | for(Node n: tree.children){ 360 | fb.must(genQueryBuilder((SimpleNode)n, boostFields)); 361 | } 362 | 363 | return fb; 364 | }else 365 | return genQueryBuilder((SimpleNode)tree.children[0], boostFields); 366 | 367 | case CommandParserTreeConstants.JJT_UNARYEXPR: 368 | if(((AST_UnaryExpr)tree).isNot){ 369 | BoolQueryBuilder fb = QueryBuilders.boolQuery().mustNot(genQueryBuilder((SimpleNode)tree.children[0], boostFields)); 370 | return fb; 371 | }else 372 | return genQueryBuilder((SimpleNode)tree.children[0], boostFields); 373 | 374 | 375 | } 376 | 377 | return genQueryBuilder((SimpleNode)tree.children[0], boostFields); 378 | 379 | } 380 | private String starttime = null, endtime = null; 381 | private CommandArgv commandString = null; 382 | 383 | public void setTime(String start, String end){ 384 | starttime = start; 385 | endtime = end; 386 | } 387 | 388 | public void setCommandArgv(CommandArgv cs){ 389 | commandString = cs; 390 | } 391 | 392 | private QueryBuilder getInternalQuery() throws CommandException { 393 | 394 | 395 | if (queryBuilder != null) 396 | return queryBuilder; 397 | 398 | ArrayList allQuerys = new ArrayList(); 399 | 400 | for (Node n : children) { 401 | if (n instanceof AST_OrExpr) { 402 | allQuerys.add(genQueryBuilder((SimpleNode)n, commandString.boostFields)); 403 | break; 404 | } 405 | } 406 | 407 | @SuppressWarnings("unchecked") 408 | ArrayList childTypes = (ArrayList)getOption(Option.HASCHILD); 409 | String parentType = (String) getOption(Option.HASPARENT); 410 | 411 | //FilterBuilder parent_child = null; 412 | if(childTypes != null){ 413 | for(int i = 0; i< childTypes.size(); i++) 414 | 
allQuerys.add(QueryBuilders.hasChildQuery(childTypes.get(i), genQueryBuilder(childExpressions.get(i), commandString.boostFields))); 415 | 416 | }else if(parentType != null){ 417 | 418 | allQuerys.add(QueryBuilders.hasParentQuery(parentType, genQueryBuilder(parentExpression, commandString.boostFields))); 419 | 420 | } 421 | 422 | if(starttime == null) 423 | starttime = (String) getOption(Option.STARTTIME); 424 | if(endtime == null) 425 | endtime = (String) getOption(Option.ENDTIME); 426 | 427 | if(starttime != null | endtime !=null){ 428 | RangeQueryBuilder timeFilter = QueryBuilders.rangeQuery("_timestamp").from(starttime).to(endtime); 429 | allQuerys.add(timeFilter); 430 | } 431 | 432 | if(joinFieldsQuery != null){ 433 | allQuerys.add(joinFieldsQuery); 434 | } 435 | 436 | if( allQuerys.size() == 0) 437 | queryBuilder = null; 438 | else if( allQuerys.size() == 1) 439 | queryBuilder = allQuerys.get(0); 440 | else{ 441 | queryBuilder = QueryBuilders.boolQuery(); 442 | for(QueryBuilder q: allQuerys){ 443 | queryBuilder = ((BoolQueryBuilder)queryBuilder).must(q); 444 | } 445 | } 446 | 447 | 448 | 449 | return queryBuilder; 450 | } 451 | 452 | public void setJoinFieldsQuery(BoolQueryBuilder joinFieldsQuery){ 453 | this.joinFieldsQuery = joinFieldsQuery; 454 | } 455 | 456 | public QueryBuilder getQueryBuilder() throws CommandException{ 457 | //QueryBuilders.constantScoreQuery(queryBuilder) 458 | 459 | return (getInternalQuery() == null)? QueryBuilders.matchAllQuery(): getInternalQuery() ; 460 | 461 | } 462 | 463 | } 464 | /* 465 | * JavaCC - OriginalChecksum=0784cd4733c591c3e29d4817135e748c (do not edit this 466 | * line) 467 | */ 468 | -------------------------------------------------------------------------------- /src/main/java/com/everdata/parser/JavaCharStream.java: -------------------------------------------------------------------------------- 1 | /* Generated By:JavaCC: Do not edit this line. JavaCharStream.java Version 5.0 */ 2 | /* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ 3 | package com.everdata.parser; 4 | 5 | /** 6 | * An implementation of interface CharStream, where the stream is assumed to 7 | * contain only ASCII characters (with java-like unicode escape processing). 8 | */ 9 | 10 | public 11 | class JavaCharStream 12 | { 13 | /** Whether parser is static. */ 14 | public static final boolean staticFlag = false; 15 | 16 | static final int hexval(char c) throws java.io.IOException { 17 | switch(c) 18 | { 19 | case '0' : 20 | return 0; 21 | case '1' : 22 | return 1; 23 | case '2' : 24 | return 2; 25 | case '3' : 26 | return 3; 27 | case '4' : 28 | return 4; 29 | case '5' : 30 | return 5; 31 | case '6' : 32 | return 6; 33 | case '7' : 34 | return 7; 35 | case '8' : 36 | return 8; 37 | case '9' : 38 | return 9; 39 | 40 | case 'a' : 41 | case 'A' : 42 | return 10; 43 | case 'b' : 44 | case 'B' : 45 | return 11; 46 | case 'c' : 47 | case 'C' : 48 | return 12; 49 | case 'd' : 50 | case 'D' : 51 | return 13; 52 | case 'e' : 53 | case 'E' : 54 | return 14; 55 | case 'f' : 56 | case 'F' : 57 | return 15; 58 | } 59 | 60 | throw new java.io.IOException(); // Should never come here 61 | } 62 | 63 | /** Position in buffer. 
*/ 64 | public int bufpos = -1; 65 | int bufsize; 66 | int available; 67 | int tokenBegin; 68 | protected int bufline[]; 69 | protected int bufcolumn[]; 70 | 71 | protected int column = 0; 72 | protected int line = 1; 73 | 74 | protected boolean prevCharIsCR = false; 75 | protected boolean prevCharIsLF = false; 76 | 77 | protected java.io.Reader inputStream; 78 | 79 | protected char[] nextCharBuf; 80 | protected char[] buffer; 81 | protected int maxNextCharInd = 0; 82 | protected int nextCharInd = -1; 83 | protected int inBuf = 0; 84 | protected int tabSize = 8; 85 | 86 | protected void setTabSize(int i) { tabSize = i; } 87 | protected int getTabSize(int i) { return tabSize; } 88 | 89 | protected void ExpandBuff(boolean wrapAround) 90 | { 91 | char[] newbuffer = new char[bufsize + 2048]; 92 | int newbufline[] = new int[bufsize + 2048]; 93 | int newbufcolumn[] = new int[bufsize + 2048]; 94 | 95 | try 96 | { 97 | if (wrapAround) 98 | { 99 | System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); 100 | System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos); 101 | buffer = newbuffer; 102 | 103 | System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); 104 | System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); 105 | bufline = newbufline; 106 | 107 | System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); 108 | System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); 109 | bufcolumn = newbufcolumn; 110 | 111 | bufpos += (bufsize - tokenBegin); 112 | } 113 | else 114 | { 115 | System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); 116 | buffer = newbuffer; 117 | 118 | System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); 119 | bufline = newbufline; 120 | 121 | System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); 122 | bufcolumn = newbufcolumn; 123 | 124 | bufpos -= tokenBegin; 125 | } 126 | } 127 | catch (Throwable t) 128 | { 129 | throw new Error(t.getMessage()); 130 | } 131 | 132 | available = (bufsize += 2048); 133 | tokenBegin = 0; 134 | } 135 | 136 | protected void FillBuff() throws java.io.IOException 137 | { 138 | int i; 139 | if (maxNextCharInd == 4096) 140 | maxNextCharInd = nextCharInd = 0; 141 | 142 | try { 143 | if ((i = inputStream.read(nextCharBuf, maxNextCharInd, 144 | 4096 - maxNextCharInd)) == -1) 145 | { 146 | inputStream.close(); 147 | throw new java.io.IOException(); 148 | } 149 | else 150 | maxNextCharInd += i; 151 | return; 152 | } 153 | catch(java.io.IOException e) { 154 | if (bufpos != 0) 155 | { 156 | --bufpos; 157 | backup(0); 158 | } 159 | else 160 | { 161 | bufline[bufpos] = line; 162 | bufcolumn[bufpos] = column; 163 | } 164 | throw e; 165 | } 166 | } 167 | 168 | protected char ReadByte() throws java.io.IOException 169 | { 170 | if (++nextCharInd >= maxNextCharInd) 171 | FillBuff(); 172 | 173 | return nextCharBuf[nextCharInd]; 174 | } 175 | 176 | /** @return starting character for token. 
*/ 177 | public char BeginToken() throws java.io.IOException 178 | { 179 | if (inBuf > 0) 180 | { 181 | --inBuf; 182 | 183 | if (++bufpos == bufsize) 184 | bufpos = 0; 185 | 186 | tokenBegin = bufpos; 187 | return buffer[bufpos]; 188 | } 189 | 190 | tokenBegin = 0; 191 | bufpos = -1; 192 | 193 | return readChar(); 194 | } 195 | 196 | protected void AdjustBuffSize() 197 | { 198 | if (available == bufsize) 199 | { 200 | if (tokenBegin > 2048) 201 | { 202 | bufpos = 0; 203 | available = tokenBegin; 204 | } 205 | else 206 | ExpandBuff(false); 207 | } 208 | else if (available > tokenBegin) 209 | available = bufsize; 210 | else if ((tokenBegin - available) < 2048) 211 | ExpandBuff(true); 212 | else 213 | available = tokenBegin; 214 | } 215 | 216 | protected void UpdateLineColumn(char c) 217 | { 218 | column++; 219 | 220 | if (prevCharIsLF) 221 | { 222 | prevCharIsLF = false; 223 | line += (column = 1); 224 | } 225 | else if (prevCharIsCR) 226 | { 227 | prevCharIsCR = false; 228 | if (c == '\n') 229 | { 230 | prevCharIsLF = true; 231 | } 232 | else 233 | line += (column = 1); 234 | } 235 | 236 | switch (c) 237 | { 238 | case '\r' : 239 | prevCharIsCR = true; 240 | break; 241 | case '\n' : 242 | prevCharIsLF = true; 243 | break; 244 | case '\t' : 245 | column--; 246 | column += (tabSize - (column % tabSize)); 247 | break; 248 | default : 249 | break; 250 | } 251 | 252 | bufline[bufpos] = line; 253 | bufcolumn[bufpos] = column; 254 | } 255 | 256 | /** Read a character. */ 257 | public char readChar() throws java.io.IOException 258 | { 259 | if (inBuf > 0) 260 | { 261 | --inBuf; 262 | 263 | if (++bufpos == bufsize) 264 | bufpos = 0; 265 | 266 | return buffer[bufpos]; 267 | } 268 | 269 | char c; 270 | 271 | if (++bufpos == available) 272 | AdjustBuffSize(); 273 | 274 | if ((buffer[bufpos] = c = ReadByte()) == '\\') 275 | { 276 | UpdateLineColumn(c); 277 | 278 | int backSlashCnt = 1; 279 | 280 | for (;;) // Read all the backslashes 281 | { 282 | if (++bufpos == available) 283 | AdjustBuffSize(); 284 | 285 | try 286 | { 287 | if ((buffer[bufpos] = c = ReadByte()) != '\\') 288 | { 289 | UpdateLineColumn(c); 290 | // found a non-backslash char. 
291 | if ((c == 'u') && ((backSlashCnt & 1) == 1)) 292 | { 293 | if (--bufpos < 0) 294 | bufpos = bufsize - 1; 295 | 296 | break; 297 | } 298 | 299 | backup(backSlashCnt); 300 | return '\\'; 301 | } 302 | } 303 | catch(java.io.IOException e) 304 | { 305 | // We are returning one backslash so we should only backup (count-1) 306 | if (backSlashCnt > 1) 307 | backup(backSlashCnt-1); 308 | 309 | return '\\'; 310 | } 311 | 312 | UpdateLineColumn(c); 313 | backSlashCnt++; 314 | } 315 | 316 | // Here, we have seen an odd number of backslash's followed by a 'u' 317 | try 318 | { 319 | while ((c = ReadByte()) == 'u') 320 | ++column; 321 | 322 | buffer[bufpos] = c = (char)(hexval(c) << 12 | 323 | hexval(ReadByte()) << 8 | 324 | hexval(ReadByte()) << 4 | 325 | hexval(ReadByte())); 326 | 327 | column += 4; 328 | } 329 | catch(java.io.IOException e) 330 | { 331 | throw new Error("Invalid escape character at line " + line + 332 | " column " + column + "."); 333 | } 334 | 335 | if (backSlashCnt == 1) 336 | return c; 337 | else 338 | { 339 | backup(backSlashCnt - 1); 340 | return '\\'; 341 | } 342 | } 343 | else 344 | { 345 | UpdateLineColumn(c); 346 | return c; 347 | } 348 | } 349 | 350 | @Deprecated 351 | /** 352 | * @deprecated 353 | * @see #getEndColumn 354 | */ 355 | public int getColumn() { 356 | return bufcolumn[bufpos]; 357 | } 358 | 359 | @Deprecated 360 | /** 361 | * @deprecated 362 | * @see #getEndLine 363 | */ 364 | public int getLine() { 365 | return bufline[bufpos]; 366 | } 367 | 368 | /** Get end column. */ 369 | public int getEndColumn() { 370 | return bufcolumn[bufpos]; 371 | } 372 | 373 | /** Get end line. */ 374 | public int getEndLine() { 375 | return bufline[bufpos]; 376 | } 377 | 378 | /** @return column of token start */ 379 | public int getBeginColumn() { 380 | return bufcolumn[tokenBegin]; 381 | } 382 | 383 | /** @return line number of token start */ 384 | public int getBeginLine() { 385 | return bufline[tokenBegin]; 386 | } 387 | 388 | /** Retreat. */ 389 | public void backup(int amount) { 390 | 391 | inBuf += amount; 392 | if ((bufpos -= amount) < 0) 393 | bufpos += bufsize; 394 | } 395 | 396 | /** Constructor. */ 397 | public JavaCharStream(java.io.Reader dstream, 398 | int startline, int startcolumn, int buffersize) 399 | { 400 | inputStream = dstream; 401 | line = startline; 402 | column = startcolumn - 1; 403 | 404 | available = bufsize = buffersize; 405 | buffer = new char[buffersize]; 406 | bufline = new int[buffersize]; 407 | bufcolumn = new int[buffersize]; 408 | nextCharBuf = new char[4096]; 409 | } 410 | 411 | /** Constructor. */ 412 | public JavaCharStream(java.io.Reader dstream, 413 | int startline, int startcolumn) 414 | { 415 | this(dstream, startline, startcolumn, 4096); 416 | } 417 | 418 | /** Constructor. */ 419 | public JavaCharStream(java.io.Reader dstream) 420 | { 421 | this(dstream, 1, 1, 4096); 422 | } 423 | /** Reinitialise. */ 424 | public void ReInit(java.io.Reader dstream, 425 | int startline, int startcolumn, int buffersize) 426 | { 427 | inputStream = dstream; 428 | line = startline; 429 | column = startcolumn - 1; 430 | 431 | if (buffer == null || buffersize != buffer.length) 432 | { 433 | available = bufsize = buffersize; 434 | buffer = new char[buffersize]; 435 | bufline = new int[buffersize]; 436 | bufcolumn = new int[buffersize]; 437 | nextCharBuf = new char[4096]; 438 | } 439 | prevCharIsLF = prevCharIsCR = false; 440 | tokenBegin = inBuf = maxNextCharInd = 0; 441 | nextCharInd = bufpos = -1; 442 | } 443 | 444 | /** Reinitialise. 
*/ 445 | public void ReInit(java.io.Reader dstream, 446 | int startline, int startcolumn) 447 | { 448 | ReInit(dstream, startline, startcolumn, 4096); 449 | } 450 | 451 | /** Reinitialise. */ 452 | public void ReInit(java.io.Reader dstream) 453 | { 454 | ReInit(dstream, 1, 1, 4096); 455 | } 456 | /** Constructor. */ 457 | public JavaCharStream(java.io.InputStream dstream, String encoding, int startline, 458 | int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException 459 | { 460 | this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); 461 | } 462 | 463 | /** Constructor. */ 464 | public JavaCharStream(java.io.InputStream dstream, int startline, 465 | int startcolumn, int buffersize) 466 | { 467 | this(new java.io.InputStreamReader(dstream), startline, startcolumn, 4096); 468 | } 469 | 470 | /** Constructor. */ 471 | public JavaCharStream(java.io.InputStream dstream, String encoding, int startline, 472 | int startcolumn) throws java.io.UnsupportedEncodingException 473 | { 474 | this(dstream, encoding, startline, startcolumn, 4096); 475 | } 476 | 477 | /** Constructor. */ 478 | public JavaCharStream(java.io.InputStream dstream, int startline, 479 | int startcolumn) 480 | { 481 | this(dstream, startline, startcolumn, 4096); 482 | } 483 | 484 | /** Constructor. */ 485 | public JavaCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException 486 | { 487 | this(dstream, encoding, 1, 1, 4096); 488 | } 489 | 490 | /** Constructor. */ 491 | public JavaCharStream(java.io.InputStream dstream) 492 | { 493 | this(dstream, 1, 1, 4096); 494 | } 495 | 496 | /** Reinitialise. */ 497 | public void ReInit(java.io.InputStream dstream, String encoding, int startline, 498 | int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException 499 | { 500 | ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); 501 | } 502 | 503 | /** Reinitialise. */ 504 | public void ReInit(java.io.InputStream dstream, int startline, 505 | int startcolumn, int buffersize) 506 | { 507 | ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); 508 | } 509 | /** Reinitialise. */ 510 | public void ReInit(java.io.InputStream dstream, String encoding, int startline, 511 | int startcolumn) throws java.io.UnsupportedEncodingException 512 | { 513 | ReInit(dstream, encoding, startline, startcolumn, 4096); 514 | } 515 | /** Reinitialise. */ 516 | public void ReInit(java.io.InputStream dstream, int startline, 517 | int startcolumn) 518 | { 519 | ReInit(dstream, startline, startcolumn, 4096); 520 | } 521 | /** Reinitialise. */ 522 | public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException 523 | { 524 | ReInit(dstream, encoding, 1, 1, 4096); 525 | } 526 | 527 | /** Reinitialise. 
*/ 528 | public void ReInit(java.io.InputStream dstream) 529 | { 530 | ReInit(dstream, 1, 1, 4096); 531 | } 532 | 533 | /** @return token image as String */ 534 | public String GetImage() 535 | { 536 | if (bufpos >= tokenBegin) 537 | return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); 538 | else 539 | return new String(buffer, tokenBegin, bufsize - tokenBegin) + 540 | new String(buffer, 0, bufpos + 1); 541 | } 542 | 543 | /** @return suffix */ 544 | public char[] GetSuffix(int len) 545 | { 546 | char[] ret = new char[len]; 547 | 548 | if ((bufpos + 1) >= len) 549 | System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); 550 | else 551 | { 552 | System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, 553 | len - bufpos - 1); 554 | System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); 555 | } 556 | 557 | return ret; 558 | } 559 | 560 | /** Set buffers back to null when finished. */ 561 | public void Done() 562 | { 563 | nextCharBuf = null; 564 | buffer = null; 565 | bufline = null; 566 | bufcolumn = null; 567 | } 568 | 569 | /** 570 | * Method to adjust line and column numbers for the start of a token. 571 | */ 572 | public void adjustBeginLineColumn(int newLine, int newCol) 573 | { 574 | int start = tokenBegin; 575 | int len; 576 | 577 | if (bufpos >= tokenBegin) 578 | { 579 | len = bufpos - tokenBegin + inBuf + 1; 580 | } 581 | else 582 | { 583 | len = bufsize - tokenBegin + bufpos + 1 + inBuf; 584 | } 585 | 586 | int i = 0, j = 0, k = 0; 587 | int nextColDiff = 0, columnDiff = 0; 588 | 589 | while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) 590 | { 591 | bufline[j] = newLine; 592 | nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; 593 | bufcolumn[j] = newCol + columnDiff; 594 | columnDiff = nextColDiff; 595 | i++; 596 | } 597 | 598 | if (i < len) 599 | { 600 | bufline[j] = newLine++; 601 | bufcolumn[j] = newCol + columnDiff; 602 | 603 | while (i++ < len) 604 | { 605 | if (bufline[j = start % bufsize] != bufline[++start % bufsize]) 606 | bufline[j] = newLine++; 607 | else 608 | bufline[j] = newLine; 609 | } 610 | } 611 | 612 | line = bufline[j]; 613 | column = bufcolumn[j]; 614 | } 615 | 616 | } 617 | /* JavaCC - OriginalChecksum=2f4af09614f2d08f359b8abe379e5d76 (do not edit this line) */ 618 | --------------------------------------------------------------------------------