diff --git a/eagle-core/eagle-query/eagle-antlr/pom.xml b/eagle-core/eagle-query/eagle-antlr/pom.xml index e0648a4164..023cfd2c97 100644 --- a/eagle-core/eagle-query/eagle-antlr/pom.xml +++ b/eagle-core/eagle-query/eagle-antlr/pom.xml @@ -40,4 +40,18 @@ commons-lang + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + true + true + org/apache/eagle/query/antlr/generated/* + + + + diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/antlr/generated/EagleFilterLexer.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/antlr/generated/EagleFilterLexer.java index 0899732beb..ff73131531 100755 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/antlr/generated/EagleFilterLexer.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/antlr/generated/EagleFilterLexer.java @@ -34,26 +34,26 @@ public class EagleFilterLexer extends Lexer { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - WHITESPACE=1, OP=2, AND=3, OR=4, ID=5, VALUE=6, SINGLE_VALUE=7, EXPR=8, - NUMBER=9, NULL=10, SET=11, DOUBLEQUOTED_STRING=12, LPAREN=13, RPAREN=14, + WHITESPACE=1, OP=2, AND=3, OR=4, ID=5, VALUE=6, SINGLE_VALUE=7, EXPR=8, + NUMBER=9, NULL=10, SET=11, DOUBLEQUOTED_STRING=12, LPAREN=13, RPAREN=14, LBRACE=15, RBRACE=16; public static String[] modeNames = { "DEFAULT_MODE" }; public static final String[] ruleNames = { - "WHITESPACE", "OP", "AND", "OR", "ID", "VALUE", "SINGLE_VALUE", "EXPR", - "NUMBER", "NULL", "SET", "DOUBLEQUOTED_STRING", "UNSIGN_INT", "STRING", + "WHITESPACE", "OP", "AND", "OR", "ID", "VALUE", "SINGLE_VALUE", "EXPR", + "NUMBER", "NULL", "SET", "DOUBLEQUOTED_STRING", "UNSIGN_INT", "STRING", "LPAREN", "RPAREN", "LBRACE", "RBRACE" }; private static final String[] _LITERAL_NAMES = { - null, null, null, null, null, null, null, null, null, null, null, null, + null, null, null, null, null, 
null, null, null, null, null, null, null, null, "'('", "')'", "'{'", "'}'" }; private static final String[] _SYMBOLIC_NAMES = { - null, "WHITESPACE", "OP", "AND", "OR", "ID", "VALUE", "SINGLE_VALUE", - "EXPR", "NUMBER", "NULL", "SET", "DOUBLEQUOTED_STRING", "LPAREN", "RPAREN", + null, "WHITESPACE", "OP", "AND", "OR", "ID", "VALUE", "SINGLE_VALUE", + "EXPR", "NUMBER", "NULL", "SET", "DOUBLEQUOTED_STRING", "LPAREN", "RPAREN", "LBRACE", "RBRACE" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/antlr/generated/EagleFilterParser.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/antlr/generated/EagleFilterParser.java index 3016bd758e..707173e544 100755 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/antlr/generated/EagleFilterParser.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/antlr/generated/EagleFilterParser.java @@ -31,8 +31,8 @@ public class EagleFilterParser extends Parser { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - WHITESPACE=1, OP=2, AND=3, OR=4, ID=5, VALUE=6, SINGLE_VALUE=7, EXPR=8, - NUMBER=9, NULL=10, SET=11, DOUBLEQUOTED_STRING=12, LPAREN=13, RPAREN=14, + WHITESPACE=1, OP=2, AND=3, OR=4, ID=5, VALUE=6, SINGLE_VALUE=7, EXPR=8, + NUMBER=9, NULL=10, SET=11, DOUBLEQUOTED_STRING=12, LPAREN=13, RPAREN=14, LBRACE=15, RBRACE=16; public static final int RULE_filter = 0, RULE_combine = 1, RULE_equation = 2; @@ -41,12 +41,12 @@ public class EagleFilterParser extends Parser { }; private static final String[] _LITERAL_NAMES = { - null, null, null, null, null, null, null, null, null, null, null, null, + null, null, null, null, null, null, null, null, null, null, null, null, null, "'('", "')'", "'{'", "'}'" }; private static final String[] _SYMBOLIC_NAMES = { - 
null, "WHITESPACE", "OP", "AND", "OR", "ID", "VALUE", "SINGLE_VALUE", - "EXPR", "NUMBER", "NULL", "SET", "DOUBLEQUOTED_STRING", "LPAREN", "RPAREN", + null, "WHITESPACE", "OP", "AND", "OR", "ID", "VALUE", "SINGLE_VALUE", + "EXPR", "NUMBER", "NULL", "SET", "DOUBLEQUOTED_STRING", "LPAREN", "RPAREN", "LBRACE", "RBRACE" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -241,7 +241,7 @@ private CombineContext combine(int _p) throws RecognitionException { } break; } - } + } } setState(27); _errHandler.sync(this); diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ANDExpression.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ANDExpression.java index f575e0fc55..7136343856 100644 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ANDExpression.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ANDExpression.java @@ -20,14 +20,14 @@ import java.util.List; public class ANDExpression { - // TODO use Set data structure to dedupe for optimization? - private List atomicExpressionList = new ArrayList(); + // TODO use Set data structure to dedupe for optimization? 
+ private List atomicExpressionList = new ArrayList(); - public List getAtomicExprList() { - return atomicExpressionList; - } + public List getAtomicExprList() { + return atomicExpressionList; + } - public void setAtomicExprList(List list) { - this.atomicExpressionList = list; - } + public void setAtomicExprList(List list) { + this.atomicExpressionList = list; + } } diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/AtomicExpression.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/AtomicExpression.java index f831edeb76..391755cca2 100755 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/AtomicExpression.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/AtomicExpression.java @@ -17,51 +17,57 @@ package org.apache.eagle.query.parser; public class AtomicExpression { - private String key; - private ComparisonOperator op; - private String value; - private TokenType keyType; - private TokenType valueType; + private String key; + private ComparisonOperator op; + private String value; + private TokenType keyType; + private TokenType valueType; - public String getKey() { - return key; - } - public void setKey(String key) { - this.key = key; - } - public ComparisonOperator getOp() { - return op; - } - public void setOp(ComparisonOperator op) { - this.op = op; - } - public String getValue() { - return value; - } - public void setValue(String value) { - this.value = value; - } - public String toString(){ - if(this.valueType == TokenType.STRING){ - return key + op + "\"" + value + "\""; - }else{ - return key + op + value; - } - } + public String getKey() { + return key; + } - public TokenType getKeyType() { - return keyType; - } + public void setKey(String key) { + this.key = key; + } - public void setKeyType(TokenType keyType) { - this.keyType = keyType; - } - - public TokenType getValueType() { - return valueType; - } + 
public ComparisonOperator getOp() { + return op; + } - public void setValueType(TokenType type) { - this.valueType = type; - } + public void setOp(ComparisonOperator op) { + this.op = op; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String toString() { + if (this.valueType == TokenType.STRING) { + return key + op + "\"" + value + "\""; + } else { + return key + op + value; + } + } + + public TokenType getKeyType() { + return keyType; + } + + public void setKeyType(TokenType keyType) { + this.keyType = keyType; + } + + public TokenType getValueType() { + return valueType; + } + + public void setValueType(TokenType type) { + this.valueType = type; + } } diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ComparisonOperator.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ComparisonOperator.java index 3a1be3757e..e5edfa69a7 100755 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ComparisonOperator.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ComparisonOperator.java @@ -17,39 +17,40 @@ package org.apache.eagle.query.parser; public enum ComparisonOperator { - EQUAL("="), - LIKE("=~"), - IN("IN"), - NOT_IN("NOT IN"), - LESS("<"), - LESS_OR_EQUAL("<="), - GREATER(">"), - GREATER_OR_EQUAL(">="), - NOT_EQUAL("!="), - NOT_LIKE("!=~"), - CONTAINS("CONTAINS"), - NOT_CONTAINS("NOT CONTAINS"), - IS("IS"), - IS_NOT("IS NOT"); + EQUAL("="), + LIKE("=~"), + IN("IN"), + NOT_IN("NOT IN"), + LESS("<"), + LESS_OR_EQUAL("<="), + GREATER(">"), + GREATER_OR_EQUAL(">="), + NOT_EQUAL("!="), + NOT_LIKE("!=~"), + CONTAINS("CONTAINS"), + NOT_CONTAINS("NOT CONTAINS"), + IS("IS"), + IS_NOT("IS NOT"); - private final String _op; - private ComparisonOperator(String op){ - _op = op; - } - - public String toString(){ - return _op; - } - - public static 
ComparisonOperator locateOperator(String op){ - op = op.replaceAll("\\s+"," "); - for(ComparisonOperator o : ComparisonOperator.values()){ - if(op.toUpperCase().equals(o._op)){ - return o; - } - } - throw new UnsupportedExpressionOperatorException(op); - } + private final String op; + + private ComparisonOperator(String op) { + this.op = op; + } + + public String toString() { + return op; + } + + public static ComparisonOperator locateOperator(String op) { + op = op.replaceAll("\\s+", " "); + for (ComparisonOperator o : ComparisonOperator.values()) { + if (op.toUpperCase().equals(o.op)) { + return o; + } + } + throw new UnsupportedExpressionOperatorException(op); + } } diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleANTLRErrorStrategy.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleANTLRErrorStrategy.java index e016ea9037..f0c68ef7f8 100644 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleANTLRErrorStrategy.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleANTLRErrorStrategy.java @@ -19,30 +19,31 @@ import org.antlr.v4.runtime.*; public class EagleANTLRErrorStrategy extends DefaultErrorStrategy { - /** Instead of recovering from exception {@code e}, re-throw it wrapped - * in a {@link org.antlr.v4.runtime.misc.ParseCancellationException} so it is not caught by the - * rule function catches. Use {@link Exception#getCause()} to get the - * original {@link org.antlr.v4.runtime.RecognitionException}. 
- */ - @Override - public void recover(Parser recognizer, RecognitionException e) { - for (ParserRuleContext context = recognizer.getContext(); context != null; context = context.getParent()) { - context.exception = e; - } - super.recover(recognizer,e); - } + /** + * Instead of recovering from exception {@code e}, re-throw it wrapped + * in a {@link org.antlr.v4.runtime.misc.ParseCancellationException} so it is not caught by the + * rule function catches. Use {@link Exception#getCause()} to get the + * original {@link org.antlr.v4.runtime.RecognitionException}. + */ + @Override + public void recover(Parser recognizer, RecognitionException e) { + for (ParserRuleContext context = recognizer.getContext(); context != null; context = context.getParent()) { + context.exception = e; + } + super.recover(recognizer, e); + } - /** Make sure we don't attempt to recover inline; if the parser - * successfully recovers, it won't throw an exception. - */ - @Override - public Token recoverInline(Parser recognizer) - throws RecognitionException - { - InputMismatchException e = new InputMismatchException(recognizer); - for (ParserRuleContext context = recognizer.getContext(); context != null; context = context.getParent()) { - context.exception = e; - } - return super.recoverInline(recognizer); - } + /** + * Make sure we don't attempt to recover inline; if the parser + * successfully recovers, it won't throw an exception. 
+ */ + @Override + public Token recoverInline(Parser recognizer) + throws RecognitionException { + InputMismatchException e = new InputMismatchException(recognizer); + for (ParserRuleContext context = recognizer.getContext(); context != null; context = context.getParent()) { + context.exception = e; + } + return super.recoverInline(recognizer); + } } diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryFilterListenerImpl.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryFilterListenerImpl.java index a631e53c60..70057667db 100755 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryFilterListenerImpl.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryFilterListenerImpl.java @@ -16,12 +16,12 @@ */ package org.apache.eagle.query.parser; -import org.apache.eagle.query.antlr.generated.EagleFilterListener; -import org.apache.eagle.query.antlr.generated.EagleFilterParser; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ErrorNode; import org.antlr.v4.runtime.tree.TerminalNode; import org.apache.commons.lang.StringEscapeUtils; +import org.apache.eagle.query.antlr.generated.EagleFilterListener; +import org.apache.eagle.query.antlr.generated.EagleFilterParser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -29,148 +29,165 @@ import java.util.Stack; import java.util.regex.Matcher; -public class EagleQueryFilterListenerImpl implements EagleFilterListener{ - private final static Logger LOG = LoggerFactory.getLogger(EagleQueryFilterListenerImpl.class); - private Stack _stack = new Stack(); - - public ORExpression result(){ - return _stack.pop(); - } - - public void enterEquation(EagleFilterParser.EquationContext ctx){ - } - - public void exitEquation(EagleFilterParser.EquationContext ctx){ - TerminalNode id = ctx.ID(); - TerminalNode op = ctx.OP(); - 
List values = ctx.VALUE(); - TerminalNode value = values.get(0); - - if(values.size() == 2){ - // value op value - id = values.get(0); - value = values.get(1); - } - - if(LOG.isDebugEnabled()) LOG.debug("ID:" + id.getText() + ", OP:" + op.getText() + ", VALUE:" + value); - - AtomicExpression kv = new AtomicExpression(); - kv.setKey(id.getText()); - kv.setOp(ComparisonOperator.locateOperator(op.getText())); - - try{ - kv.setValueType(TokenType.locate(value.getText())); - }catch (Exception ex){ - LOG.error("Failed to locate value type for: " + value.getText() + " due to exception: " + ex.getMessage(), ex); - } - - try{ - kv.setKeyType(TokenType.locate(id.getText())); - }catch (Exception ex){ - LOG.error("Failed to locate id type for: " + id.getText() + " due to exception: " + ex.getMessage(), ex); - } - -// if(id != null){ - kv.setKey(postProcessNode(id.getText(),kv.getKeyType())); -// } - -// if(value != null){ - kv.setValue(postProcessNode(value.getText(),kv.getValueType())); - // As to List value, it will escape in List parser but not here - if(kv.getValueType() != TokenType.LIST) kv.setValue(StringEscapeUtils.unescapeJava(kv.getValue())); -// } - - // push to stack - ORExpression orExpr = new ORExpression(); - ANDExpression andExpr = new ANDExpression(); - andExpr.getAtomicExprList().add(kv); - orExpr.getANDExprList().add(andExpr); - _stack.push(orExpr); - } - - private String postProcessNode(String text,TokenType type){ - int len = text.length(); - int start=0,end = len; - if(text.startsWith("\"")) start=1; - if(text.endsWith("\"")) end = len -1; - text = text.substring(start, end); - if(type == TokenType.EXP){ - Matcher matcher = TokenConstant.EXP_PATTERN.matcher(text); - if(matcher.find()){ - text = matcher.group(1); - } - text = text.replace(TokenConstant.ID_PREFIX,TokenConstant.WHITE_SPACE); - } - return text; - } - - public void enterCombine(EagleFilterParser.CombineContext ctx){ - - } - - public void exitCombine(EagleFilterParser.CombineContext ctx){ - int 
numChild = ctx.getChildCount(); - if(numChild == 1){ - if(LOG.isDebugEnabled()) LOG.debug("Only one child, skip ..."); - return; // does nothing for a combine which has only one equation - } - - if((ctx.LPAREN() != null) && (ctx.RPAREN() != null)){ - if(LOG.isDebugEnabled()) LOG.debug("LPAREN + RPAREN rule matched, skip ..."); - return; // does nothing for a combine which is within parenthesis - } - ORExpression orExprRight = _stack.pop(); - ORExpression orExprLeft = _stack.pop(); - TerminalNode node = ctx.AND(); - if(node != null){ - ORExpression newORExpr = new ORExpression(); - for(ANDExpression left : orExprLeft.getANDExprList()){ - for(ANDExpression right : orExprRight.getANDExprList()){ - ANDExpression tmp = new ANDExpression(); - tmp.getAtomicExprList().addAll(left.getAtomicExprList()); - tmp.getAtomicExprList().addAll(right.getAtomicExprList()); - newORExpr.getANDExprList().add(tmp); - } - } - _stack.push(newORExpr); - return; - } - - node = ctx.OR(); - if(node != null){ - ORExpression newORExpr = new ORExpression(); - for(ANDExpression andExpr : orExprLeft.getANDExprList()){ - newORExpr.getANDExprList().add(andExpr); - } - for(ANDExpression andExpr : orExprRight.getANDExprList()){ - newORExpr.getANDExprList().add(andExpr); - } - _stack.push(newORExpr); - return; - } - LOG.warn("Should never come here!"); - } - - public void enterFilter(EagleFilterParser.FilterContext ctx){ - - } - - public void exitFilter(EagleFilterParser.FilterContext ctx){ - // print all relations (KeyValueFilter AND KeyValueFilter) OR (KeyValueFilter AND KeyValueFilter) OR (KeyValueFilter AND KeyValueFilter)" - ORExpression orExpr = _stack.peek(); - if(LOG.isDebugEnabled()) LOG.debug(orExpr.toString()); - } - - public void visitTerminal(TerminalNode node){ - - } - public void visitErrorNode(ErrorNode node){ - - } - public void enterEveryRule(ParserRuleContext ctx){ +public class EagleQueryFilterListenerImpl implements EagleFilterListener { + private static final Logger LOG = 
LoggerFactory.getLogger(EagleQueryFilterListenerImpl.class); + private Stack stack = new Stack(); + + public ORExpression result() { + return stack.pop(); } - - public void exitEveryRule(ParserRuleContext ctx){ - + + public void enterEquation(EagleFilterParser.EquationContext ctx) { + } + + public void exitEquation(EagleFilterParser.EquationContext ctx) { + TerminalNode id = ctx.ID(); + TerminalNode op = ctx.OP(); + List values = ctx.VALUE(); + TerminalNode value = values.get(0); + + if (values.size() == 2) { + // value op value + id = values.get(0); + value = values.get(1); + } + + if (LOG.isDebugEnabled()) { + LOG.debug("ID:" + id.getText() + ", OP:" + op.getText() + ", VALUE:" + value); + } + + AtomicExpression kv = new AtomicExpression(); + kv.setKey(id.getText()); + kv.setOp(ComparisonOperator.locateOperator(op.getText())); + + try { + kv.setValueType(TokenType.locate(value.getText())); + } catch (Exception ex) { + LOG.error("Failed to locate value type for: " + value.getText() + " due to exception: " + ex.getMessage(), ex); + } + + try { + kv.setKeyType(TokenType.locate(id.getText())); + } catch (Exception ex) { + LOG.error("Failed to locate id type for: " + id.getText() + " due to exception: " + ex.getMessage(), ex); + } + + //if(id != null){ + kv.setKey(postProcessNode(id.getText(), kv.getKeyType())); + //} + + //if(value != null){ + kv.setValue(postProcessNode(value.getText(), kv.getValueType())); + // As to List value, it will escape in List parser but not here + if (kv.getValueType() != TokenType.LIST) { + kv.setValue(StringEscapeUtils.unescapeJava(kv.getValue())); + } + //} + + // push to stack + ORExpression orExpr = new ORExpression(); + ANDExpression andExpr = new ANDExpression(); + andExpr.getAtomicExprList().add(kv); + orExpr.getANDExprList().add(andExpr); + stack.push(orExpr); + } + + private String postProcessNode(String text, TokenType type) { + int len = text.length(); + int start = 0; + int end = len; + if (text.startsWith("\"")) { + start = 
1; + } + if (text.endsWith("\"")) { + end = len - 1; + } + text = text.substring(start, end); + if (type == TokenType.EXP) { + Matcher matcher = TokenConstant.EXP_PATTERN.matcher(text); + if (matcher.find()) { + text = matcher.group(1); + } + text = text.replace(TokenConstant.ID_PREFIX, TokenConstant.WHITE_SPACE); + } + return text; + } + + public void enterCombine(EagleFilterParser.CombineContext ctx) { + + } + + public void exitCombine(EagleFilterParser.CombineContext ctx) { + int numChild = ctx.getChildCount(); + if (numChild == 1) { + if (LOG.isDebugEnabled()) { + LOG.debug("Only one child, skip ..."); + } + return; // does nothing for a combine which has only one equation + } + + if ((ctx.LPAREN() != null) && (ctx.RPAREN() != null)) { + if (LOG.isDebugEnabled()) { + LOG.debug("lparen + RPAREN rule matched, skip ..."); + } + return; // does nothing for a combine which is within parenthesis + } + ORExpression orExprRight = stack.pop(); + ORExpression orExprLeft = stack.pop(); + TerminalNode node = ctx.AND(); + if (node != null) { + ORExpression newORExpr = new ORExpression(); + for (ANDExpression left : orExprLeft.getANDExprList()) { + for (ANDExpression right : orExprRight.getANDExprList()) { + ANDExpression tmp = new ANDExpression(); + tmp.getAtomicExprList().addAll(left.getAtomicExprList()); + tmp.getAtomicExprList().addAll(right.getAtomicExprList()); + newORExpr.getANDExprList().add(tmp); + } + } + stack.push(newORExpr); + return; + } + + node = ctx.OR(); + if (node != null) { + ORExpression newORExpr = new ORExpression(); + for (ANDExpression andExpr : orExprLeft.getANDExprList()) { + newORExpr.getANDExprList().add(andExpr); + } + for (ANDExpression andExpr : orExprRight.getANDExprList()) { + newORExpr.getANDExprList().add(andExpr); + } + stack.push(newORExpr); + return; + } + LOG.warn("Should never come here!"); + } + + public void enterFilter(EagleFilterParser.FilterContext ctx) { + + } + + public void exitFilter(EagleFilterParser.FilterContext ctx) { + 
// print all relations (KeyValueFilter AND KeyValueFilter) OR (KeyValueFilter AND KeyValueFilter) OR (KeyValueFilter AND KeyValueFilter)" + ORExpression orExpr = stack.peek(); + if (LOG.isDebugEnabled()) { + LOG.debug(orExpr.toString()); + } + } + + public void visitTerminal(TerminalNode node) { + + } + + public void visitErrorNode(ErrorNode node) { + + } + + public void enterEveryRule(ParserRuleContext ctx) { + } + + public void exitEveryRule(ParserRuleContext ctx) { + } } diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryParseException.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryParseException.java index 48fc694118..c8321ca66f 100755 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryParseException.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryParseException.java @@ -16,14 +16,14 @@ */ package org.apache.eagle.query.parser; -public class EagleQueryParseException extends Exception{ - private static final long serialVersionUID = -8440811651318471641L; +public class EagleQueryParseException extends Exception { + private static final long serialVersionUID = -8440811651318471641L; - public EagleQueryParseException(String message){ - super(message); - } + public EagleQueryParseException(String message) { + super(message); + } - public EagleQueryParseException(String message, Throwable cause) { - super(message, cause); - } + public EagleQueryParseException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryParser.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryParser.java index f2315b27db..b5698805d3 100755 --- 
a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryParser.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/EagleQueryParser.java @@ -16,39 +16,40 @@ */ package org.apache.eagle.query.parser; -import org.apache.eagle.query.antlr.generated.EagleFilterLexer; -import org.apache.eagle.query.antlr.generated.EagleFilterParser; import org.antlr.v4.runtime.ANTLRInputStream; import org.antlr.v4.runtime.CommonTokenStream; +import org.apache.eagle.query.antlr.generated.EagleFilterLexer; +import org.apache.eagle.query.antlr.generated.EagleFilterParser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class EagleQueryParser { - private static final Logger LOG = LoggerFactory.getLogger(EagleQueryParser.class); - private String _query; - public EagleQueryParser(String query){ - _query = query; - } + private static final Logger LOG = LoggerFactory.getLogger(EagleQueryParser.class); + private String query; + + public EagleQueryParser(String query) { + this.query = query; + } - public ORExpression parse() throws EagleQueryParseException{ - try{ - EagleFilterLexer lexer = new EagleFilterLexer(new ANTLRInputStream(_query)); - CommonTokenStream tokens = new CommonTokenStream(lexer); - tokens.fill(); - EagleFilterParser p = new EagleFilterParser(tokens); - p.setErrorHandler(new EagleANTLRErrorStrategy()); - p.setBuildParseTree(true); - EagleQueryFilterListenerImpl listener = new EagleQueryFilterListenerImpl(); - p.addParseListener(listener); - EagleFilterParser.FilterContext fc = p.filter(); - if(fc.exception != null){ - LOG.error("Can not successfully parse the query:" + _query, fc.exception); - throw fc.exception; - } - return listener.result(); - }catch(Exception ex){ - LOG.error("Can not successfully parse the query:", ex); - throw new EagleQueryParseException("can not successfully parse the query:" + _query); - } - } + public ORExpression parse() throws EagleQueryParseException { + try { 
+ EagleFilterLexer lexer = new EagleFilterLexer(new ANTLRInputStream(query)); + CommonTokenStream tokens = new CommonTokenStream(lexer); + tokens.fill(); + EagleFilterParser p = new EagleFilterParser(tokens); + p.setErrorHandler(new EagleANTLRErrorStrategy()); + p.setBuildParseTree(true); + EagleQueryFilterListenerImpl listener = new EagleQueryFilterListenerImpl(); + p.addParseListener(listener); + EagleFilterParser.FilterContext fc = p.filter(); + if (fc.exception != null) { + LOG.error("Can not successfully parse the query:" + query, fc.exception); + throw fc.exception; + } + return listener.result(); + } catch (Exception ex) { + LOG.error("Can not successfully parse the query:", ex); + throw new EagleQueryParseException("can not successfully parse the query:" + query); + } + } } diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/LogicalOperator.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/LogicalOperator.java index 374186e59f..8c95991da8 100644 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/LogicalOperator.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/LogicalOperator.java @@ -17,7 +17,7 @@ package org.apache.eagle.query.parser; public enum LogicalOperator { - AND, - OR, - NOT + AND, + OR, + NOT } diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ORExpression.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ORExpression.java index 3a43a6d4ed..2c414f4d63 100644 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ORExpression.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/ORExpression.java @@ -20,41 +20,41 @@ import java.util.List; public class ORExpression { - private List andExprList = new ArrayList(); + private List andExprList = new ArrayList(); - 
public List getANDExprList() { - return andExprList; - } + public List getANDExprList() { + return andExprList; + } - public void setANDExprList(List list) { - this.andExprList = list; - } - - public String toString(){ - StringBuffer sb = new StringBuffer(); - boolean first = true; - for(ANDExpression andRel : andExprList){ - if(first) - first = false; - else{ - sb.append(" "); - sb.append(LogicalOperator.OR); - sb.append(" "); - } - sb.append("("); - boolean firstAND = true; - for(AtomicExpression kv : andRel.getAtomicExprList()){ - if(firstAND) - firstAND = false; - else{ - sb.append(" "); - sb.append(LogicalOperator.AND); - sb.append(" "); - } - sb.append(kv); - } - sb.append(")"); - } - return sb.toString(); - } + public void setANDExprList(List list) { + this.andExprList = list; + } + + public String toString() { + StringBuffer sb = new StringBuffer(); + boolean first = true; + for (ANDExpression andRel : andExprList) { + if (first) { + first = false; + } else { + sb.append(" "); + sb.append(LogicalOperator.OR); + sb.append(" "); + } + sb.append("("); + boolean firstAND = true; + for (AtomicExpression kv : andRel.getAtomicExprList()) { + if (firstAND) { + firstAND = false; + } else { + sb.append(" "); + sb.append(LogicalOperator.AND); + sb.append(" "); + } + sb.append(kv); + } + sb.append(")"); + } + return sb.toString(); + } } diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/TokenConstant.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/TokenConstant.java index f93a63e6e0..010cd37144 100755 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/TokenConstant.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/TokenConstant.java @@ -20,31 +20,33 @@ import java.util.regex.Pattern; public final class TokenConstant { - public final static Pattern EXP_PATTERN= 
Pattern.compile("^EXP\\{(.+)\\}(\\s+AS\\s+)?\\s*(.+)?\\s*$",Pattern.CASE_INSENSITIVE); - public final static Pattern STRING_PATTERN= Pattern.compile("^(\"(.*?\n)*.*\")$"); - public final static Pattern ARRAY_PATTERN= Pattern.compile("^(\\(.*\\))$"); - public final static Pattern NUMBER_PATTERN= Pattern.compile("^((-|\\+)?\\s*[0-9]+(\\.[0-9]+)?)$"); - public final static Pattern NULL_PATTERN= Pattern.compile("^(NULL|null)$"); - public final static Pattern ID_PATTERN= Pattern.compile("^@(.+)$"); + public static final Pattern EXP_PATTERN = Pattern.compile("^EXP\\{(.+)\\}(\\s+AS\\s+)?\\s*(.+)?\\s*$", Pattern.CASE_INSENSITIVE); + public static final Pattern STRING_PATTERN = Pattern.compile("^(\"(.*?\n)*.*\")$"); + public static final Pattern ARRAY_PATTERN = Pattern.compile("^(\\(.*\\))$"); + public static final Pattern NUMBER_PATTERN = Pattern.compile("^((-|\\+)?\\s*[0-9]+(\\.[0-9]+)?)$"); + public static final Pattern NULL_PATTERN = Pattern.compile("^(NULL|null)$"); + public static final Pattern ID_PATTERN = Pattern.compile("^@(.+)$"); - public final static String ID_PREFIX = "@"; - public final static String WHITE_SPACE = ""; + public static final String ID_PREFIX = "@"; + public static final String WHITE_SPACE = ""; - public static boolean isExpression(String query){ - if(query == null) return false; + public static boolean isExpression(String query) { + if (query == null) { + return false; + } Matcher matcher = EXP_PATTERN.matcher(query); return matcher.matches(); } /** - * EXP{ expression } AS alias => expression + * EXP{ expression } AS alias => expression. 
* * @param expression * @return */ - public static String parseExpressionContent(String expression){ + public static String parseExpressionContent(String expression) { Matcher matcher = EXP_PATTERN.matcher(expression); - if(matcher.find()){ + if (matcher.find()) { expression = matcher.group(1); } return expression; diff --git a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/UnsupportedExpressionOperatorException.java b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/UnsupportedExpressionOperatorException.java index 40997c90d0..de30b8b870 100644 --- a/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/UnsupportedExpressionOperatorException.java +++ b/eagle-core/eagle-query/eagle-antlr/src/main/java/org/apache/eagle/query/parser/UnsupportedExpressionOperatorException.java @@ -16,10 +16,10 @@ */ package org.apache.eagle.query.parser; -public class UnsupportedExpressionOperatorException extends RuntimeException{ - private static final long serialVersionUID = 565210592983703093L; +public class UnsupportedExpressionOperatorException extends RuntimeException { + private static final long serialVersionUID = 565210592983703093L; - public UnsupportedExpressionOperatorException(String message){ - super(message); - } + public UnsupportedExpressionOperatorException(String message) { + super(message); + } } diff --git a/eagle-core/eagle-query/eagle-antlr/src/test/java/org/apache/eagle/query/parser/test/TestEagleQueryParser.java b/eagle-core/eagle-query/eagle-antlr/src/test/java/org/apache/eagle/query/parser/test/TestEagleQueryParser.java index 2f5b47b624..7eff01fcc9 100755 --- a/eagle-core/eagle-query/eagle-antlr/src/test/java/org/apache/eagle/query/parser/test/TestEagleQueryParser.java +++ b/eagle-core/eagle-query/eagle-antlr/src/test/java/org/apache/eagle/query/parser/test/TestEagleQueryParser.java @@ -23,810 +23,813 @@ import org.slf4j.LoggerFactory; public class TestEagleQueryParser { 
- - private static final Logger LOG = LoggerFactory.getLogger(TestEagleQueryParser.class); - - @Test - public void testSingleExpression(){ - String query = "@cluster=\"a\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@cluster", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("a", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - LOG.info(or.toString()); - Assert.assertEquals("(@cluster=\"a\")", or.toString()); - } - - @Test - public void testLessThanExpression(){ - String query = "@field1<\"1\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@field1", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("<", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - LOG.info(or.toString()); - Assert.assertEquals("(@field1<\"1\")", or.toString()); - } - - @Test - public void testLessOrEqualExpression(){ - String query = "@field1<=\"1\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - 
Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@field1", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("<=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - LOG.info(or.toString()); - Assert.assertEquals("(@field1<=\"1\")", or.toString()); - } - - @Test - public void testGreaterThanExpression(){ - String query = "@field1>\"1\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@field1", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals(">", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - LOG.info(or.toString()); - Assert.assertEquals("(@field1>\"1\")", or.toString()); - } - - @Test - public void testGreaterOrEqualExpression(){ - String query = "@field1>=\"1\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@field1", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals(">=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.STRING, 
or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - LOG.info(or.toString()); - Assert.assertEquals("(@field1>=\"1\")", or.toString()); - } - - @Test - public void testMultipleANDExpression(){ - String query = "@cluster=\"abc\" AND @host=\"dc123.xyz.com\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(2, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@cluster", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("abc", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals("@host", or.getANDExprList().get(0).getAtomicExprList().get(1).getKey()); - Assert.assertEquals("dc123.xyz.com", or.getANDExprList().get(0).getAtomicExprList().get(1).getValue()); - Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(1).getValueType()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(1).getOp().toString()); - LOG.info(or.toString()); - Assert.assertEquals("(@cluster=\"abc\" AND @host=\"dc123.xyz.com\")", or.toString()); - - query = "@datacenter=\"dc1\" AND @cluster=\"abc\" AND @host=\"dc123.xyz.com\" "; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(3, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@datacenter", 
or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("dc1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals("@cluster", or.getANDExprList().get(0).getAtomicExprList().get(1).getKey()); - Assert.assertEquals("abc", or.getANDExprList().get(0).getAtomicExprList().get(1).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(1).getOp().toString()); - Assert.assertEquals("@host", or.getANDExprList().get(0).getAtomicExprList().get(2).getKey()); - Assert.assertEquals("dc123.xyz.com", or.getANDExprList().get(0).getAtomicExprList().get(2).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(2).getOp().toString()); - LOG.info(or.toString()); - Assert.assertEquals("(@datacenter=\"dc1\" AND @cluster=\"abc\" AND @host=\"dc123.xyz.com\")", or.toString()); - } - - @Test - public void testMultipleORExpression(){ - String query = "@cluster=\"abc\" OR @host=\"dc123.xyz.com\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==2); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals(1, or.getANDExprList().get(1).getAtomicExprList().size()); - Assert.assertEquals("@cluster", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("abc", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals("@host", or.getANDExprList().get(1).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("dc123.xyz.com", 
or.getANDExprList().get(1).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(1).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals("(@cluster=\"abc\") OR (@host=\"dc123.xyz.com\")", or.toString()); - - query = "@datacenter=\"dc1\" OR @cluster=\"abc\" OR @host=\"dc123.xyz.com\""; - parser = new EagleQueryParser(query); - or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } Assert.assertTrue(or.getANDExprList().size()==3); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals(1, or.getANDExprList().get(1).getAtomicExprList().size()); - Assert.assertEquals(1, or.getANDExprList().get(2).getAtomicExprList().size()); - Assert.assertEquals("@datacenter", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("dc1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals("@cluster", or.getANDExprList().get(1).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("abc", or.getANDExprList().get(1).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(1).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals("@host", or.getANDExprList().get(2).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("dc123.xyz.com", or.getANDExprList().get(2).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(2).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals("(@datacenter=\"dc1\") OR (@cluster=\"abc\") OR (@host=\"dc123.xyz.com\")", or.toString()); - } - - @Test - public void testANDORCombination(){ - String query = "@cluster=\"abc\" OR @host=\"dc123.xyz.com\" AND @datacenter=\"dc1\""; - EagleQueryParser parser = new 
EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } LOG.info(or.toString()); - Assert.assertEquals("(@cluster=\"abc\") OR (@host=\"dc123.xyz.com\" AND @datacenter=\"dc1\")", or.toString()); - - query = "(@cluster=\"abc\" AND @host=\"dc123.xyz.com\") AND @datacenter=\"dc1\""; - parser = new EagleQueryParser(query); - or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } LOG.info(or.toString()); - Assert.assertEquals("(@cluster=\"abc\" AND @host=\"dc123.xyz.com\" AND @datacenter=\"dc1\")", or.toString()); - - query = "(@cluster=\"abc\" OR @host=\"dc123.xyz.com\") AND @datacenter=\"dc1\""; - parser = new EagleQueryParser(query); - or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - LOG.info(or.toString()); - Assert.assertEquals("(@cluster=\"abc\" AND @datacenter=\"dc1\") OR (@host=\"dc123.xyz.com\" AND @datacenter=\"dc1\")", or.toString()); - - query = "(@cluster=\"abc\" OR @host=\"dc123.xyz.com\") AND (@datacenter=\"dc1\" OR @cluster=\"bcd\")"; - parser = new EagleQueryParser(query); - or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - LOG.info(or.toString()); - Assert.assertEquals("(@cluster=\"abc\" AND @datacenter=\"dc1\") OR (@cluster=\"abc\" AND @cluster=\"bcd\") OR (@host=\"dc123.xyz.com\" AND @datacenter=\"dc1\") OR (@host=\"dc123.xyz.com\" AND @cluster=\"bcd\")", or.toString()); - - query = "(@cluster=\"abc\" OR @host=\"dc123.xyz.com\") AND (@datacenter=\"dc1\" AND @cluster=\"bcd\")"; - parser = new EagleQueryParser(query); - or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - LOG.info(or.toString()); - Assert.assertEquals("(@cluster=\"abc\" AND @datacenter=\"dc1\" AND @cluster=\"bcd\") OR 
(@host=\"dc123.xyz.com\" AND @datacenter=\"dc1\" AND @cluster=\"bcd\")", or.toString()); - } - - @Test - public void testNegativeCase(){ - String query = "@cluster = \"a\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - LOG.info(or.toString()); - Assert.assertEquals("(@cluster=\"a\")", or.toString()); - - query = "@cluster = a\""; - parser = new EagleQueryParser(query); - or = null; - try{ - or = parser.parse(); - }catch(Exception ex){ - LOG.error("Can not successfully parse the query:" + query, ex); - } - Assert.assertTrue(or == null); - - query = "@cluster = \"\"a\""; - parser = new EagleQueryParser(query); - or = null; - try{ - or = parser.parse(); - }catch(Exception ex){ - LOG.error("Can not successfully parse the query:" + query, ex); - } - Assert.assertNotNull(or); - - query = "@cluster=\"cluster1\" AND @datacenter=\"dc1\" AND @remediationID=8888\" AND @remediationStatus=\"status\""; - parser = new EagleQueryParser(query); - or = null; - try{ - or = parser.parse(); - }catch(Exception ex){ - LOG.error("Can not successfully parse the query:" + query, ex); - } - Assert.assertTrue(or == null); - } - - @Test - public void testSimpleWildcardMatchQuery(){ - String expected = "-[]/{}()*+?.\\^$|"; - String query = "@user=\"-[]/{}()*+?.\\\\^$|\""; - System.out.println(query); - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@user", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals(expected, or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", 
or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - LOG.info(or.toString()); - Assert.assertEquals("(@user=\""+expected+"\")", or.toString()); - } - - @Test - public void testNumberQuery() { - String query = "@field1 >= -1.234"; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@field1", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals(-1.234, Double.parseDouble(or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()), 0.0001); - Assert.assertEquals(">=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.NUMBER, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - } - - @Test - public void testContainQuery() { - String query = "@name contains \"jame\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("jame", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("CONTAINS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - } - - @Test - public void testNotContainQuery() { - String query = "@name not contains \"jame\""; - EagleQueryParser parser = new EagleQueryParser(query); - 
ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("jame", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("NOT CONTAINS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name NOT CONTAINS \"jame\""; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("jame", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("NOT CONTAINS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name NOT CONTAINS \"jame\""; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("jame", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("NOT CONTAINS", 
or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - } - - @Test - public void testNullQuery() { - String query = "@name is null"; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("null", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name IS NULL"; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("NULL", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name is not null"; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, 
or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("null", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IS NOT", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name is not NULL"; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("NULL", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IS NOT", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name = NULL"; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("NULL", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name != NULL"; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - 
}catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("NULL", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("!=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - } - - @Test - public void testIsOrIsNotQuery(){ - String query = "@name is \"james\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("james", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name is not \"james\""; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("james", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IS NOT", 
or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name is 1.234"; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("1.234", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.NUMBER, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name is not 1.234"; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("1.234", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IS NOT", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.NUMBER, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - } - - @Test - public void testINListQuery() { - String query = "@name in (\"jame\",\"lebron\")"; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, 
or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("(\"jame\",\"lebron\")", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IN", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.LIST, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name NOT IN (1,\"lebron\")"; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("(1,\"lebron\")", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("NOT IN", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.LIST, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@name not in (1,\"lebron\")"; - parser = new EagleQueryParser(query); - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("(1,\"lebron\")", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("NOT IN", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.LIST, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - } - - @Test - public void 
testEmptyString() { - String query = "@name = \"\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertNotNull(or); - Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - } - - /** - * Will split tokens for escaped string - * - * "va\"lue" => "va\"lue" - * ("va\"lue","va,lue") => ["va\\\"lue","va,lue"] - * - */ - @Test - public void testEscapedQuotesString(){ - String query = "@value = \"value\\\"content, and another content\""; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertNotNull(or); - Assert.assertEquals("@value", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("value\"content, and another content", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@value in (\"value\\\"content, and another content\",\"others item\")"; - parser = new EagleQueryParser(query); - or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertNotNull(or); - Assert.assertEquals("@value", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - 
Assert.assertEquals("(\"value\\\"content, and another content\",\"others item\")", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IN", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.LIST, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - - query = "@value in (\"value\\\"content, and another content\",\"others item\",-1.2345)"; - parser = new EagleQueryParser(query); - or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertNotNull(or); - Assert.assertEquals("@value", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("(\"value\\\"content, and another content\",\"others item\",-1.2345)", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("IN", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals(TokenType.LIST, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); - } - - @Test - public void testCompareAtomicExpression(){ - String query = "EXP{@mapProgress} < EXP{@reduceProgress}"; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); - Assert.assertEquals("mapProgress", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); - Assert.assertEquals("reduceProgress", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); - Assert.assertEquals("<", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); - Assert.assertEquals("(mapProgress3", and.getAtomicExprList().get(0).toString()); - Assert.assertEquals("@b>10", 
and.getAtomicExprList().get(1).toString()); - - AtomicExpression leftExpression = and.getAtomicExprList().get(0); - Assert.assertEquals("a + b", leftExpression.getKey()); - Assert.assertEquals(TokenType.EXP, leftExpression.getKeyType()); - Assert.assertEquals(">", leftExpression.getOp().toString()); - Assert.assertEquals("3", leftExpression.getValue()); - Assert.assertEquals(TokenType.NUMBER, leftExpression.getValueType()); - AtomicExpression rightExpression = and.getAtomicExprList().get(1); - Assert.assertEquals("@b", rightExpression.getKey()); - Assert.assertEquals(TokenType.ID, rightExpression.getKeyType()); - Assert.assertEquals(">", rightExpression.getOp().toString()); - Assert.assertEquals("10",rightExpression.getValue()); - Assert.assertEquals(TokenType.NUMBER, rightExpression.getValueType()); - } - - @Test - public void testComplexExpressionWithConditionAndNestedBrace(){ - String query = "(EXP{(@a + @b) / ((@c + @d)*(@e)/(@d))} > EXP{@c + @d}) AND (EXP{@e + @f} > EXP{@h + @i})"; - EagleQueryParser parser = new EagleQueryParser(query); - ORExpression or = null; - try{ - or = parser.parse(); - }catch(EagleQueryParseException ex){ - Assert.fail(ex.getMessage()); - } - Assert.assertTrue(or.getANDExprList().size()==1); - ANDExpression and = or.getANDExprList().get(0); - Assert.assertEquals(2, and.getAtomicExprList().size()); - Assert.assertEquals("(a + b) / ((c + d)*(e)/(d))>c + d", and.getAtomicExprList().get(0).toString()); - Assert.assertEquals("e + f>h + i", and.getAtomicExprList().get(1).toString()); - - AtomicExpression leftExpression = and.getAtomicExprList().get(0); - Assert.assertEquals("(a + b) / ((c + d)*(e)/(d))", leftExpression.getKey()); - Assert.assertEquals(">", leftExpression.getOp().toString()); - Assert.assertEquals("c + d", leftExpression.getValue()); - - AtomicExpression rightExpression = and.getAtomicExprList().get(1); - Assert.assertEquals("e + f", rightExpression.getKey()); - Assert.assertEquals(">", rightExpression.getOp().toString()); - 
Assert.assertEquals("h + i",rightExpression.getValue()); - } - - @Test - public void testNegativeExpressionCase(){ - String query = "(EXP{(@a + @b) / ((@c + @d)*(@e)/(@d))}} > EXP{@c + @d}) AND (EXP{@e + @f} > EXP{@h + @i})"; - EagleQueryParser parser = new EagleQueryParser(query); - boolean parseFail = true; - try{ - parser.parse(); - }catch(EagleQueryParseException ex){ - parseFail = false; - } - Assert.assertFalse(parseFail); - - query = "(EXP{{(@a + @b) / ((@c + @d)*(@e)/(@d))}} > EXP{@c + @d}) AND (EXP{@e + @f} > EXP{@h + @i})"; - parser = new EagleQueryParser(query); - parseFail = true; - try{ - parser.parse(); - }catch(EagleQueryParseException ex){ - parseFail = false; - } - Assert.assertFalse(parseFail); - - query = "(EXP{(@a + @b) / ((@c + @d)*(@e)/(@d))} > EXP{@c + @d}) AND EXP{})"; - parser = new EagleQueryParser(query); - parseFail = true; - try{ - parser.parse(); - }catch(EagleQueryParseException ex){ - parseFail = false; - } - Assert.assertFalse(parseFail); - - } - - @Test - public void testIsExpression(){ - Assert.assertTrue(TokenConstant.isExpression("EXP{ count }")); - Assert.assertFalse(TokenConstant.isExpression("count")); - } + + private static final Logger LOG = LoggerFactory.getLogger(TestEagleQueryParser.class); + + @Test + public void testSingleExpression() { + String query = "@cluster=\"a\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@cluster", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("a", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + LOG.info(or.toString()); + 
Assert.assertEquals("(@cluster=\"a\")", or.toString()); + } + + @Test + public void testLessThanExpression() { + String query = "@field1<\"1\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@field1", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("<", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + LOG.info(or.toString()); + Assert.assertEquals("(@field1<\"1\")", or.toString()); + } + + @Test + public void testLessOrEqualExpression() { + String query = "@field1<=\"1\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@field1", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("<=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + LOG.info(or.toString()); + Assert.assertEquals("(@field1<=\"1\")", or.toString()); + } + + @Test + public void testGreaterThanExpression() { + String query = "@field1>\"1\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + 
Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@field1", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals(">", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + LOG.info(or.toString()); + Assert.assertEquals("(@field1>\"1\")", or.toString()); + } + + @Test + public void testGreaterOrEqualExpression() { + String query = "@field1>=\"1\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@field1", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals(">=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + LOG.info(or.toString()); + Assert.assertEquals("(@field1>=\"1\")", or.toString()); + } + + @Test + public void testMultipleANDExpression() { + String query = "@cluster=\"abc\" AND @host=\"dc123.xyz.com\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(2, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@cluster", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("abc", 
or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals("@host", or.getANDExprList().get(0).getAtomicExprList().get(1).getKey()); + Assert.assertEquals("dc123.xyz.com", or.getANDExprList().get(0).getAtomicExprList().get(1).getValue()); + Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(1).getValueType()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(1).getOp().toString()); + LOG.info(or.toString()); + Assert.assertEquals("(@cluster=\"abc\" AND @host=\"dc123.xyz.com\")", or.toString()); + + query = "@datacenter=\"dc1\" AND @cluster=\"abc\" AND @host=\"dc123.xyz.com\" "; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(3, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@datacenter", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("dc1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals("@cluster", or.getANDExprList().get(0).getAtomicExprList().get(1).getKey()); + Assert.assertEquals("abc", or.getANDExprList().get(0).getAtomicExprList().get(1).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(1).getOp().toString()); + Assert.assertEquals("@host", or.getANDExprList().get(0).getAtomicExprList().get(2).getKey()); + Assert.assertEquals("dc123.xyz.com", or.getANDExprList().get(0).getAtomicExprList().get(2).getValue()); + 
Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(2).getOp().toString()); + LOG.info(or.toString()); + Assert.assertEquals("(@datacenter=\"dc1\" AND @cluster=\"abc\" AND @host=\"dc123.xyz.com\")", or.toString()); + } + + @Test + public void testMultipleORExpression() { + String query = "@cluster=\"abc\" OR @host=\"dc123.xyz.com\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 2); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals(1, or.getANDExprList().get(1).getAtomicExprList().size()); + Assert.assertEquals("@cluster", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("abc", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals("@host", or.getANDExprList().get(1).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("dc123.xyz.com", or.getANDExprList().get(1).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(1).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals("(@cluster=\"abc\") OR (@host=\"dc123.xyz.com\")", or.toString()); + + query = "@datacenter=\"dc1\" OR @cluster=\"abc\" OR @host=\"dc123.xyz.com\""; + parser = new EagleQueryParser(query); + or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 3); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals(1, or.getANDExprList().get(1).getAtomicExprList().size()); + Assert.assertEquals(1, or.getANDExprList().get(2).getAtomicExprList().size()); 
+ Assert.assertEquals("@datacenter", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("dc1", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals("@cluster", or.getANDExprList().get(1).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("abc", or.getANDExprList().get(1).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(1).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals("@host", or.getANDExprList().get(2).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("dc123.xyz.com", or.getANDExprList().get(2).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(2).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals("(@datacenter=\"dc1\") OR (@cluster=\"abc\") OR (@host=\"dc123.xyz.com\")", or.toString()); + } + + @Test + public void testANDORCombination() { + String query = "@cluster=\"abc\" OR @host=\"dc123.xyz.com\" AND @datacenter=\"dc1\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + LOG.info(or.toString()); + Assert.assertEquals("(@cluster=\"abc\") OR (@host=\"dc123.xyz.com\" AND @datacenter=\"dc1\")", or.toString()); + + query = "(@cluster=\"abc\" AND @host=\"dc123.xyz.com\") AND @datacenter=\"dc1\""; + parser = new EagleQueryParser(query); + or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + LOG.info(or.toString()); + Assert.assertEquals("(@cluster=\"abc\" AND @host=\"dc123.xyz.com\" AND @datacenter=\"dc1\")", or.toString()); + + query = "(@cluster=\"abc\" OR @host=\"dc123.xyz.com\") AND @datacenter=\"dc1\""; + parser = new EagleQueryParser(query); 
+ or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + LOG.info(or.toString()); + Assert.assertEquals("(@cluster=\"abc\" AND @datacenter=\"dc1\") OR (@host=\"dc123.xyz.com\" AND @datacenter=\"dc1\")", or.toString()); + + query = "(@cluster=\"abc\" OR @host=\"dc123.xyz.com\") AND (@datacenter=\"dc1\" OR @cluster=\"bcd\")"; + parser = new EagleQueryParser(query); + or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + LOG.info(or.toString()); + Assert.assertEquals("(@cluster=\"abc\" AND @datacenter=\"dc1\") OR (@cluster=\"abc\" AND @cluster=\"bcd\") OR (@host=\"dc123.xyz.com\" AND @datacenter=\"dc1\") OR (@host=\"dc123.xyz.com\" " + + "AND @cluster=\"bcd\")", or.toString()); + + query = "(@cluster=\"abc\" OR @host=\"dc123.xyz.com\") AND (@datacenter=\"dc1\" AND @cluster=\"bcd\")"; + parser = new EagleQueryParser(query); + or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + LOG.info(or.toString()); + Assert.assertEquals("(@cluster=\"abc\" AND @datacenter=\"dc1\" AND @cluster=\"bcd\") OR (@host=\"dc123.xyz.com\" AND @datacenter=\"dc1\" AND @cluster=\"bcd\")", or.toString()); + } + + @Test + public void testNegativeCase() { + String query = "@cluster = \"a\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + LOG.info(or.toString()); + Assert.assertEquals("(@cluster=\"a\")", or.toString()); + + query = "@cluster = a\""; + parser = new EagleQueryParser(query); + or = null; + try { + or = parser.parse(); + } catch (Exception ex) { + LOG.error("Can not successfully parse the query:" + query, ex); + } + Assert.assertTrue(or == null); + + query = "@cluster = \"\"a\""; + parser = new EagleQueryParser(query); + or = null; + try { + 
or = parser.parse(); + } catch (Exception ex) { + LOG.error("Can not successfully parse the query:" + query, ex); + } + Assert.assertNotNull(or); + + query = "@cluster=\"cluster1\" AND @datacenter=\"dc1\" AND @remediationID=8888\" AND @remediationStatus=\"status\""; + parser = new EagleQueryParser(query); + or = null; + try { + or = parser.parse(); + } catch (Exception ex) { + LOG.error("Can not successfully parse the query:" + query, ex); + } + Assert.assertTrue(or == null); + } + + @Test + public void testSimpleWildcardMatchQuery() { + String expected = "-[]/{}()*+?.\\^$|"; + String query = "@user=\"-[]/{}()*+?.\\\\^$|\""; + System.out.println(query); + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@user", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals(expected, or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + LOG.info(or.toString()); + Assert.assertEquals("(@user=\"" + expected + "\")", or.toString()); + } + + @Test + public void testNumberQuery() { + String query = "@field1 >= -1.234"; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@field1", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals(-1.234, 
Double.parseDouble(or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()), 0.0001); + Assert.assertEquals(">=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.NUMBER, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + } + + @Test + public void testContainQuery() { + String query = "@name contains \"jame\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("jame", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("CONTAINS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + } + + @Test + public void testNotContainQuery() { + String query = "@name not contains \"jame\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("jame", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("NOT CONTAINS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = 
"@name NOT CONTAINS \"jame\""; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("jame", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("NOT CONTAINS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name NOT CONTAINS \"jame\""; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("jame", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("NOT CONTAINS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + } + + @Test + public void testNullQuery() { + String query = "@name is null"; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + 
Assert.assertEquals("null", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("IS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name IS NULL"; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("NULL", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("IS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name is not null"; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("null", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("IS NOT", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name is not NULL"; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + 
Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("NULL", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("IS NOT", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name = NULL"; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("NULL", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name != NULL"; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("NULL", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("!=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.NULL, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + } + + @Test + public void testIsOrIsNotQuery() { + String query = "@name is 
\"james\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("james", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("IS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name is not \"james\""; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("james", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("IS NOT", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name is 1.234"; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("1.234", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + 
Assert.assertEquals("IS", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.NUMBER, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name is not 1.234"; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("1.234", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("IS NOT", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.NUMBER, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + } + + @Test + public void testINListQuery() { + String query = "@name in (\"jame\",\"lebron\")"; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("(\"jame\",\"lebron\")", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("IN", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.LIST, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name NOT IN (1,\"lebron\")"; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + 
Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("(1,\"lebron\")", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("NOT IN", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.LIST, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@name not in (1,\"lebron\")"; + parser = new EagleQueryParser(query); + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("(1,\"lebron\")", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("NOT IN", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.LIST, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + } + + @Test + public void testEmptyString() { + String query = "@name = \"\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertNotNull(or); + Assert.assertEquals("@name", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.STRING, 
or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + } + + /** + * Will split tokens for escaped string + *

+ * "va\"lue" => "va\"lue" + * ("va\"lue","va,lue") => ["va\\\"lue","va,lue"] + */ + @Test + public void testEscapedQuotesString() { + String query = "@value = \"value\\\"content, and another content\""; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertNotNull(or); + Assert.assertEquals("@value", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("value\"content, and another content", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("=", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.STRING, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@value in (\"value\\\"content, and another content\",\"others item\")"; + parser = new EagleQueryParser(query); + or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertNotNull(or); + Assert.assertEquals("@value", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("(\"value\\\"content, and another content\",\"others item\")", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("IN", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.LIST, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + + query = "@value in (\"value\\\"content, and another content\",\"others item\",-1.2345)"; + parser = new EagleQueryParser(query); + or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertNotNull(or); + Assert.assertEquals("@value", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + 
Assert.assertEquals("(\"value\\\"content, and another content\",\"others item\",-1.2345)", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("IN", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals(TokenType.LIST, or.getANDExprList().get(0).getAtomicExprList().get(0).getValueType()); + } + + @Test + public void testCompareAtomicExpression() { + String query = "EXP{@mapProgress} < EXP{@reduceProgress}"; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + Assert.assertEquals(1, or.getANDExprList().get(0).getAtomicExprList().size()); + Assert.assertEquals("mapProgress", or.getANDExprList().get(0).getAtomicExprList().get(0).getKey()); + Assert.assertEquals("reduceProgress", or.getANDExprList().get(0).getAtomicExprList().get(0).getValue()); + Assert.assertEquals("<", or.getANDExprList().get(0).getAtomicExprList().get(0).getOp().toString()); + Assert.assertEquals("(mapProgress3", and.getAtomicExprList().get(0).toString()); + Assert.assertEquals("@b>10", and.getAtomicExprList().get(1).toString()); + + AtomicExpression leftExpression = and.getAtomicExprList().get(0); + Assert.assertEquals("a + b", leftExpression.getKey()); + Assert.assertEquals(TokenType.EXP, leftExpression.getKeyType()); + Assert.assertEquals(">", leftExpression.getOp().toString()); + Assert.assertEquals("3", leftExpression.getValue()); + Assert.assertEquals(TokenType.NUMBER, leftExpression.getValueType()); + AtomicExpression rightExpression = and.getAtomicExprList().get(1); + Assert.assertEquals("@b", rightExpression.getKey()); + Assert.assertEquals(TokenType.ID, rightExpression.getKeyType()); + Assert.assertEquals(">", rightExpression.getOp().toString()); + Assert.assertEquals("10", rightExpression.getValue()); + 
Assert.assertEquals(TokenType.NUMBER, rightExpression.getValueType()); + } + + @Test + public void testComplexExpressionWithConditionAndNestedBrace() { + String query = "(EXP{(@a + @b) / ((@c + @d)*(@e)/(@d))} > EXP{@c + @d}) AND (EXP{@e + @f} > EXP{@h + @i})"; + EagleQueryParser parser = new EagleQueryParser(query); + ORExpression or = null; + try { + or = parser.parse(); + } catch (EagleQueryParseException ex) { + Assert.fail(ex.getMessage()); + } + Assert.assertTrue(or.getANDExprList().size() == 1); + ANDExpression and = or.getANDExprList().get(0); + Assert.assertEquals(2, and.getAtomicExprList().size()); + Assert.assertEquals("(a + b) / ((c + d)*(e)/(d))>c + d", and.getAtomicExprList().get(0).toString()); + Assert.assertEquals("e + f>h + i", and.getAtomicExprList().get(1).toString()); + + AtomicExpression leftExpression = and.getAtomicExprList().get(0); + Assert.assertEquals("(a + b) / ((c + d)*(e)/(d))", leftExpression.getKey()); + Assert.assertEquals(">", leftExpression.getOp().toString()); + Assert.assertEquals("c + d", leftExpression.getValue()); + + AtomicExpression rightExpression = and.getAtomicExprList().get(1); + Assert.assertEquals("e + f", rightExpression.getKey()); + Assert.assertEquals(">", rightExpression.getOp().toString()); + Assert.assertEquals("h + i", rightExpression.getValue()); + } + + @Test + public void testNegativeExpressionCase() { + String query = "(EXP{(@a + @b) / ((@c + @d)*(@e)/(@d))}} > EXP{@c + @d}) AND (EXP{@e + @f} > EXP{@h + @i})"; + EagleQueryParser parser = new EagleQueryParser(query); + boolean parseFail = true; + try { + parser.parse(); + } catch (EagleQueryParseException ex) { + parseFail = false; + } + Assert.assertFalse(parseFail); + + query = "(EXP{{(@a + @b) / ((@c + @d)*(@e)/(@d))}} > EXP{@c + @d}) AND (EXP{@e + @f} > EXP{@h + @i})"; + parser = new EagleQueryParser(query); + parseFail = true; + try { + parser.parse(); + } catch (EagleQueryParseException ex) { + parseFail = false; + } + Assert.assertFalse(parseFail); + 
+ query = "(EXP{(@a + @b) / ((@c + @d)*(@e)/(@d))} > EXP{@c + @d}) AND EXP{})"; + parser = new EagleQueryParser(query); + parseFail = true; + try { + parser.parse(); + } catch (EagleQueryParseException ex) { + parseFail = false; + } + Assert.assertFalse(parseFail); + + } + + @Test + public void testIsExpression() { + Assert.assertTrue(TokenConstant.isExpression("EXP{ count }")); + Assert.assertFalse(TokenConstant.isExpression("count")); + } } diff --git a/eagle-core/eagle-query/eagle-antlr/src/test/java/org/apache/eagle/query/parser/test/TestValueType.java b/eagle-core/eagle-query/eagle-antlr/src/test/java/org/apache/eagle/query/parser/test/TestValueType.java index 9627b7852a..01912e702a 100755 --- a/eagle-core/eagle-query/eagle-antlr/src/test/java/org/apache/eagle/query/parser/test/TestValueType.java +++ b/eagle-core/eagle-query/eagle-antlr/src/test/java/org/apache/eagle/query/parser/test/TestValueType.java @@ -22,52 +22,52 @@ import org.junit.Test; public class TestValueType { - @Test - public void testLocateValueType(){ - Assert.assertEquals(TokenType.EXP, TokenType.locate("EXP{ 1+1 = 2 }")); - Assert.assertEquals(TokenType.EXP, TokenType.locate("EXP{ sum(a + b) > 1 }")); + @Test + public void testLocateValueType() { + Assert.assertEquals(TokenType.EXP, TokenType.locate("EXP{ 1+1 = 2 }")); + Assert.assertEquals(TokenType.EXP, TokenType.locate("EXP{ sum(a + b) > 1 }")); - Assert.assertEquals(TokenType.STRING, TokenType.locate("\"\"")); - Assert.assertEquals(TokenType.STRING, TokenType.locate("\"abc\"")); + Assert.assertEquals(TokenType.STRING, TokenType.locate("\"\"")); + Assert.assertEquals(TokenType.STRING, TokenType.locate("\"abc\"")); - Assert.assertEquals(TokenType.LIST, TokenType.locate("(1,\"ab\")")); - Assert.assertEquals(TokenType.LIST, TokenType.locate("(\"\",\"ab\")")); + Assert.assertEquals(TokenType.LIST, TokenType.locate("(1,\"ab\")")); + Assert.assertEquals(TokenType.LIST, TokenType.locate("(\"\",\"ab\")")); - Assert.assertEquals(TokenType.NUMBER, 
TokenType.locate("1")); - Assert.assertEquals(TokenType.NUMBER, TokenType.locate("1.234")); - Assert.assertEquals(TokenType.NUMBER, TokenType.locate("-1.234")); - Assert.assertEquals(TokenType.NUMBER, TokenType.locate("+1.234")); - Assert.assertEquals(TokenType.NUMBER, TokenType.locate("- 1.234")); - Assert.assertEquals(TokenType.NUMBER, TokenType.locate("+ 1.234")); - Assert.assertEquals(TokenType.NUMBER, TokenType.locate(" + 1.234 ")); - Assert.assertEquals(TokenType.NUMBER, TokenType.locate(" + 1.234 ")); + Assert.assertEquals(TokenType.NUMBER, TokenType.locate("1")); + Assert.assertEquals(TokenType.NUMBER, TokenType.locate("1.234")); + Assert.assertEquals(TokenType.NUMBER, TokenType.locate("-1.234")); + Assert.assertEquals(TokenType.NUMBER, TokenType.locate("+1.234")); + Assert.assertEquals(TokenType.NUMBER, TokenType.locate("- 1.234")); + Assert.assertEquals(TokenType.NUMBER, TokenType.locate("+ 1.234")); + Assert.assertEquals(TokenType.NUMBER, TokenType.locate(" + 1.234 ")); + Assert.assertEquals(TokenType.NUMBER, TokenType.locate(" + 1.234 ")); - Assert.assertEquals(TokenType.NULL, TokenType.locate("null")); - Assert.assertEquals(TokenType.NULL, TokenType.locate("NULL")); + Assert.assertEquals(TokenType.NULL, TokenType.locate("null")); + Assert.assertEquals(TokenType.NULL, TokenType.locate("NULL")); - Assert.assertEquals(TokenType.STRING,TokenType.locate("\"SELECT start.hr AS hr,\n" + - " ...details.inst_type(Stage-10)\"")); + Assert.assertEquals(TokenType.STRING, TokenType.locate("\"SELECT start.hr AS hr,\n" + + " ...details.inst_type(Stage-10)\"")); - // Bad format - boolean gotEx = false; - try{ - TokenType.locate("+ 1.234.567"); - }catch (IllegalArgumentException ex){ - gotEx = true; - } - Assert.assertTrue(gotEx); - } + // Bad format + boolean gotEx = false; + try { + TokenType.locate("+ 1.234.567"); + } catch (IllegalArgumentException ex) { + gotEx = true; + } + Assert.assertTrue(gotEx); + } - @Test - public void testParseExpressionContent(){ - String 
expression = "EXP{ @fieldName /2 } AS a"; - Assert.assertEquals(" @fieldName /2 ", TokenConstant.parseExpressionContent(expression)); + @Test + public void testParseExpressionContent() { + String expression = "EXP{ @fieldName /2 } AS a"; + Assert.assertEquals(" @fieldName /2 ", TokenConstant.parseExpressionContent(expression)); - expression = "EXP{ @fieldName /2 } a"; - Assert.assertEquals(" @fieldName /2 ", TokenConstant.parseExpressionContent(expression)); + expression = "EXP{ @fieldName /2 } a"; + Assert.assertEquals(" @fieldName /2 ", TokenConstant.parseExpressionContent(expression)); - expression = "EXP{ @fieldName /2 }"; - Assert.assertEquals(" @fieldName /2 ", TokenConstant.parseExpressionContent(expression)); - } + expression = "EXP{ @fieldName /2 }"; + Assert.assertEquals(" @fieldName /2 ", TokenConstant.parseExpressionContent(expression)); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditConstants.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditConstants.java index d8143d99ec..953e4802ef 100644 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditConstants.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditConstants.java @@ -19,17 +19,17 @@ public class AuditConstants { - public static final String AUDIT_SERVICE_ENDPOINT = "AuditService"; - - // HBase Operations - public static final String AUDIT_EVENT_CREATE = "CREATE"; - public static final String AUDIT_EVENT_UPDATE = "UPDATE"; - public static final String AUDIT_EVENT_DELETE = "DELETE"; - - // Audit table details - public static final String AUDIT_TABLE = "serviceAudit"; - public static final String AUDIT_COLUMN_SERVICE_NAME = "serviceName"; - public static final String AUDIT_COLUMN_USER_ID = "userID"; - public static final String AUDIT_COLUMN_OPERATION = "operation"; - public 
static final String AUDIT_COLUMN_TIMESTAMP = "auditTimestamp"; + public static final String AUDIT_SERVICE_ENDPOINT = "AuditService"; + + // HBase Operations + public static final String AUDIT_EVENT_CREATE = "CREATE"; + public static final String AUDIT_EVENT_UPDATE = "UPDATE"; + public static final String AUDIT_EVENT_DELETE = "DELETE"; + + // Audit table details + public static final String AUDIT_TABLE = "serviceAudit"; + public static final String AUDIT_COLUMN_SERVICE_NAME = "serviceName"; + public static final String AUDIT_COLUMN_USER_ID = "userID"; + public static final String AUDIT_COLUMN_OPERATION = "operation"; + public static final String AUDIT_COLUMN_TIMESTAMP = "auditTimestamp"; } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditEvent.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditEvent.java index 8f34a9146a..b4d1cedff7 100644 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditEvent.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditEvent.java @@ -17,35 +17,35 @@ package org.apache.eagle.audit.common; -import java.util.List; - import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; +import java.util.List; + public class AuditEvent extends java.util.EventObject { - public AuditEvent(Object source, String serviceName, List auditEntities) { - super(source); - this.serviceName = serviceName; - this.auditEntities = auditEntities; - } - - private String serviceName; - private List auditEntities; - - public String getServiceName() { - return serviceName; - } - - public List getAuditEntities() { - return auditEntities; - } - - public String toString() { + public AuditEvent(Object source, String serviceName, List auditEntities) { + super(source); + this.serviceName = serviceName; + this.auditEntities = auditEntities; + } + + private String 
serviceName; + private List auditEntities; + + public String getServiceName() { + return serviceName; + } + + public List getAuditEntities() { + return auditEntities; + } + + public String toString() { StringBuilder returnString = new StringBuilder(getClass().getName()); returnString.append("["); returnString.append("serviceName=").append(getServiceName()); returnString.append("; source=").append(getSource().getClass()); returnString.append("]"); return returnString.toString(); - } + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditListenerMap.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditListenerMap.java index e433f53d1b..44c5576e33 100644 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditListenerMap.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/AuditListenerMap.java @@ -21,7 +21,7 @@ import org.apache.eagle.audit.listener.AuditListenerProxy; public final class AuditListenerMap extends BaseAuditListenerMap { - + private static final AuditListener[] EMPTY = {}; @Override diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/BaseAuditListenerMap.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/BaseAuditListenerMap.java index a2c70a9b5f..e201d8f8ed 100644 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/BaseAuditListenerMap.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/common/BaseAuditListenerMap.java @@ -17,16 +17,10 @@ package org.apache.eagle.audit.common; -import java.util.ArrayList; -import java.util.Collections; -import java.util.EventListener; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; import 
java.util.Map.Entry; -abstract class BaseAuditListenerMap { +abstract class BaseAuditListenerMap { private Map map; @@ -40,8 +34,8 @@ public final synchronized void add(String name, L listener) { } L[] array = this.map.get(name); int size = (array != null) - ? array.length - : 0; + ? array.length + : 0; L[] clone = newArray(size + 1); clone[size] = listener; @@ -63,8 +57,7 @@ public final synchronized void remove(String name, L listener) { System.arraycopy(array, 0, clone, 0, i); System.arraycopy(array, i + 1, clone, i, size - i); this.map.put(name, clone); - } - else { + } else { this.map.remove(name); if (this.map.isEmpty()) { this.map = null; @@ -79,8 +72,8 @@ public final synchronized void remove(String name, L listener) { public final synchronized L[] get(String name) { return (this.map != null) - ? this.map.get(name) - : null; + ? this.map.get(name) + : null; } public final void set(String name, L[] listeners) { @@ -89,8 +82,7 @@ public final void set(String name, L[] listeners) { this.map = new HashMap(); } this.map.put(name, listeners); - } - else if (this.map != null) { + } else if (this.map != null) { this.map.remove(name); if (this.map.isEmpty()) { this.map = null; @@ -141,8 +133,8 @@ public final synchronized boolean hasListeners(String name) { public final Set> getEntries() { return (this.map != null) - ? this.map.entrySet() - : Collections.>emptySet(); + ? 
this.map.entrySet() + : Collections.>emptySet(); } public abstract L extract(L listener); diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/dao/ServiceAuditDAO.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/dao/ServiceAuditDAO.java index c76708eec2..69eb57470c 100644 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/dao/ServiceAuditDAO.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/dao/ServiceAuditDAO.java @@ -17,74 +17,82 @@ package org.apache.eagle.audit.dao; -import java.util.List; - import org.apache.eagle.audit.entity.GenericAuditEntity; +import java.util.List; + public interface ServiceAuditDAO { - /** - * Retrieve all audits of alert definition for a specific site and data source. - * @return - * @throws Exception - */ - List findPolicyAudit(String site, String dataSource) throws Exception; + /** + * Retrieve all audits of alert definition for a specific site and data source. + * + * @return + * @throws Exception + */ + List findPolicyAudit(String site, String dataSource) throws Exception; + + /** + * Retrieve all audits of site definition for the given site. + * + * @return + * @throws Exception + */ + List findSiteAudit(String site) throws Exception; + + /** + * Retrieve all audits of datasource definition for the given data source. + * + * @param dataSource + * @return + * @throws Exception + */ + List findDataSourceAudit(String dataSource) throws Exception; + + /** + * Retrieve all audits specific to a service. + * + * @param serviceName + * @return + * @throws Exception + */ + List findServiceAudit(String serviceName) throws Exception; + + /** + * Retrieve all audits specific to a service and specific to a userID. 
+ * + * @param serviceName + * @param userID + * @return + * @throws Exception + */ + List findServiceAuditByUser(String serviceName, String userID) throws Exception; + + /** + * Retrieve all audits specific to a service and specific to an action. + * + * @param serviceName + * @param action + * @return + * @throws Exception + */ + List findServiceAuditByAction(String serviceName, String action) throws Exception; + + /** + * Retrieve all audits specific to a user. + * + * @param userID + * @return + * @throws Exception + */ + List findUserServiceAudit(String userID) throws Exception; - /** - * Retrieve all audits of site definition for the given site. - * @return - * @throws Exception - */ - List findSiteAudit(String site) throws Exception; - - /** - * Retrieve all audits of datasource definition for the given data source. - * @param dataSource - * @return - * @throws Exception - */ - List findDataSourceAudit(String dataSource) throws Exception; - - /** - * Retrieve all audits specific to a service. - * @param serviceName - * @return - * @throws Exception - */ - List findServiceAudit(String serviceName) throws Exception; - - /** - * Retrieve all audits specific to a service and specific to a userID. - * @param serviceName - * @param userID - * @return - * @throws Exception - */ - List findServiceAuditByUser(String serviceName, String userID) throws Exception; - - /** - * Retrieve all audits specific to a service and specific to an action. - * @param serviceName - * @param action - * @return - * @throws Exception - */ - List findServiceAuditByAction(String serviceName, String action) throws Exception; - - /** - * Retrieve all audits specific to a user. - * @param userID - * @return - * @throws Exception - */ - List findUserServiceAudit(String userID) throws Exception; - - /** - * Retrieve all audits specific to a user and specific to an action. 
- * @param userID - * @param action - * @return - * @throws Exception - */ - List findUserServiceAuditByAction(String userID, String action) throws Exception; + /** + * Retrieve all audits specific to a user and specific to an action. + * + * @param userID + * @param action + * @return + * @throws Exception + */ + List findUserServiceAuditByAction(String userID, String action) throws Exception; } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/dao/ServiceAuditDAOImpl.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/dao/ServiceAuditDAOImpl.java index 41332ae1f2..d3d2a0d50e 100644 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/dao/ServiceAuditDAOImpl.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/dao/ServiceAuditDAOImpl.java @@ -17,8 +17,6 @@ package org.apache.eagle.audit.dao; -import java.util.List; - import org.apache.commons.lang.time.DateUtils; import org.apache.eagle.audit.common.AuditConstants; import org.apache.eagle.audit.entity.GenericAuditEntity; @@ -29,148 +27,150 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.List; + public class ServiceAuditDAOImpl implements ServiceAuditDAO { - - private final Logger LOG = LoggerFactory.getLogger(ServiceAuditDAOImpl.class); + + private static final Logger LOG = LoggerFactory.getLogger(ServiceAuditDAOImpl.class); private final EagleServiceConnector connector; - - public ServiceAuditDAOImpl(EagleServiceConnector connector){ + + public ServiceAuditDAOImpl(EagleServiceConnector connector) { this.connector = connector; } - @Override + @Override public List findPolicyAudit(String site, String application) throws Exception { - try { - IEagleServiceClient client = new EagleServiceClientImpl(connector); - String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"AlertDefinitionService\" AND @site=\"" + 
site + "\" AND @application=\"" + application + "\"]{*}"; - GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); + try { + IEagleServiceClient client = new EagleServiceClientImpl(connector); + String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"AlertDefinitionService\" AND @site=\"" + site + "\" AND @application=\"" + application + "\"]{*}"; + GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); client.close(); if (response.getException() != null) { throw new Exception("Exception in querying eagle service: " + response.getException()); } return response.getObj(); - } catch (Exception exception) { - LOG.error("Exception in retrieving audit entry: " + exception); - throw new IllegalStateException(exception); - } + } catch (Exception exception) { + LOG.error("Exception in retrieving audit entry: " + exception); + throw new IllegalStateException(exception); + } } - @Override + @Override public List findSiteAudit(String site) throws Exception { - try { - IEagleServiceClient client = new EagleServiceClientImpl(connector); - String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"AlertDataSourceService\" AND @site=\"" + site + "\"]{*}"; - GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); + try { + IEagleServiceClient client = new EagleServiceClientImpl(connector); + String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"AlertDataSourceService\" AND @site=\"" + site + "\"]{*}"; + GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); client.close(); if (response.getException() != null) { throw new 
Exception("Exception in querying eagle service: " + response.getException()); } return response.getObj(); - } catch (Exception exception) { - LOG.error("Exception in retrieving audit entry: " + exception); - throw new IllegalStateException(exception); - } + } catch (Exception exception) { + LOG.error("Exception in retrieving audit entry: " + exception); + throw new IllegalStateException(exception); + } } - - @Override + + @Override public List findDataSourceAudit(String application) throws Exception { - try { - IEagleServiceClient client = new EagleServiceClientImpl(connector); - String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"AlertDataSourceService\" AND @application=\"" + application + "\"]{*}"; - GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); + try { + IEagleServiceClient client = new EagleServiceClientImpl(connector); + String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"AlertDataSourceService\" AND @application=\"" + application + "\"]{*}"; + GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); client.close(); if (response.getException() != null) { throw new Exception("Exception in querying eagle service: " + response.getException()); } return response.getObj(); - } catch (Exception exception) { - LOG.error("Exception in retrieving audit entry: " + exception); - throw new IllegalStateException(exception); - } + } catch (Exception exception) { + LOG.error("Exception in retrieving audit entry: " + exception); + throw new IllegalStateException(exception); + } } - - @Override - public List findServiceAudit(String serviceName) throws Exception { - try { - IEagleServiceClient client = new EagleServiceClientImpl(connector); - String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"" + 
serviceName + "\"]{*}"; - GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); + + @Override + public List findServiceAudit(String serviceName) throws Exception { + try { + IEagleServiceClient client = new EagleServiceClientImpl(connector); + String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"" + serviceName + "\"]{*}"; + GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); client.close(); if (response.getException() != null) { throw new Exception("Exception in querying eagle service: " + response.getException()); } return response.getObj(); - } catch (Exception exception) { - LOG.error("Exception in retrieving audit entry: " + exception); - throw new IllegalStateException(exception); - } - } + } catch (Exception exception) { + LOG.error("Exception in retrieving audit entry: " + exception); + throw new IllegalStateException(exception); + } + } - @Override - public List findServiceAuditByUser(String serviceName, String userID) throws Exception { - try { - IEagleServiceClient client = new EagleServiceClientImpl(connector); - String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"" + serviceName + "\" AND @userID=\"" + userID + "\"]{*}"; - GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); + @Override + public List findServiceAuditByUser(String serviceName, String userID) throws Exception { + try { + IEagleServiceClient client = new EagleServiceClientImpl(connector); + String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"" + serviceName + "\" AND @userID=\"" + userID + "\"]{*}"; + GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * 
DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); client.close(); if (response.getException() != null) { throw new Exception("Exception in querying eagle service: " + response.getException()); } return response.getObj(); - } catch (Exception exception) { - LOG.error("Exception in retrieving audit entry: " + exception); - throw new IllegalStateException(exception); - } - } + } catch (Exception exception) { + LOG.error("Exception in retrieving audit entry: " + exception); + throw new IllegalStateException(exception); + } + } - @Override - public List findServiceAuditByAction(String serviceName, String action) throws Exception { - try { - IEagleServiceClient client = new EagleServiceClientImpl(connector); - String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"" + serviceName + "\" AND @actionTaken=\"" + action + "\"]{*}"; - GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); + @Override + public List findServiceAuditByAction(String serviceName, String action) throws Exception { + try { + IEagleServiceClient client = new EagleServiceClientImpl(connector); + String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@serviceName=\"" + serviceName + "\" AND @actionTaken=\"" + action + "\"]{*}"; + GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); client.close(); if (response.getException() != null) { throw new Exception("Exception in querying eagle service: " + response.getException()); } return response.getObj(); - } catch (Exception exception) { - LOG.error("Exception in retrieving audit entry: " + exception); - throw new IllegalStateException(exception); - } - } + } catch (Exception exception) { + LOG.error("Exception in retrieving audit entry: " + exception); + throw new IllegalStateException(exception); + } + } 
- @Override - public List findUserServiceAudit(String userID) throws Exception { - try { - IEagleServiceClient client = new EagleServiceClientImpl(connector); - String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@userID=\"" + userID + "\"]{*}"; - GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); + @Override + public List findUserServiceAudit(String userID) throws Exception { + try { + IEagleServiceClient client = new EagleServiceClientImpl(connector); + String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@userID=\"" + userID + "\"]{*}"; + GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); client.close(); if (response.getException() != null) { throw new Exception("Exception in querying eagle service: " + response.getException()); } return response.getObj(); - } catch (Exception exception) { - LOG.error("Exception in retrieving audit entry: " + exception); - throw new IllegalStateException(exception); - } - } + } catch (Exception exception) { + LOG.error("Exception in retrieving audit entry: " + exception); + throw new IllegalStateException(exception); + } + } - @Override - public List findUserServiceAuditByAction(String userID, String action) throws Exception { - try { - IEagleServiceClient client = new EagleServiceClientImpl(connector); - String query = AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@userID=\"" + userID + "\" AND @actionTaken=\"" + action + "\"]{*}"; - GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); + @Override + public List findUserServiceAuditByAction(String userID, String action) throws Exception { + try { + IEagleServiceClient client = new EagleServiceClientImpl(connector); + String query = 
AuditConstants.AUDIT_SERVICE_ENDPOINT + "[@userID=\"" + userID + "\" AND @actionTaken=\"" + action + "\"]{*}"; + GenericServiceAPIResponseEntity response = client.search().startTime(0).endTime(10 * DateUtils.MILLIS_PER_DAY).pageSize(Integer.MAX_VALUE).query(query).send(); client.close(); if (response.getException() != null) { throw new Exception("Exception in querying eagle service: " + response.getException()); } return response.getObj(); - } catch (Exception exception) { - LOG.error("Exception in retrieving audit entry: " + exception); - throw new IllegalStateException(exception); - } - } + } catch (Exception exception) { + LOG.error("Exception in retrieving audit entry: " + exception); + throw new IllegalStateException(exception); + } + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/entity/GenericAuditEntity.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/entity/GenericAuditEntity.java index 1a365ff691..4618276c71 100644 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/entity/GenericAuditEntity.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/entity/GenericAuditEntity.java @@ -19,62 +19,58 @@ import org.apache.commons.lang.builder.HashCodeBuilder; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.log.entity.meta.ColumnFamily; -import org.apache.eagle.log.entity.meta.Prefix; -import org.apache.eagle.log.entity.meta.Service; -import org.apache.eagle.log.entity.meta.Table; -import org.apache.eagle.log.entity.meta.Tags; -import org.apache.eagle.log.entity.meta.TimeSeries; +import org.apache.eagle.log.entity.meta.*; import org.codehaus.jackson.annotate.JsonIgnoreProperties; import org.codehaus.jackson.map.annotate.JsonSerialize; -import static org.apache.eagle.audit.common.AuditConstants.AUDIT_COLUMN_OPERATION; -import static 
org.apache.eagle.audit.common.AuditConstants.AUDIT_COLUMN_SERVICE_NAME; -import static org.apache.eagle.audit.common.AuditConstants.AUDIT_COLUMN_TIMESTAMP; -import static org.apache.eagle.audit.common.AuditConstants.AUDIT_COLUMN_USER_ID; -import static org.apache.eagle.audit.common.AuditConstants.AUDIT_SERVICE_ENDPOINT; -import static org.apache.eagle.audit.common.AuditConstants.AUDIT_TABLE; +import static org.apache.eagle.audit.common.AuditConstants.*; -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) @Table(AUDIT_TABLE) @ColumnFamily("f") @Prefix(AUDIT_TABLE) @Service(AUDIT_SERVICE_ENDPOINT) @JsonIgnoreProperties(ignoreUnknown = true) @TimeSeries(false) -@Tags({AUDIT_COLUMN_SERVICE_NAME, AUDIT_COLUMN_USER_ID, AUDIT_COLUMN_OPERATION, AUDIT_COLUMN_TIMESTAMP}) +@Tags( {AUDIT_COLUMN_SERVICE_NAME, AUDIT_COLUMN_USER_ID, AUDIT_COLUMN_OPERATION, AUDIT_COLUMN_TIMESTAMP}) public class GenericAuditEntity extends TaggedLogAPIEntity { - - public boolean equals (Object obj) { - if(obj == this) - return true; - if(!(obj instanceof GenericAuditEntity)) - return false; - GenericAuditEntity that = (GenericAuditEntity) obj; - if(compare(that.getTags().get(AUDIT_COLUMN_SERVICE_NAME), this.getTags().get(AUDIT_COLUMN_SERVICE_NAME)) && - compare(that.getTags().get(AUDIT_COLUMN_USER_ID), this.getTags().get(AUDIT_COLUMN_USER_ID)) && - compare(that.getTags().get(AUDIT_COLUMN_OPERATION), this.getTags().get(AUDIT_COLUMN_OPERATION)) && - compare(that.getTags().get(AUDIT_COLUMN_TIMESTAMP), this.getTags().get(AUDIT_COLUMN_TIMESTAMP))) - return true; - return false; - } - - private boolean compare(String a, String b){ - if(a == b) - return true; - if(a == null || b == null) - return false; - if(a.equals(b)) - return true; - return false; - } - public int hashCode(){ - HashCodeBuilder builder = new HashCodeBuilder(); - builder.append(getTags().get(AUDIT_COLUMN_SERVICE_NAME)); - 
builder.append(getTags().get(AUDIT_COLUMN_USER_ID)); - builder.append(getTags().get(AUDIT_COLUMN_OPERATION)); - builder.append(getTags().get(AUDIT_COLUMN_TIMESTAMP)); - return builder.toHashCode(); - } + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof GenericAuditEntity)) { + return false; + } + GenericAuditEntity that = (GenericAuditEntity) obj; + if (compare(that.getTags().get(AUDIT_COLUMN_SERVICE_NAME), this.getTags().get(AUDIT_COLUMN_SERVICE_NAME)) + && compare(that.getTags().get(AUDIT_COLUMN_USER_ID), this.getTags().get(AUDIT_COLUMN_USER_ID)) + && compare(that.getTags().get(AUDIT_COLUMN_OPERATION), this.getTags().get(AUDIT_COLUMN_OPERATION)) + && compare(that.getTags().get(AUDIT_COLUMN_TIMESTAMP), this.getTags().get(AUDIT_COLUMN_TIMESTAMP))) { + return true; + } + return false; + } + + private boolean compare(String a, String b) { + if (a == b) { + return true; + } + if (a == null || b == null) { + return false; + } + if (a.equals(b)) { + return true; + } + return false; + } + + public int hashCode() { + HashCodeBuilder builder = new HashCodeBuilder(); + builder.append(getTags().get(AUDIT_COLUMN_SERVICE_NAME)); + builder.append(getTags().get(AUDIT_COLUMN_USER_ID)); + builder.append(getTags().get(AUDIT_COLUMN_OPERATION)); + builder.append(getTags().get(AUDIT_COLUMN_TIMESTAMP)); + return builder.toHashCode(); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/entity/repo/AuditEntityRepository.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/entity/repo/AuditEntityRepository.java index bfcc5f6452..a9963b4d79 100755 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/entity/repo/AuditEntityRepository.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/entity/repo/AuditEntityRepository.java @@ -22,7 +22,7 @@ public class AuditEntityRepository 
extends EntityRepository { - public AuditEntityRepository() { - entitySet.add(GenericAuditEntity.class); - } + public AuditEntityRepository() { + entitySet.add(GenericAuditEntity.class); + } } diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditListener.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditListener.java index 422cc99e66..1716a38210 100644 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditListener.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditListener.java @@ -20,6 +20,6 @@ import org.apache.eagle.audit.common.AuditEvent; public interface AuditListener extends java.util.EventListener { - - void auditEvent(AuditEvent event); + + void auditEvent(AuditEvent event); } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditListenerProxy.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditListenerProxy.java index a63ca38b3d..d0a6a73b3f 100644 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditListenerProxy.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditListenerProxy.java @@ -17,25 +17,25 @@ package org.apache.eagle.audit.listener; -import java.util.EventListenerProxy; - import org.apache.eagle.audit.common.AuditEvent; +import java.util.EventListenerProxy; + public class AuditListenerProxy extends EventListenerProxy implements AuditListener { - private final String propertyName; - - public String getPropertyName() { - return propertyName; - } - - public AuditListenerProxy(String propertyName, AuditListener listener) { - super(listener); - this.propertyName = propertyName; - } - - @Override - public void auditEvent(AuditEvent event) { - 
getListener().auditEvent(event); - } + private final String propertyName; + + public String getPropertyName() { + return propertyName; + } + + public AuditListenerProxy(String propertyName, AuditListener listener) { + super(listener); + this.propertyName = propertyName; + } + + @Override + public void auditEvent(AuditEvent event) { + getListener().auditEvent(event); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditSupport.java b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditSupport.java index 39ac46f21f..837724e445 100644 --- a/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditSupport.java +++ b/eagle-core/eagle-query/eagle-audit-base/src/main/java/org/apache/eagle/audit/listener/AuditSupport.java @@ -17,76 +17,83 @@ package org.apache.eagle.audit.listener; -import java.io.Serializable; -import java.util.List; - import org.apache.eagle.audit.common.AuditEvent; import org.apache.eagle.audit.common.AuditListenerMap; import org.apache.eagle.audit.entity.GenericAuditEntity; +import java.io.Serializable; +import java.util.List; + public class AuditSupport implements Serializable { - - private AuditListenerMap map = new AuditListenerMap(); + + private AuditListenerMap map = new AuditListenerMap(); public AuditSupport(Object sourceBean) { - if (sourceBean == null) + if (sourceBean == null) { throw new NullPointerException(); + } source = sourceBean; } - - public void addAuditListener(AuditListener listener) { - if (listener == null) - return; - if (listener instanceof AuditListenerProxy) { - AuditListenerProxy proxy = (AuditListenerProxy) listener; - addAuditListener(proxy.getPropertyName(), proxy.getListener()); - } else { - this.map.add(null, listener); - } - } - + + public void addAuditListener(AuditListener listener) { + if (listener == null) { + return; + } + if (listener instanceof 
AuditListenerProxy) { + AuditListenerProxy proxy = (AuditListenerProxy) listener; + addAuditListener(proxy.getPropertyName(), proxy.getListener()); + } else { + this.map.add(null, listener); + } + } + public void addAuditListener(String propertyName, AuditListener listener) { - if (listener == null || propertyName == null) - return; - listener = this.map.extract(listener); - if (listener != null) - this.map.add(propertyName, listener); + if (listener == null || propertyName == null) { + return; + } + listener = this.map.extract(listener); + if (listener != null) { + this.map.add(propertyName, listener); + } } - - public void removeAuditListener(AuditListener listener) { - if (listener == null) - return; - if (listener instanceof AuditListenerProxy) { - AuditListenerProxy proxy = (AuditListenerProxy) listener; - removeAuditListener(proxy.getPropertyName(), proxy.getListener()); - } else { - this.map.remove(null, listener); - } - } - + + public void removeAuditListener(AuditListener listener) { + if (listener == null) { + return; + } + if (listener instanceof AuditListenerProxy) { + AuditListenerProxy proxy = (AuditListenerProxy) listener; + removeAuditListener(proxy.getPropertyName(), proxy.getListener()); + } else { + this.map.remove(null, listener); + } + } + public void removeAuditListener(String propertyName, AuditListener listener) { - if (listener == null || propertyName == null) - return; - listener = this.map.extract(listener); - if (listener != null) - this.map.remove(propertyName, listener); + if (listener == null || propertyName == null) { + return; + } + listener = this.map.extract(listener); + if (listener != null) { + this.map.remove(propertyName, listener); + } } - + public void fireAudit(String serviceName, List auditEntities) { if (null != serviceName && null != auditEntities && 0 != auditEntities.size()) { - fireAudit(new AuditEvent(this.source, serviceName, auditEntities)); + fireAudit(new AuditEvent(this.source, serviceName, auditEntities)); } } 
- + public void fireAudit(AuditEvent event) { - if (null != event.getAuditEntities() && 0 != event.getAuditEntities().size()) { - AuditListener[] common = this.map.get(null); - AuditListener[] named = (null != event.getServiceName()) ? this.map.get(event.getServiceName()) : null; - fire(common, event); - fire(named, event); - } + if (null != event.getAuditEntities() && 0 != event.getAuditEntities().size()) { + AuditListener[] common = this.map.get(null); + AuditListener[] named = (null != event.getServiceName()) ? this.map.get(event.getServiceName()) : null; + fire(common, event); + fire(named, event); + } } - + private static void fire(AuditListener[] listeners, AuditEvent event) { if (listeners != null) { for (AuditListener listener : listeners) { @@ -94,18 +101,18 @@ private static void fire(AuditListener[] listeners, AuditEvent event) { } } } - + public AuditListener[] getAuditListeners() { return this.map.getListeners(); } - + public AuditListener[] getAuditListeners(String propertyName) { return this.map.getListeners(propertyName); } - + public boolean hasListeners(String propertyName) { return this.map.hasListeners(propertyName); } - + private Object source; } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-client-base/pom.xml b/eagle-core/eagle-query/eagle-client-base/pom.xml index 41a02c577b..01aff7e3d8 100644 --- a/eagle-core/eagle-query/eagle-client-base/pom.xml +++ b/eagle-core/eagle-query/eagle-client-base/pom.xml @@ -66,5 +66,17 @@ jersey-client + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + true + true + + + + diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceAsyncClient.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceAsyncClient.java index 48e71f5a90..8d7d135aa3 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceAsyncClient.java +++ 
b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceAsyncClient.java @@ -25,29 +25,27 @@ import java.util.concurrent.Future; /** + * EagleServiceAsyncClient. * @see IEagleServiceClient */ -public interface EagleServiceAsyncClient extends Closeable{ +public interface EagleServiceAsyncClient extends Closeable { /** - * * @param * @param entities * @param serviceName * @return */ - Future> create(final List entities,final String serviceName) throws IOException, EagleServiceClientException; + Future> create(final List entities, final String serviceName) throws IOException, EagleServiceClientException; /** - * * @param entities * @param entityClass * @param * @return */ - Future> create(final List entities,final Class entityClass) throws IOException, EagleServiceClientException; + Future> create(final List entities, final Class entityClass) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return @@ -55,7 +53,6 @@ public interface EagleServiceAsyncClient extends Closeable{ Future> create(final List entities) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return @@ -63,23 +60,21 @@ public interface EagleServiceAsyncClient extends Closeable{ Future> delete(final List entities) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return */ - Future> delete(final List entities,final String serviceName) throws IOException, EagleServiceClientException; + Future> delete(final List entities, final String serviceName) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return */ - Future> delete(final List entities,final Class entityClass) throws IOException, EagleServiceClientException; + Future> delete(final List entities, final Class entityClass) throws IOException, EagleServiceClientException; /** - * + * delete. 
* @param request * @return */ @@ -93,23 +88,20 @@ public interface EagleServiceAsyncClient extends Closeable{ Future> update(final List entities) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return */ - Future> update(final List entities,final String serviceName) throws IOException, EagleServiceClientException; + Future> update(final List entities, final String serviceName) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return */ - Future> update(final List entities,final Class entityClass) throws IOException, EagleServiceClientException; + Future> update(final List entities, final Class entityClass) throws IOException, EagleServiceClientException; /** - * * @param request * @return */ diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceClientException.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceClientException.java index d3845db1c2..136b4d150b 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceClientException.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceClientException.java @@ -17,25 +17,22 @@ package org.apache.eagle.service.client; /** - * Default Eagle service client exception class + * Default Eagle service client exception class. */ public class EagleServiceClientException extends Exception { - /** - * - */ - private static final long serialVersionUID = 1710851110199812779L; + private static final long serialVersionUID = 1710851110199812779L; - /** - * Default constructor of EagleServiceClientException - */ + /** + * Default constructor of EagleServiceClientException. + */ public EagleServiceClientException() { super(); } /** - * Constructor of EagleServiceClientException - * + * Constructor of EagleServiceClientException. 
+ * * @param message error message */ public EagleServiceClientException(String message) { @@ -43,19 +40,18 @@ public EagleServiceClientException(String message) { } /** - * Constructor of EagleServiceClientException - * + * Constructor of EagleServiceClientException. + * * @param message error message - * @param cause the cause of the exception - * + * @param cause the cause of the exception */ public EagleServiceClientException(String message, Throwable cause) { super(message, cause); } /** - * Constructor of EagleServiceClientException - * + * Constructor of EagleServiceClientException. + * * @param cause the cause of the exception */ public EagleServiceClientException(Throwable cause) { diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceConnector.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceConnector.java index d06a3e4550..70a919292c 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceConnector.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceConnector.java @@ -25,25 +25,25 @@ /** * Some common codes to enable DAO through eagle service including service host/post, credential population etc. 
*/ -public class EagleServiceConnector implements Serializable{ +public class EagleServiceConnector implements Serializable { private final String eagleServiceHost; private final Integer eagleServicePort; private String username; private String password; - public String getEagleServiceHost(){ + public String getEagleServiceHost() { return this.eagleServiceHost; } - public Integer getEagleServicePort(){ + public Integer getEagleServicePort() { return this.eagleServicePort; } - public String getUsername(){ + public String getUsername() { return username; } - public String getPassword(){ + public String getPassword() { return password; } @@ -58,10 +58,11 @@ public EagleServiceConnector(String eagleServiceHost, Integer eagleServicePort, this.password = password; } - public EagleServiceConnector(Config config){ + public EagleServiceConnector(Config config) { this.eagleServiceHost = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.HOST); this.eagleServicePort = config.getInt(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PORT); - if (config.hasPath(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.USERNAME) && config.hasPath(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PASSWORD)) { + if (config.hasPath(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.USERNAME) && config.hasPath(EagleConfigConstants.EAGLE_PROPS + "" + + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PASSWORD)) { this.username = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.USERNAME); this.password = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." 
+ EagleConfigConstants.PASSWORD); } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceGroupByQueryRequest.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceGroupByQueryRequest.java index e9e58c3f75..567980c0a4 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceGroupByQueryRequest.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceGroupByQueryRequest.java @@ -21,153 +21,172 @@ import java.util.List; public class EagleServiceGroupByQueryRequest { - - private String filter; - private String startRowkey; - private int pageSize; - private String startTime; - private String endTime; - private List groupBys; - private List returns; - private List orderBys; - private String metricName; - private int intervalMin; - - public String getMetricName() { - return metricName; - } - public void setMetricName(String metricName) { - this.metricName = metricName; - } - public String getFilter() { - return filter; - } - public void setFilter(String filter) { - this.filter = filter; - } - public String getStartRowkey() { - return startRowkey; - } - public void setStartRowkey(String startRowkey) { - this.startRowkey = startRowkey; - } - public int getPageSize() { - return pageSize; - } - public void setPageSize(int pageSize) { - this.pageSize = pageSize; - } - public String getStartTime() { - return startTime; - } - public void setStartTime(String startTime) { - this.startTime = startTime; - } - public String getEndTime() { - return endTime; - } - public void setEndTime(String endTime) { - this.endTime = endTime; - } - public List getGroupBys() { - return groupBys; - } - public void setGroupBys(List groupBys) { - this.groupBys = groupBys; - } - public List getOrderBys() { - return orderBys; - } - public void setOrderBys(List orderBys) { - this.orderBys = 
orderBys; - } - public List getReturns() { - return returns; - } - public void setReturns(List returns) { - this.returns = returns; - } - - public String getQueryParameterString(String service) throws EagleServiceClientException { - if (pageSize <= 0) { - throw new EagleServiceClientException("pageSize can't be less than 1, pageSize: " + pageSize); - } - try { - final String query = getQuery(); - final StringBuilder sb = new StringBuilder(); - // query - sb.append("query=").append(service).append(URLEncoder.encode(query, "UTF-8")); - // startRowkey - if (startRowkey != null) { - sb.append("&startRowkey=").append(startRowkey); - } - // pageSize - sb.append("&pageSize=").append(this.pageSize); - if (startTime != null || endTime != null) { - sb.append("&startTime=").append(URLEncoder.encode(startTime, "UTF-8")); - sb.append("&endTime=").append(URLEncoder.encode(endTime, "UTF-8")); - } - - // metricName - if(metricName != null){ - sb.append("&metricName=" + metricName); - } - - if (intervalMin != 0) { - sb.append("&timeSeries=true&intervalmin=" + intervalMin); - } - return sb.toString(); - } catch (UnsupportedEncodingException e) { - throw new EagleServiceClientException("Got an UnsupportedEncodingException" + e.getMessage(), e); - } - } - - private String getQuery() { - final StringBuilder sb = new StringBuilder(); - sb.append("[").append(filter).append("]<"); - boolean first = true; - if (groupBys != null && groupBys.size() > 0) { - for (String groupBy : groupBys) { - if (first) { - first = false; - } else { - sb.append(","); - } - sb.append("@").append(groupBy); - } - } - sb.append(">{"); - if (returns != null && returns.size() > 0) { - first = true; - for (String returnFiled : returns) { - if (first) { - first = false; - } else { - sb.append(","); - } - sb.append(returnFiled); - } - } - sb.append("}"); - if (orderBys != null && orderBys.size() > 0) { - sb.append(".{"); - first = true; - for (String orderBy : orderBys) { - if (first) { - first = false; - } else { - 
sb.append(","); - } - sb.append(orderBy); - } - sb.append("}"); - } - return sb.toString(); - } - public int getIntervalMin() { - return intervalMin; - } - public void setIntervalMin(int intervalMin) { - this.intervalMin = intervalMin; - } + + private String filter; + private String startRowkey; + private int pageSize; + private String startTime; + private String endTime; + private List groupBys; + private List returns; + private List orderBys; + private String metricName; + private int intervalMin; + + public String getMetricName() { + return metricName; + } + + public void setMetricName(String metricName) { + this.metricName = metricName; + } + + public String getFilter() { + return filter; + } + + public void setFilter(String filter) { + this.filter = filter; + } + + public String getStartRowkey() { + return startRowkey; + } + + public void setStartRowkey(String startRowkey) { + this.startRowkey = startRowkey; + } + + public int getPageSize() { + return pageSize; + } + + public void setPageSize(int pageSize) { + this.pageSize = pageSize; + } + + public String getStartTime() { + return startTime; + } + + public void setStartTime(String startTime) { + this.startTime = startTime; + } + + public String getEndTime() { + return endTime; + } + + public void setEndTime(String endTime) { + this.endTime = endTime; + } + + public List getGroupBys() { + return groupBys; + } + + public void setGroupBys(List groupBys) { + this.groupBys = groupBys; + } + + public List getOrderBys() { + return orderBys; + } + + public void setOrderBys(List orderBys) { + this.orderBys = orderBys; + } + + public List getReturns() { + return returns; + } + + public void setReturns(List returns) { + this.returns = returns; + } + + public String getQueryParameterString(String service) throws EagleServiceClientException { + if (pageSize <= 0) { + throw new EagleServiceClientException("pageSize can't be less than 1, pageSize: " + pageSize); + } + try { + final String query = getQuery(); + final 
StringBuilder sb = new StringBuilder(); + // query + sb.append("query=").append(service).append(URLEncoder.encode(query, "UTF-8")); + // startRowkey + if (startRowkey != null) { + sb.append("&startRowkey=").append(startRowkey); + } + // pageSize + sb.append("&pageSize=").append(this.pageSize); + if (startTime != null || endTime != null) { + sb.append("&startTime=").append(URLEncoder.encode(startTime, "UTF-8")); + sb.append("&endTime=").append(URLEncoder.encode(endTime, "UTF-8")); + } + + // metricName + if (metricName != null) { + sb.append("&metricName=" + metricName); + } + + if (intervalMin != 0) { + sb.append("&timeSeries=true&intervalmin=" + intervalMin); + } + return sb.toString(); + } catch (UnsupportedEncodingException e) { + throw new EagleServiceClientException("Got an UnsupportedEncodingException" + e.getMessage(), e); + } + } + + private String getQuery() { + final StringBuilder sb = new StringBuilder(); + sb.append("[").append(filter).append("]<"); + boolean first = true; + if (groupBys != null && groupBys.size() > 0) { + for (String groupBy : groupBys) { + if (first) { + first = false; + } else { + sb.append(","); + } + sb.append("@").append(groupBy); + } + } + sb.append(">{"); + if (returns != null && returns.size() > 0) { + first = true; + for (String returnFiled : returns) { + if (first) { + first = false; + } else { + sb.append(","); + } + sb.append(returnFiled); + } + } + sb.append("}"); + if (orderBys != null && orderBys.size() > 0) { + sb.append(".{"); + first = true; + for (String orderBy : orderBys) { + if (first) { + first = false; + } else { + sb.append(","); + } + sb.append(orderBy); + } + sb.append("}"); + } + return sb.toString(); + } + + public int getIntervalMin() { + return intervalMin; + } + + public void setIntervalMin(int intervalMin) { + this.intervalMin = intervalMin; + } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceQueryBuilder.java 
b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceQueryBuilder.java index 106ebc69da..a98fedf288 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceQueryBuilder.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceQueryBuilder.java @@ -20,88 +20,88 @@ import java.util.List; /** - * Builder helper class to create EagleServiceQueryRequest + * Builder helper class to create EagleServiceQueryRequest. */ public final class EagleServiceQueryBuilder { - private final EagleServiceQueryRequest request = new EagleServiceQueryRequest(); - - public EagleServiceQueryBuilder addSearchTag(String tagKey, String tagValue) throws EagleServiceClientException { - if (tagKey == null || tagValue == null) { - throw new EagleServiceClientException("tagKey or tagValue is null, tagKey: " + tagKey + ", tagValue: " + tagValue); - } - List searchTags = request.getSearchTags(); - if (searchTags == null) { - searchTags = new ArrayList(); - request.setSearchTags(searchTags); - } - if (!containsTag(tagKey, tagValue)) { - final EagleServiceQueryRequest.Tag tag = new EagleServiceQueryRequest.Tag(); - tag.setKey(tagKey); - tag.setValue(tagValue); - searchTags.add(tag); - } - return this; - } - - public EagleServiceQueryRequest buildRequest() throws EagleServiceClientException { - return request; - } - - public EagleServiceQueryBuilder setStartTime(long startTime) { - request.setStartTime(startTime); - return this; - } - - public EagleServiceQueryBuilder setEndTime(long endTime) { - request.setEndTime(endTime); - return this; - } - - public EagleServiceQueryBuilder setPageSize(int pageSize) throws EagleServiceClientException { - if (pageSize <= 0) { - throw new EagleServiceClientException("pageSize can't be less than 1"); - } - request.setPageSize(pageSize); - return this; - } + private final EagleServiceQueryRequest request = new 
EagleServiceQueryRequest(); - public EagleServiceQueryBuilder addReturnTag(String tagKey) throws EagleServiceClientException { - if (tagKey == null) { - throw new EagleServiceClientException("tagKey can't be null"); - } - List returnTags = request.getReturnTags(); - if (returnTags == null) { - returnTags = new ArrayList(); - request.setReturnTags(returnTags); - } - if (!returnTags.contains(tagKey)) { - returnTags.add(tagKey); - } - return this; - } - - public EagleServiceQueryBuilder addReturnField(String field) throws EagleServiceClientException { - if (field == null) { - throw new EagleServiceClientException("field can't be null"); - } - List returnFields = request.getReturnFields(); - if (returnFields == null) { - returnFields = new ArrayList(); - request.setReturnFields(returnFields); - } - if (!returnFields.contains(field)) { - returnFields.add(field); - } - return this; - } + public EagleServiceQueryBuilder addSearchTag(String tagKey, String tagValue) throws EagleServiceClientException { + if (tagKey == null || tagValue == null) { + throw new EagleServiceClientException("tagKey or tagValue is null, tagKey: " + tagKey + ", tagValue: " + tagValue); + } + List searchTags = request.getSearchTags(); + if (searchTags == null) { + searchTags = new ArrayList(); + request.setSearchTags(searchTags); + } + if (!containsTag(tagKey, tagValue)) { + final EagleServiceQueryRequest.Tag tag = new EagleServiceQueryRequest.Tag(); + tag.setKey(tagKey); + tag.setValue(tagValue); + searchTags.add(tag); + } + return this; + } - private boolean containsTag(String tagKey, String tagValue) { - for (EagleServiceQueryRequest.Tag tag : request.getSearchTags()) { - if (tag.getKey().equals(tagKey) && tag.getValue().equals(tagValue)) { - return true; - } - } - return false; - } + public EagleServiceQueryRequest buildRequest() throws EagleServiceClientException { + return request; + } + + public EagleServiceQueryBuilder setStartTime(long startTime) { + request.setStartTime(startTime); + 
return this; + } + + public EagleServiceQueryBuilder setEndTime(long endTime) { + request.setEndTime(endTime); + return this; + } + + public EagleServiceQueryBuilder setPageSize(int pageSize) throws EagleServiceClientException { + if (pageSize <= 0) { + throw new EagleServiceClientException("pageSize can't be less than 1"); + } + request.setPageSize(pageSize); + return this; + } + + public EagleServiceQueryBuilder addReturnTag(String tagKey) throws EagleServiceClientException { + if (tagKey == null) { + throw new EagleServiceClientException("tagKey can't be null"); + } + List returnTags = request.getReturnTags(); + if (returnTags == null) { + returnTags = new ArrayList(); + request.setReturnTags(returnTags); + } + if (!returnTags.contains(tagKey)) { + returnTags.add(tagKey); + } + return this; + } + + public EagleServiceQueryBuilder addReturnField(String field) throws EagleServiceClientException { + if (field == null) { + throw new EagleServiceClientException("field can't be null"); + } + List returnFields = request.getReturnFields(); + if (returnFields == null) { + returnFields = new ArrayList(); + request.setReturnFields(returnFields); + } + if (!returnFields.contains(field)) { + returnFields.add(field); + } + return this; + } + + private boolean containsTag(String tagKey, String tagValue) { + for (EagleServiceQueryRequest.Tag tag : request.getSearchTags()) { + if (tag.getKey().equals(tagKey) && tag.getValue().equals(tagValue)) { + return true; + } + } + return false; + } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceQueryRequest.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceQueryRequest.java index 37dcb4471e..8893232e62 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceQueryRequest.java +++ 
b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceQueryRequest.java @@ -23,129 +23,132 @@ import java.util.List; /** - * Eagle service query parameter request + * Eagle service query parameter request. */ public class EagleServiceQueryRequest { - // instance members - private long startTime; - private long endTime; - private int pageSize; - private List searchTags; - private List returnTags; - private List returnFields; - - public static class Tag { - private String key; - private String value; - - public String getKey() { - return key; - } - public void setKey(String key) { - this.key = key; - } - public String getValue() { - return value; - } - public void setValue(String value) { - this.value = value; - } - } - - public long getStartTime() { - return startTime; - } - - public void setStartTime(long startTime) { - this.startTime = startTime; - } - - public long getEndTime() { - return endTime; - } - - public void setEndTime(long endTime) { - this.endTime = endTime; - } - - public int getPageSize() { - return pageSize; - } - - public void setPageSize(int pageSize) { - this.pageSize = pageSize; - } - - public List getSearchTags() { - return searchTags; - } - - public void setSearchTags(List searchTags) { - this.searchTags = searchTags; - } - - public List getReturnTags() { - return returnTags; - } - - public void setReturnTags(List returnTags) { - this.returnTags = returnTags; - } - - public List getReturnFields() { - return returnFields; - } - - public void setReturnFields(List returnFields) { - this.returnFields = returnFields; - } - - public String getQueryParameterString() throws EagleServiceClientException { - if (pageSize <= 0) { - throw new EagleServiceClientException("pageSize can't be less than 1, pageSize: " + pageSize); - } - if (startTime > endTime || (startTime == endTime && startTime != 0)) { - throw new EagleServiceClientException("Invalid startTime and endTime, startTime: " + startTime + ", endTime: 
" + endTime); - } - int returnSize = 0; - if (returnTags != null) { - returnSize += returnTags.size(); - } - if (returnFields != null) { - returnSize += returnFields.size(); - } - if (returnSize == 0) { - throw new EagleServiceClientException("Invalid request, no return tag or field added"); - } - final StringBuilder sb = new StringBuilder(); - sb.append("pageSize=").append(this.pageSize); - if (endTime > 0) { - final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd%20HH:mm:ss"); + // instance members + private long startTime; + private long endTime; + private int pageSize; + private List searchTags; + private List returnTags; + private List returnFields; + + public static class Tag { + private String key; + private String value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + } + + public long getStartTime() { + return startTime; + } + + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + public long getEndTime() { + return endTime; + } + + public void setEndTime(long endTime) { + this.endTime = endTime; + } + + public int getPageSize() { + return pageSize; + } + + public void setPageSize(int pageSize) { + this.pageSize = pageSize; + } + + public List getSearchTags() { + return searchTags; + } + + public void setSearchTags(List searchTags) { + this.searchTags = searchTags; + } + + public List getReturnTags() { + return returnTags; + } + + public void setReturnTags(List returnTags) { + this.returnTags = returnTags; + } + + public List getReturnFields() { + return returnFields; + } + + public void setReturnFields(List returnFields) { + this.returnFields = returnFields; + } + + public String getQueryParameterString() throws EagleServiceClientException { + if (pageSize <= 0) { + throw new EagleServiceClientException("pageSize can't be less than 1, 
pageSize: " + pageSize); + } + if (startTime > endTime || (startTime == endTime && startTime != 0)) { + throw new EagleServiceClientException("Invalid startTime and endTime, startTime: " + startTime + ", endTime: " + endTime); + } + int returnSize = 0; + if (returnTags != null) { + returnSize += returnTags.size(); + } + if (returnFields != null) { + returnSize += returnFields.size(); + } + if (returnSize == 0) { + throw new EagleServiceClientException("Invalid request, no return tag or field added"); + } + final StringBuilder sb = new StringBuilder(); + sb.append("pageSize=").append(this.pageSize); + if (endTime > 0) { + final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd%20HH:mm:ss"); format.setTimeZone(EagleConfigFactory.load().getTimeZone()); - Date date = new Date(startTime); - String timeString = format.format(date); - sb.append("&startTime=").append(timeString); - date.setTime(endTime); - timeString = format.format(date); - sb.append("&endTime=").append(timeString); - } - if (searchTags != null) { - for (Tag tag : searchTags) { - sb.append("&tagNameValue=").append(tag.getKey()).append("%3D").append(tag.getValue()); - } - } - if (returnTags != null) { - for (String tagKey : returnTags) { - sb.append("&outputTag=").append(tagKey); - } - } - if (returnFields != null) { - for (String field : returnFields) { - sb.append("&outputField=").append(field); - } - } - return sb.toString(); - } - + Date date = new Date(startTime); + String timeString = format.format(date); + sb.append("&startTime=").append(timeString); + date.setTime(endTime); + timeString = format.format(date); + sb.append("&endTime=").append(timeString); + } + if (searchTags != null) { + for (Tag tag : searchTags) { + sb.append("&tagNameValue=").append(tag.getKey()).append("%3D").append(tag.getValue()); + } + } + if (returnTags != null) { + for (String tagKey : returnTags) { + sb.append("&outputTag=").append(tagKey); + } + } + if (returnFields != null) { + for (String field : returnFields) 
{ + sb.append("&outputField=").append(field); + } + } + return sb.toString(); + } + } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceSingleEntityQueryRequest.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceSingleEntityQueryRequest.java index 58b27642ed..d91c35b288 100755 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceSingleEntityQueryRequest.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/EagleServiceSingleEntityQueryRequest.java @@ -22,14 +22,14 @@ import java.util.Date; public class EagleServiceSingleEntityQueryRequest { - private String query; - private String startRowkey; - private int pageSize; - private long startTime; - private long endTime; - private boolean treeAgg; - private String metricName; - private boolean filterIfMissing; + private String query; + private String startRowkey; + private int pageSize; + private long startTime; + private long endTime; + private boolean treeAgg; + private String metricName; + private boolean filterIfMissing; public boolean isVerbose() { return verbose; @@ -40,129 +40,132 @@ public void setVerbose(boolean verbose) { } private boolean verbose; - - public String getMetricName() { - return metricName; - } - - public void setMetricName(String metricName) { - this.metricName = metricName; - } - - public String getQuery() { - return query; - } - - public void setQuery(String query) { - this.query = query; - } - - public String getStartRowkey() { - return startRowkey; - } - - public void setStartRowkey(String startRowkey) { - this.startRowkey = startRowkey; - } - - public boolean isTreeAgg() { - return treeAgg; - } - - public void setTreeAgg(boolean treeAgg) { - this.treeAgg = treeAgg; - } - - public long getStartTime() { - return startTime; - } - - public void setStartTime(long startTime) { - 
this.startTime = startTime; - } - - public long getEndTime() { - return endTime; - } - - public void setEndTime(long endTime) { - this.endTime = endTime; - } - - public int getPageSize() { - return pageSize; - } - - public void setPageSize(int pageSize) { - this.pageSize = pageSize; - } - - public boolean getFilterIfMissing() { - return filterIfMissing; - } - - public void setFilterIfMissing(boolean filterIfMissing) { - this.filterIfMissing = filterIfMissing; - } - - public String getQueryParameterString() throws EagleServiceClientException { - if (pageSize < 0) { - throw new EagleServiceClientException("pageSize can't be less than 0, pageSize: " + pageSize); - } - - // startTime and endTime is optional - - final StringBuilder sb = new StringBuilder(); - // query -// try { -// sb.append("query=").append(URLEncoder.encode(query,"UTF-8")); - sb.append("query=").append(query); -// } catch (UnsupportedEncodingException e) { -// throw new EagleServiceClientException(e); -// } + + public String getMetricName() { + return metricName; + } + + public void setMetricName(String metricName) { + this.metricName = metricName; + } + + public String getQuery() { + return query; + } + + public void setQuery(String query) { + this.query = query; + } + + public String getStartRowkey() { + return startRowkey; + } + + public void setStartRowkey(String startRowkey) { + this.startRowkey = startRowkey; + } + + public boolean isTreeAgg() { + return treeAgg; + } + + public void setTreeAgg(boolean treeAgg) { + this.treeAgg = treeAgg; + } + + public long getStartTime() { + return startTime; + } + + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + public long getEndTime() { + return endTime; + } + + public void setEndTime(long endTime) { + this.endTime = endTime; + } + + public int getPageSize() { + return pageSize; + } + + public void setPageSize(int pageSize) { + this.pageSize = pageSize; + } + + public boolean getFilterIfMissing() { + return filterIfMissing; 
+ } + + public void setFilterIfMissing(boolean filterIfMissing) { + this.filterIfMissing = filterIfMissing; + } + + public String getQueryParameterString() throws EagleServiceClientException { + if (pageSize < 0) { + throw new EagleServiceClientException("pageSize can't be less than 0, pageSize: " + pageSize); + } + + // startTime and endTime is optional + + final StringBuilder sb = new StringBuilder(); + // query + // try { + // sb.append("query=").append(URLEncoder.encode(query,"UTF-8")); + sb.append("query=").append(query); + // } catch (UnsupportedEncodingException e) { + // throw new EagleServiceClientException(e); + // } // startRowkey - if (startRowkey != null) { - sb.append("&startRowkey=").append(startRowkey); - } - // pageSize - sb.append("&pageSize=").append(this.pageSize); - if (startTime !=0 || endTime != 0) { - Date date = new Date(startTime); - SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd%20HH:mm:ss"); + if (startRowkey != null) { + sb.append("&startRowkey=").append(startRowkey); + } + // pageSize + sb.append("&pageSize=").append(this.pageSize); + if (startTime != 0 || endTime != 0) { + Date date = new Date(startTime); + SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd%20HH:mm:ss"); dateFormatter.setTimeZone(EagleConfigFactory.load().getTimeZone()); - String timeString = dateFormatter.format(date); - sb.append("&startTime=").append(timeString); - date.setTime(endTime); - timeString = dateFormatter.format(date); - sb.append("&endTime=").append(timeString); - } - // tree aggregate - sb.append("&treeAgg=").append(treeAgg); - - // for metric read - if(metricName != null){ - sb.append("&metricName=").append(metricName); - } - - if (filterIfMissing == true) { - sb.append("&filterIfMissing=").append("true"); - } - return sb.toString(); - } - - public static Builder build(){ + String timeString = dateFormatter.format(date); + sb.append("&startTime=").append(timeString); + date.setTime(endTime); + timeString =
dateFormatter.format(date); + sb.append("&endTime=").append(timeString); + } + // tree aggregate + sb.append("&treeAgg=").append(treeAgg); + + // for metric read + if (metricName != null) { + sb.append("&metricName=").append(metricName); + } + + if (filterIfMissing == true) { + sb.append("&filterIfMissing=").append("true"); + } + return sb.toString(); + } + + public static Builder build() { return new Builder(); } - public static class Builder{ + public static class Builder { private final EagleServiceSingleEntityQueryRequest rawQuery; - public Builder(){ - this.rawQuery= new EagleServiceSingleEntityQueryRequest(); + + public Builder() { + this.rawQuery = new EagleServiceSingleEntityQueryRequest(); } - public EagleServiceSingleEntityQueryRequest done(){ + + public EagleServiceSingleEntityQueryRequest done() { return this.rawQuery; } + public Builder query(String query) { this.rawQuery.setQuery(query); return this; diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/IEagleServiceClient.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/IEagleServiceClient.java index ce62eee082..42c9a3316f 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/IEagleServiceClient.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/IEagleServiceClient.java @@ -16,9 +16,9 @@ */ package org.apache.eagle.service.client; +import com.sun.jersey.api.client.Client; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity; -import com.sun.jersey.api.client.Client; import java.io.Closeable; import java.io.IOException; @@ -31,25 +31,22 @@ public interface IEagleServiceClient extends IEagleServiceRequestBuilder, Closea IEagleServiceClient silence(boolean silence); /** - * * @param * @param entities * @param serviceName * @return */ - 
GenericServiceAPIResponseEntity create(List entities,String serviceName) throws IOException, EagleServiceClientException; + GenericServiceAPIResponseEntity create(List entities, String serviceName) throws IOException, EagleServiceClientException; /** - * * @param entities * @param entityClass * @param * @return */ - GenericServiceAPIResponseEntity create(List entities,Class entityClass) throws IOException, EagleServiceClientException; + GenericServiceAPIResponseEntity create(List entities, Class entityClass) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return @@ -57,7 +54,6 @@ public interface IEagleServiceClient extends IEagleServiceRequestBuilder, Closea GenericServiceAPIResponseEntity create(List entities) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return @@ -65,37 +61,35 @@ public interface IEagleServiceClient extends IEagleServiceRequestBuilder, Closea GenericServiceAPIResponseEntity delete(List entities) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return */ - GenericServiceAPIResponseEntity delete(List entities,String serviceName) throws IOException, EagleServiceClientException; + GenericServiceAPIResponseEntity delete(List entities, String serviceName) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return */ - GenericServiceAPIResponseEntity delete(List entities,Class entityClass) throws IOException, EagleServiceClientException; + GenericServiceAPIResponseEntity delete(List entities, Class entityClass) throws IOException, EagleServiceClientException; /** - * + * delete. * @param request * @return */ GenericServiceAPIResponseEntity delete(EagleServiceSingleEntityQueryRequest request) throws EagleServiceClientException, IOException; /** - * + * delete by id. 
* @param ids * @param serviceName * @return * @throws EagleServiceClientException * @throws IOException */ - GenericServiceAPIResponseEntity deleteById(List ids,String serviceName) throws EagleServiceClientException, IOException; + GenericServiceAPIResponseEntity deleteById(List ids, String serviceName) throws EagleServiceClientException, IOException; /** * @param entities @@ -105,23 +99,20 @@ public interface IEagleServiceClient extends IEagleServiceRequestBuilder, Closea GenericServiceAPIResponseEntity update(List entities) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return */ - GenericServiceAPIResponseEntity update(List entities,String serviceName) throws IOException, EagleServiceClientException; + GenericServiceAPIResponseEntity update(List entities, String serviceName) throws IOException, EagleServiceClientException; /** - * * @param entities * @param * @return */ - GenericServiceAPIResponseEntity update(List entities,Class entityClass) throws IOException, EagleServiceClientException; + GenericServiceAPIResponseEntity update(List entities, Class entityClass) throws IOException, EagleServiceClientException; /** - * * @param request * @return */ diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/IEagleServiceRequestBuilder.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/IEagleServiceRequestBuilder.java index 1264b3090a..4e988837d8 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/IEagleServiceRequestBuilder.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/IEagleServiceRequestBuilder.java @@ -19,21 +19,20 @@ import org.apache.eagle.service.client.impl.*; /** - * - * IEagleServiceClient extension interfaces + * IEagleServiceClient extension interfaces. 
* * @see IEagleServiceClient */ public interface IEagleServiceRequestBuilder { /** - * Search pipe API + * Search pipe API. * * @return */ SearchRequestBuilder search(); /** - * Search pipe API + * Search pipe API. * * @param query * @return @@ -41,15 +40,14 @@ public interface IEagleServiceRequestBuilder { SearchRequestBuilder search(String query); /** - * - * Delete pipe API + * Delete pipe API. * * @return */ DeleteRequestBuilder delete(); /** - * Metric sender + * Metric sender. * * @param metricName * @return @@ -57,7 +55,7 @@ public interface IEagleServiceRequestBuilder { MetricSender metric(String metricName); /** - * Batch entities sender + * Batch entities sender. * * @param batchSize * @return @@ -65,14 +63,14 @@ public interface IEagleServiceRequestBuilder { BatchSender batch(int batchSize); /** - * Async service client requester + * Async service client requester. * * @return */ EagleServiceAsyncClient async(); /** - * Parallel service client requester + * Parallel service client requester. 
* * @param parallelNum * @return diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListGroupByQueryAPIResponseEntity.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListGroupByQueryAPIResponseEntity.java index e59513bf5d..6a6e46aab6 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListGroupByQueryAPIResponseEntity.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListGroupByQueryAPIResponseEntity.java @@ -21,70 +21,83 @@ public class ListGroupByQueryAPIResponseEntity { - private boolean success; - private String exception; - private int totalResults; - private long elapsedms; - private List obj; - private long lastTimestamp; - - public long getElapsedms() { - return elapsedms; - } - public void setElapsedms(long elapsedms) { - this.elapsedms = elapsedms; - } - public boolean isSuccess() { - return success; - } - public void setSuccess(boolean success) { - this.success = success; - } - public String getException() { - return exception; - } - public void setException(String exception) { - this.exception = exception; - } - public int getTotalResults() { - return totalResults; - } - public void setTotalResults(int totalResults) { - this.totalResults = totalResults; - } - public List getObj() { - return obj; - } - public void setObj(List obj) { - this.obj = obj; - } - - public long getLastTimestamp() { - return lastTimestamp; - } - - public void setLastTimestamp(long lastTimestamp) { - this.lastTimestamp = lastTimestamp; - } - - public static class Entry implements Map.Entry, List> { - - private List key; - private List value; - public List getKey() { - return key; - } - public void setKey(List key) { - this.key = key; - } - public List getValue() { - return value; - } - @Override - public List setValue(List value) { - List old = this.value; - this.value = value; - return old; - 
} - } + private boolean success; + private String exception; + private int totalResults; + private long elapsedms; + private List obj; + private long lastTimestamp; + + public long getElapsedms() { + return elapsedms; + } + + public void setElapsedms(long elapsedms) { + this.elapsedms = elapsedms; + } + + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getException() { + return exception; + } + + public void setException(String exception) { + this.exception = exception; + } + + public int getTotalResults() { + return totalResults; + } + + public void setTotalResults(int totalResults) { + this.totalResults = totalResults; + } + + public List getObj() { + return obj; + } + + public void setObj(List obj) { + this.obj = obj; + } + + public long getLastTimestamp() { + return lastTimestamp; + } + + public void setLastTimestamp(long lastTimestamp) { + this.lastTimestamp = lastTimestamp; + } + + public static class Entry implements Map.Entry, List> { + + private List key; + private List value; + + public List getKey() { + return key; + } + + public void setKey(List key) { + this.key = key; + } + + public List getValue() { + return value; + } + + @Override + public List setValue(List value) { + List old = this.value; + this.value = value; + return old; + } + } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListQueryAPIResponseConvertHelper.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListQueryAPIResponseConvertHelper.java index 3e71f4e598..eeca502977 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListQueryAPIResponseConvertHelper.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListQueryAPIResponseConvertHelper.java @@ -16,102 +16,96 @@ */ package 
org.apache.eagle.service.client; +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; +import org.apache.eagle.log.entity.ListQueryAPIResponseEntity; +import org.codehaus.jackson.JsonGenerationException; +import org.codehaus.jackson.map.JsonMappingException; +import org.codehaus.jackson.map.ObjectMapper; + import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Type; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; -import org.codehaus.jackson.JsonGenerationException; -import org.codehaus.jackson.map.JsonMappingException; -import org.codehaus.jackson.map.ObjectMapper; - -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.log.entity.ListQueryAPIResponseEntity; - /** * TODO: It's just a temporary solution. We need fix jersy and jackson mapping issue so the class - * can be safely removed. - * + * can be safely removed. 
*/ public final class ListQueryAPIResponseConvertHelper { - - private static final Map, Map> BEAN_SETTER_MAP = new ConcurrentHashMap, Map>(); - private static final String SETTER_PREFIX = "set"; - - @SuppressWarnings({ "unchecked" }) - public static ListQueryAPIResponseEntity convert(Class clazz, ListQueryAPIResponseEntity response) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, JsonGenerationException, JsonMappingException, IOException { - if (response == null || response.getObj() == null) { - return response; - } - final List entities = new ArrayList(); - final Object obj = response.getObj(); - final Map settings = getOrCreateSetterMap(clazz); - final Collection> objs = (Collection>) obj; - for (Map map : objs) { - final TaggedLogAPIEntity entity = clazz.newInstance(); - for (Map.Entry entry : map.entrySet()) { - final String propertyName = entry.getKey(); - Object value = entry.getValue(); - final Method method = settings.get(propertyName); - final Type type = method.getGenericParameterTypes()[0]; - if ((type == double.class || type == Double.class || type == long.class || type == Long.class) - && (value.equals("NaN"))) { - value = 0; - } - - final Class parameterClass = method.getParameterTypes()[0]; - if (value instanceof Number || value instanceof String || parameterClass.isInstance(value)) { - try { - method.invoke(entity, value); - } - catch (Exception e){ - e.printStackTrace(); - } - } else { - ObjectMapper om = new ObjectMapper(); - String objJson = om.writeValueAsString(value); - value = om.readValue(objJson, parameterClass); - method.invoke(entity, value); - } - } - entities.add(entity); - } - response.setObj(entities); - return response; - } - - private static Map getOrCreateSetterMap(Class clazz) { - Map setterMap = BEAN_SETTER_MAP.get(clazz); - if (setterMap == null) { - setterMap = createSetterMap(clazz); - } - return setterMap; - } - private static Map createSetterMap(Class clazz) { - 
final Map setterMap = new HashMap(); - final Method[] methods = clazz.getMethods(); - final StringBuilder sb = new StringBuilder(100); - for (Method m : methods) { - final String methodName = m.getName(); - if (methodName.startsWith(SETTER_PREFIX) && methodName.length() > SETTER_PREFIX.length()) { - sb.setLength(0); - final char c = methodName.charAt(3); - sb.append(Character.toLowerCase(c)); - sb.append(methodName.substring(4)); - String propertyName = sb.toString(); - setterMap.put(propertyName, m); - } - } - BEAN_SETTER_MAP.put(clazz, setterMap); - return setterMap; - } + private static final Map, Map> BEAN_SETTER_MAP = new ConcurrentHashMap, Map>(); + private static final String SETTER_PREFIX = "set"; + + @SuppressWarnings( {"unchecked"}) + public static ListQueryAPIResponseEntity convert(Class clazz, ListQueryAPIResponseEntity response) throws InstantiationException, IllegalAccessException, + IllegalArgumentException, InvocationTargetException, JsonGenerationException, JsonMappingException, IOException { + if (response == null || response.getObj() == null) { + return response; + } + final List entities = new ArrayList(); + final Object obj = response.getObj(); + final Map settings = getOrCreateSetterMap(clazz); + final Collection> objs = (Collection>) obj; + for (Map map : objs) { + final TaggedLogAPIEntity entity = clazz.newInstance(); + for (Map.Entry entry : map.entrySet()) { + final String propertyName = entry.getKey(); + Object value = entry.getValue(); + final Method method = settings.get(propertyName); + final Type type = method.getGenericParameterTypes()[0]; + if ((type == double.class || type == Double.class || type == long.class || type == Long.class) + && (value.equals("NaN"))) { + value = 0; + } + + final Class parameterClass = method.getParameterTypes()[0]; + if (value instanceof Number || value instanceof String || parameterClass.isInstance(value)) { + try { + method.invoke(entity, value); + } catch (Exception e) { + e.printStackTrace(); + } + } 
else { + ObjectMapper om = new ObjectMapper(); + String objJson = om.writeValueAsString(value); + value = om.readValue(objJson, parameterClass); + method.invoke(entity, value); + } + } + entities.add(entity); + } + response.setObj(entities); + return response; + } + + private static Map getOrCreateSetterMap(Class clazz) { + Map setterMap = BEAN_SETTER_MAP.get(clazz); + if (setterMap == null) { + setterMap = createSetterMap(clazz); + } + return setterMap; + } + + private static Map createSetterMap(Class clazz) { + final Map setterMap = new HashMap(); + final Method[] methods = clazz.getMethods(); + final StringBuilder sb = new StringBuilder(100); + for (Method m : methods) { + final String methodName = m.getName(); + if (methodName.startsWith(SETTER_PREFIX) && methodName.length() > SETTER_PREFIX.length()) { + sb.setLength(0); + final char c = methodName.charAt(3); + sb.append(Character.toLowerCase(c)); + sb.append(methodName.substring(4)); + String propertyName = sb.toString(); + setterMap.put(propertyName, m); + } + } + BEAN_SETTER_MAP.put(clazz, setterMap); + return setterMap; + } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListTimeSeriesQueryAPIResponseEntity.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListTimeSeriesQueryAPIResponseEntity.java index 80c6aeb55b..9efb7e1e5a 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListTimeSeriesQueryAPIResponseEntity.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ListTimeSeriesQueryAPIResponseEntity.java @@ -16,69 +16,82 @@ */ package org.apache.eagle.service.client; +import org.codehaus.jackson.annotate.JsonPropertyOrder; + import java.util.List; import java.util.Map; -import org.codehaus.jackson.annotate.JsonPropertyOrder; - -@JsonPropertyOrder({ "success", "exception", "elapsems", "totalResults", "elapsedms", "obj" 
}) +@JsonPropertyOrder( {"success", "exception", "elapsems", "totalResults", "elapsedms", "obj"}) public class ListTimeSeriesQueryAPIResponseEntity { - private boolean success; - private String exception; - private int totalResults; - private long elapsedms; - private List obj; - - public long getElapsedms() { - return elapsedms; - } - public void setElapsedms(long elapsedms) { - this.elapsedms = elapsedms; - } - public boolean isSuccess() { - return success; - } - public void setSuccess(boolean success) { - this.success = success; - } - public String getException() { - return exception; - } - public void setException(String exception) { - this.exception = exception; - } - public int getTotalResults() { - return totalResults; - } - public void setTotalResults(int totalResults) { - this.totalResults = totalResults; - } - public List getObj() { - return obj; - } - public void setObj(List obj) { - this.obj = obj; - } - - public static class Entry implements Map.Entry, List> { - - private List key; - private List value; - public List getKey() { - return key; - } - public void setKey(List key) { - this.key = key; - } - public List getValue() { - return value; - } - @Override - public List setValue(List value) { - List old = this.value; - this.value = value; - return old; - } - } + private boolean success; + private String exception; + private int totalResults; + private long elapsedms; + private List obj; + + public long getElapsedms() { + return elapsedms; + } + + public void setElapsedms(long elapsedms) { + this.elapsedms = elapsedms; + } + + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getException() { + return exception; + } + + public void setException(String exception) { + this.exception = exception; + } + + public int getTotalResults() { + return totalResults; + } + + public void setTotalResults(int totalResults) { + this.totalResults = totalResults; + } + + public 
List getObj() { + return obj; + } + + public void setObj(List obj) { + this.obj = obj; + } + + public static class Entry implements Map.Entry, List> { + + private List key; + private List value; + + public List getKey() { + return key; + } + + public void setKey(List key) { + this.key = key; + } + + public List getValue() { + return value; + } + + @Override + public List setValue(List value) { + List old = this.value; + this.value = value; + return old; + } + } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/RowkeyQueryAPIResponseConvertHelper.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/RowkeyQueryAPIResponseConvertHelper.java index 7dc39f0256..38d8c6350e 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/RowkeyQueryAPIResponseConvertHelper.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/RowkeyQueryAPIResponseConvertHelper.java @@ -16,6 +16,12 @@ */ package org.apache.eagle.service.client; +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; +import org.apache.eagle.log.entity.RowkeyQueryAPIResponseEntity; +import org.codehaus.jackson.JsonGenerationException; +import org.codehaus.jackson.map.JsonMappingException; +import org.codehaus.jackson.map.ObjectMapper; + import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -24,87 +30,79 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.codehaus.jackson.JsonGenerationException; -import org.codehaus.jackson.map.JsonMappingException; -import org.codehaus.jackson.map.ObjectMapper; - -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.log.entity.RowkeyQueryAPIResponseEntity; - /** * TODO: It's just a temporary solution. 
We need fix jersy and jackson mapping issue so the class - * can be safely removed. - * + * can be safely removed. */ public final class RowkeyQueryAPIResponseConvertHelper { - - private static final Map, Map> BEAN_SETTER_MAP = new ConcurrentHashMap, Map>(); - private static final String SETTER_PREFIX = "set"; - - @SuppressWarnings({ "unchecked" }) - public static RowkeyQueryAPIResponseEntity convert(Class clazz, RowkeyQueryAPIResponseEntity response) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, JsonGenerationException, JsonMappingException, IOException { - if (response == null || response.getObj() == null) { - return response; - } - final Object obj = response.getObj(); - final Map settings = getOrCreateSetterMap(clazz); - final Map map = (Map) obj; - final TaggedLogAPIEntity entity = clazz.newInstance(); - for (Map.Entry entry : map.entrySet()) { - final String propertyName = entry.getKey(); - Object value = entry.getValue(); - final Method method = settings.get(propertyName); - final Type type = method.getGenericParameterTypes()[0]; - if ((type == double.class || type == Double.class || type == long.class || type == Long.class) - && (value.equals("NaN"))) { - value = 0; - } - - final Class parameterClass = method.getParameterTypes()[0]; - if (value instanceof Number || value instanceof String || parameterClass.isInstance(value)) { - try { - method.invoke(entity, value); - } - catch (Exception e){ - e.printStackTrace(); - } - } else { - ObjectMapper om = new ObjectMapper(); - String objJson = om.writeValueAsString(value); - value = om.readValue(objJson, parameterClass); - method.invoke(entity, value); - } - } - response.setObj(entity); - return response; - } - - private static Map getOrCreateSetterMap(Class clazz) { - Map setterMap = BEAN_SETTER_MAP.get(clazz); - if (setterMap == null) { - setterMap = createSetterMap(clazz); - } - return setterMap; - } - private static Map createSetterMap(Class clazz) 
{ - final Map setterMap = new HashMap(); - final Method[] methods = clazz.getMethods(); - final StringBuilder sb = new StringBuilder(100); - for (Method m : methods) { - final String methodName = m.getName(); - if (methodName.startsWith(SETTER_PREFIX) && methodName.length() > SETTER_PREFIX.length()) { - sb.setLength(0); - final char c = methodName.charAt(3); - sb.append(Character.toLowerCase(c)); - sb.append(methodName.substring(4)); - String propertyName = sb.toString(); - setterMap.put(propertyName, m); - } - } - BEAN_SETTER_MAP.put(clazz, setterMap); - return setterMap; - } + private static final Map, Map> BEAN_SETTER_MAP = new ConcurrentHashMap, Map>(); + private static final String SETTER_PREFIX = "set"; + + @SuppressWarnings( {"unchecked"}) + public static RowkeyQueryAPIResponseEntity convert(Class clazz, RowkeyQueryAPIResponseEntity response) throws InstantiationException, IllegalAccessException, + IllegalArgumentException, InvocationTargetException, JsonGenerationException, JsonMappingException, IOException { + if (response == null || response.getObj() == null) { + return response; + } + final Object obj = response.getObj(); + final Map settings = getOrCreateSetterMap(clazz); + final Map map = (Map) obj; + final TaggedLogAPIEntity entity = clazz.newInstance(); + for (Map.Entry entry : map.entrySet()) { + final String propertyName = entry.getKey(); + Object value = entry.getValue(); + final Method method = settings.get(propertyName); + final Type type = method.getGenericParameterTypes()[0]; + if ((type == double.class || type == Double.class || type == long.class || type == Long.class) + && (value.equals("NaN"))) { + value = 0; + } + + final Class parameterClass = method.getParameterTypes()[0]; + if (value instanceof Number || value instanceof String || parameterClass.isInstance(value)) { + try { + method.invoke(entity, value); + } catch (Exception e) { + e.printStackTrace(); + } + } else { + ObjectMapper om = new ObjectMapper(); + String objJson = 
om.writeValueAsString(value); + value = om.readValue(objJson, parameterClass); + method.invoke(entity, value); + } + } + response.setObj(entity); + return response; + } + + private static Map getOrCreateSetterMap(Class clazz) { + Map setterMap = BEAN_SETTER_MAP.get(clazz); + if (setterMap == null) { + setterMap = createSetterMap(clazz); + } + return setterMap; + } + + private static Map createSetterMap(Class clazz) { + final Map setterMap = new HashMap(); + final Method[] methods = clazz.getMethods(); + final StringBuilder sb = new StringBuilder(100); + for (Method m : methods) { + final String methodName = m.getName(); + if (methodName.startsWith(SETTER_PREFIX) && methodName.length() > SETTER_PREFIX.length()) { + sb.setLength(0); + final char c = methodName.charAt(3); + sb.append(Character.toLowerCase(c)); + sb.append(methodName.substring(4)); + String propertyName = sb.toString(); + setterMap.put(propertyName, m); + } + } + BEAN_SETTER_MAP.put(clazz, setterMap); + return setterMap; + } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ServiceConfig.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ServiceConfig.java index 1dbf75b937..412ea5a90c 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ServiceConfig.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/ServiceConfig.java @@ -21,7 +21,7 @@ import java.io.Serializable; -public class ServiceConfig implements Serializable{ +public class ServiceConfig implements Serializable { public String serviceHost; public Integer servicePort; public String username; diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/TimeSeriesGroupByAPIResponseEntity.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/TimeSeriesGroupByAPIResponseEntity.java index 
e1d6618dd0..246908e308 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/TimeSeriesGroupByAPIResponseEntity.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/TimeSeriesGroupByAPIResponseEntity.java @@ -15,87 +15,98 @@ * limitations under the License. */ /** - * + * */ package org.apache.eagle.service.client; -import java.util.List; -import java.util.Map; - import org.codehaus.jackson.annotate.JsonIgnoreProperties; import org.codehaus.jackson.map.annotate.JsonSerialize; +import java.util.List; +import java.util.Map; + //@JsonPropertyOrder({ "success", "exception", "elapsems", "totalResults", "elapsedms", "obj", "lastTimestamp" }) -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) public class TimeSeriesGroupByAPIResponseEntity { - private boolean success; - private String exception; - private int totalResults; - private long elapsedms; - private List obj; - private long lastTimestamp; - - public long getElapsedms() { - return elapsedms; - } - public void setElapsedms(long elapsedms) { - this.elapsedms = elapsedms; - } - - public boolean isSuccess() { - return success; - } - public void setSuccess(boolean success) { - this.success = success; - } - public String getException() { - return exception; - } - public void setException(String exception) { - this.exception = exception; - } - public int getTotalResults() { - return totalResults; - } - public void setTotalResults(int totalResults) { - this.totalResults = totalResults; - } - public List getObj() { - return obj; - } - public void setObj(List obj) { - this.obj = obj; - } - - public long getLastTimestamp() { - return lastTimestamp; - } - public void setLastTimestamp(long lastTimestamp) { - this.lastTimestamp = lastTimestamp; - } - - public static class Entry implements Map.Entry, List> { - - private List key; 
- private List value; - public List getKey() { - return key; - } - public void setKey(List key) { - this.key = key; - } - - @Override - public List getValue() { - return value; - } - - @Override - public List setValue(List value) { - List old = this.value; - this.value = value; - return old; - } - } + private boolean success; + private String exception; + private int totalResults; + private long elapsedms; + private List obj; + private long lastTimestamp; + + public long getElapsedms() { + return elapsedms; + } + + public void setElapsedms(long elapsedms) { + this.elapsedms = elapsedms; + } + + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getException() { + return exception; + } + + public void setException(String exception) { + this.exception = exception; + } + + public int getTotalResults() { + return totalResults; + } + + public void setTotalResults(int totalResults) { + this.totalResults = totalResults; + } + + public List getObj() { + return obj; + } + + public void setObj(List obj) { + this.obj = obj; + } + + public long getLastTimestamp() { + return lastTimestamp; + } + + public void setLastTimestamp(long lastTimestamp) { + this.lastTimestamp = lastTimestamp; + } + + public static class Entry implements Map.Entry, List> { + + private List key; + private List value; + + public List getKey() { + return key; + } + + public void setKey(List key) { + this.key = key; + } + + @Override + public List getValue() { + return value; + } + + @Override + public List setValue(List value) { + List old = this.value; + this.value = value; + return old; + } + } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/BatchSender.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/BatchSender.java index ba3e6ab2d6..c1b1a58844 100644 --- 
a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/BatchSender.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/BatchSender.java @@ -18,8 +18,8 @@ import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity; -import org.apache.eagle.service.client.IEagleServiceClient; import org.apache.eagle.service.client.EagleServiceClientException; +import org.apache.eagle.service.client.IEagleServiceClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -29,7 +29,7 @@ import java.util.List; public class BatchSender implements Closeable { - private final static Logger LOG = LoggerFactory.getLogger(BatchSender.class); + private static final Logger LOG = LoggerFactory.getLogger(BatchSender.class); private final List entityBucket; private final IEagleServiceClient client; @@ -38,13 +38,15 @@ protected int getBatchSize() { } protected void setBatchSize(int batchSize) { - if(batchSize<0) throw new IllegalArgumentException("batch size should be "+batchSize); + if (batchSize < 0) { + throw new IllegalArgumentException("batch size should be " + batchSize); + } this.batchSize = batchSize; } private int batchSize; - public BatchSender(IEagleServiceClient client, int batchSize){ + public BatchSender(IEagleServiceClient client, int batchSize) { this.setBatchSize(batchSize); this.client = client; this.entityBucket = new LinkedList(); @@ -52,7 +54,7 @@ public BatchSender(IEagleServiceClient client, int batchSize){ public BatchSender send(TaggedLogAPIEntity entity) throws IOException, EagleServiceClientException { this.entityBucket.add(entity); - if(this.entityBucket.size()>=this.batchSize){ + if (this.entityBucket.size() >= this.batchSize) { flush(); } return this; @@ -60,24 +62,24 @@ public BatchSender send(TaggedLogAPIEntity entity) throws IOException, EagleServ public BatchSender send(List entities) throws 
IOException, EagleServiceClientException { this.entityBucket.addAll(entities); - if(this.entityBucket.size()>= this.batchSize){ + if (this.entityBucket.size() >= this.batchSize) { flush(); } return this; } public void flush() throws IOException, EagleServiceClientException { - if(this.entityBucket.size() == 0 && LOG.isDebugEnabled()){ + if (this.entityBucket.size() == 0 && LOG.isDebugEnabled()) { LOG.debug("No entities to flush"); return; } - LOG.info("Writing "+this.entityBucket.size()+" entities"); + LOG.info("Writing " + this.entityBucket.size() + " entities"); GenericServiceAPIResponseEntity response = this.client.create(this.entityBucket); - if(!response.isSuccess()){ - LOG.error("Got service exception: "+response.getException()); - throw new IOException("Service exception"+response.getException()); - }else{ + if (!response.isSuccess()) { + LOG.error("Got service exception: " + response.getException()); + throw new IOException("Service exception" + response.getException()); + } else { this.entityBucket.clear(); } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/ConcurrentSender.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/ConcurrentSender.java index 0c2aad0f05..8184f16647 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/ConcurrentSender.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/ConcurrentSender.java @@ -29,7 +29,7 @@ import java.util.List; import java.util.concurrent.SynchronousQueue; -public class ConcurrentSender implements Closeable{ +public class ConcurrentSender implements Closeable { private final int parallelNum; private final IEagleServiceClient client; private final SynchronousQueue queue; @@ -38,17 +38,17 @@ public class ConcurrentSender implements Closeable{ private long batchInterval = 3 * 1000; private boolean isStarted = 
false; - private final static Logger LOG = LoggerFactory.getLogger(ConcurrentSender.class); + private static final Logger LOG = LoggerFactory.getLogger(ConcurrentSender.class); public ConcurrentSender(IEagleServiceClient client, int parallelNum) { this.parallelNum = parallelNum; this.client = client; - this.queue= new SynchronousQueue(); + this.queue = new SynchronousQueue(); this.handlers = Collections.synchronizedList(new LinkedList()); } - public void start(){ - if(!this.isStarted) { + public void start() { + if (!this.isStarted) { LOG.info("Starting with handlers = " + this.parallelNum + ", batchSize = " + this.batchSize + ", batchInterval (ms) = " + this.batchInterval); for (int i = 0; i < this.parallelNum; i++) { @@ -62,30 +62,30 @@ public void start(){ } this.isStarted = true; - }else{ + } else { LOG.warn("Already started"); } } - public ConcurrentSender batchSize(int batchSize){ + public ConcurrentSender batchSize(int batchSize) { this.batchSize = batchSize; return this; } - public ConcurrentSender batchInterval(long batchInterval){ + public ConcurrentSender batchInterval(long batchInterval) { this.batchInterval = batchInterval; return this; } public ConcurrentSender send(final List entities) throws InterruptedException { - for(TaggedLogAPIEntity entity:entities){ + for (TaggedLogAPIEntity entity : entities) { this.send(entity); } return this; } public ConcurrentSender send(final TaggedLogAPIEntity entity) throws InterruptedException { - if(!this.isStarted){ + if (!this.isStarted) { this.start(); } this.queue.put(entity); @@ -94,12 +94,12 @@ public ConcurrentSender send(final TaggedLogAPIEntity entity) throws Interrupted @Override public void close() throws IOException { - for(Handler handler: handlers){ + for (Handler handler : handlers) { handler.close(); } } - private class Handler extends BatchSender implements Runnable{ + private class Handler extends BatchSender implements Runnable { private final long batchInterval; private final SynchronousQueue 
localQueue; @@ -114,45 +114,50 @@ public Handler(SynchronousQueue queue, IEagleServiceClient c @Override public void run() { - if(LOG.isDebugEnabled()) LOG.debug("Starting ..."); + if (LOG.isDebugEnabled()) { + LOG.debug("Starting ..."); + } lastFlushTime = System.currentTimeMillis(); - while(!isStopped){ + while (!isStopped) { TaggedLogAPIEntity entity = null; try { entity = this.localQueue.take(); } catch (InterruptedException e) { - LOG.error(e.getMessage(),e); + LOG.error(e.getMessage(), e); } - if(entity!=null){ + if (entity != null) { try { this.send(entity); } catch (IOException e) { - LOG.error(e.getMessage(),e); + LOG.error(e.getMessage(), e); } catch (EagleServiceClientException e) { - LOG.error(e.getMessage(),e); + LOG.error(e.getMessage(), e); } long currentTimestamp = System.currentTimeMillis(); - if((currentTimestamp - this.lastFlushTime) >= this.batchInterval){ - if(LOG.isDebugEnabled()) - LOG.info(String.format("%s - %s >= %s",currentTimestamp,this.lastFlushTime,this.batchInterval)); + if ((currentTimestamp - this.lastFlushTime) >= this.batchInterval) { + if (LOG.isDebugEnabled()) { + LOG.info(String.format("%s - %s >= %s", currentTimestamp, this.lastFlushTime, this.batchInterval)); + } try { this.flush(); } catch (IOException e) { - LOG.error(e.getMessage(),e); + LOG.error(e.getMessage(), e); } catch (EagleServiceClientException e) { - LOG.error(e.getMessage(),e); + LOG.error(e.getMessage(), e); } } - }else{ + } else { LOG.warn("Got null entity"); } } - if(LOG.isDebugEnabled()) LOG.debug("Stopping ..."); + if (LOG.isDebugEnabled()) { + LOG.debug("Stopping ..."); + } } @Override diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/DeleteRequestBuilder.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/DeleteRequestBuilder.java index ba708d69c1..645ac21ec6 100644 --- 
a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/DeleteRequestBuilder.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/DeleteRequestBuilder.java @@ -16,11 +16,11 @@ */ package org.apache.eagle.service.client.impl; +import org.apache.eagle.common.DateTimeUtil; import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity; import org.apache.eagle.service.client.EagleServiceClientException; import org.apache.eagle.service.client.EagleServiceSingleEntityQueryRequest; import org.apache.eagle.service.client.IEagleServiceClient; -import org.apache.eagle.common.DateTimeUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,103 +34,123 @@ public class DeleteRequestBuilder { private List deleteIds = null; private EagleServiceSingleEntityQueryRequest request; - private final static Logger LOG = LoggerFactory.getLogger(DeleteRequestBuilder.class); + private static final Logger LOG = LoggerFactory.getLogger(DeleteRequestBuilder.class); private String serviceName; - public DeleteRequestBuilder(IEagleServiceClient client){ + public DeleteRequestBuilder(IEagleServiceClient client) { this.client = client; } - public DeleteRequestBuilder byId(List ids){ + public DeleteRequestBuilder byId(List ids) { this.deleteIds = ids; return this; } - public DeleteRequestBuilder byQuery(String query){ - if(this.request==null) this.request = new EagleServiceSingleEntityQueryRequest(); + public DeleteRequestBuilder byQuery(String query) { + if (this.request == null) { + this.request = new EagleServiceSingleEntityQueryRequest(); + } try { this.request.setQuery(URLEncoder.encode(query, "UTF-8")); } catch (UnsupportedEncodingException e) { - LOG.error(e.getMessage(),e); + LOG.error(e.getMessage(), e); } return this; } - public DeleteRequestBuilder serviceName(String serviceName){ + public DeleteRequestBuilder serviceName(String serviceName) { this.serviceName = serviceName; return 
this; } - public DeleteRequestBuilder startRowkey(String startRowkey){ - if(this.request==null) this.request = new EagleServiceSingleEntityQueryRequest(); + public DeleteRequestBuilder startRowkey(String startRowkey) { + if (this.request == null) { + this.request = new EagleServiceSingleEntityQueryRequest(); + } this.request.setStartRowkey(startRowkey); return this; } - public DeleteRequestBuilder pageSize(int pageSize){ - if(this.request==null) this.request = new EagleServiceSingleEntityQueryRequest(); + public DeleteRequestBuilder pageSize(int pageSize) { + if (this.request == null) { + this.request = new EagleServiceSingleEntityQueryRequest(); + } this.request.setPageSize(pageSize); return this; } - public DeleteRequestBuilder startTime(long startTime){ - if(this.request==null) this.request = new EagleServiceSingleEntityQueryRequest(); + public DeleteRequestBuilder startTime(long startTime) { + if (this.request == null) { + this.request = new EagleServiceSingleEntityQueryRequest(); + } this.request.setStartTime(startTime); return this; } - public DeleteRequestBuilder startTime(String startTime){ - if(this.request==null) this.request = new EagleServiceSingleEntityQueryRequest(); + public DeleteRequestBuilder startTime(String startTime) { + if (this.request == null) { + this.request = new EagleServiceSingleEntityQueryRequest(); + } this.request.setStartTime(DateTimeUtil.humanDateToMillisecondsWithoutException(startTime)); return this; } - public DeleteRequestBuilder endTime(long endTime){ - if(this.request==null) this.request = new EagleServiceSingleEntityQueryRequest(); + public DeleteRequestBuilder endTime(long endTime) { + if (this.request == null) { + this.request = new EagleServiceSingleEntityQueryRequest(); + } this.request.setEndTime(endTime); return this; } - public DeleteRequestBuilder endTime(String endTime){ - if(this.request==null) this.request = new EagleServiceSingleEntityQueryRequest(); + public DeleteRequestBuilder endTime(String endTime) { + if 
(this.request == null) { + this.request = new EagleServiceSingleEntityQueryRequest(); + } this.request.setEndTime(DateTimeUtil.humanDateToMillisecondsWithoutException(endTime)); return this; } - public DeleteRequestBuilder timeRange(String startTime,String endTime){ + public DeleteRequestBuilder timeRange(String startTime, String endTime) { this.startTime(startTime); this.endTime(endTime); return this; } - public DeleteRequestBuilder timeRange(long startTime,long endTime){ + public DeleteRequestBuilder timeRange(long startTime, long endTime) { this.startTime(startTime); this.endTime(endTime); return this; } - public DeleteRequestBuilder treeAgg(boolean treeAgg){ - if(this.request==null) this.request = new EagleServiceSingleEntityQueryRequest(); + public DeleteRequestBuilder treeAgg(boolean treeAgg) { + if (this.request == null) { + this.request = new EagleServiceSingleEntityQueryRequest(); + } this.request.setTreeAgg(treeAgg); return this; } - public DeleteRequestBuilder metricName(String metricName){ - if(this.request==null) this.request = new EagleServiceSingleEntityQueryRequest(); + public DeleteRequestBuilder metricName(String metricName) { + if (this.request == null) { + this.request = new EagleServiceSingleEntityQueryRequest(); + } this.request.setMetricName(metricName); return this; } - public DeleteRequestBuilder filterIfMissing(Boolean filterIfMissing){ - if(this.request==null) this.request = new EagleServiceSingleEntityQueryRequest(); + public DeleteRequestBuilder filterIfMissing(Boolean filterIfMissing) { + if (this.request == null) { + this.request = new EagleServiceSingleEntityQueryRequest(); + } this.request.setFilterIfMissing(filterIfMissing); return this; } public GenericServiceAPIResponseEntity send() throws EagleServiceClientException, IOException { - if(this.deleteIds!=null){ - return client.deleteById(this.deleteIds,this.serviceName); - }else { + if (this.deleteIds != null) { + return client.deleteById(this.deleteIds, this.serviceName); + } else 
{ return client.delete(this.request); } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceAsyncClientImpl.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceAsyncClientImpl.java index 7bdfab863d..0a8cd6bcaf 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceAsyncClientImpl.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceAsyncClientImpl.java @@ -19,9 +19,9 @@ import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity; import org.apache.eagle.service.client.EagleServiceAsyncClient; -import org.apache.eagle.service.client.IEagleServiceClient; import org.apache.eagle.service.client.EagleServiceClientException; import org.apache.eagle.service.client.EagleServiceSingleEntityQueryRequest; +import org.apache.eagle.service.client.IEagleServiceClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,7 +32,7 @@ public class EagleServiceAsyncClientImpl implements EagleServiceAsyncClient { private final IEagleServiceClient client; - private final static Logger LOG = LoggerFactory.getLogger(EagleServiceAsyncClientImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(EagleServiceAsyncClientImpl.class); public EagleServiceAsyncClientImpl(IEagleServiceClient client) { this.client = client; @@ -40,7 +40,9 @@ public EagleServiceAsyncClientImpl(IEagleServiceClient client) { @Override public void close() throws IOException { - if(LOG.isDebugEnabled()) LOG.debug("Executor service is shutting down"); + if (LOG.isDebugEnabled()) { + LOG.debug("Executor service is shutting down"); + } this.client.getJerseyClient().getExecutorService().shutdown(); } @@ -49,7 +51,7 @@ public Future>() { @Override public 
GenericServiceAPIResponseEntity call() throws Exception { - return client.create(entities,serviceName); + return client.create(entities, serviceName); } }); } @@ -59,7 +61,7 @@ public Future>() { @Override public GenericServiceAPIResponseEntity call() throws Exception { - return client.create(entities,entityClass); + return client.create(entities, entityClass); } }); } @@ -99,7 +101,7 @@ public Future>() { @Override public GenericServiceAPIResponseEntity call() throws Exception { - return client.create(entities,entityClass); + return client.create(entities, entityClass); } }); } diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceBaseClient.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceBaseClient.java index 3b717d87b7..357269a5b4 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceBaseClient.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceBaseClient.java @@ -21,6 +21,12 @@ import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider; +import com.sun.jersey.api.client.AsyncWebResource; +import com.sun.jersey.api.client.Client; +import com.sun.jersey.api.client.WebResource; +import com.sun.jersey.api.client.config.ClientConfig; +import com.sun.jersey.api.client.config.DefaultClientConfig; +import com.sun.jersey.client.urlconnection.URLConnectionClientHandler; import org.apache.eagle.common.Base64; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity; @@ -30,13 +36,6 @@ import org.apache.eagle.service.client.EagleServiceClientException; import org.apache.eagle.service.client.IEagleServiceClient; import 
org.apache.eagle.service.client.security.SecurityConstants; - -import com.sun.jersey.api.client.AsyncWebResource; -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.WebResource; -import com.sun.jersey.api.client.config.ClientConfig; -import com.sun.jersey.api.client.config.DefaultClientConfig; -import com.sun.jersey.client.urlconnection.URLConnectionClientHandler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,8 +48,8 @@ import java.util.Map; public abstract class EagleServiceBaseClient implements IEagleServiceClient { - public final static String SERVICE_NAME="serviceName"; - public final static String DELETE_BY_ID="byId"; + public static final String SERVICE_NAME = "serviceName"; + public static final String DELETE_BY_ID = "byId"; private final String host; private final int port; @@ -65,15 +64,15 @@ public String getBaseEndpoint() { private final String baseEndpoint; - private final static Logger LOG = LoggerFactory.getLogger(EagleServiceBaseClient.class); + private static final Logger LOG = LoggerFactory.getLogger(EagleServiceBaseClient.class); public static final String DEFAULT_BASE_PATH = "/rest"; protected static final MediaType DEFAULT_MEDIA_TYPE = MediaType.APPLICATION_JSON_TYPE; protected static final String DEFAULT_HTTP_HEADER_CONTENT_TYPE = "application/json"; protected static final String CONTENT_TYPE = "Content-Type"; - protected final static String GENERIC_ENTITY_PATH = "/entities"; - protected final static String GENERIC_ENTITY_DELETE_PATH = GENERIC_ENTITY_PATH+"/delete"; + protected static final String GENERIC_ENTITY_PATH = "/entities"; + protected static final String GENERIC_ENTITY_DELETE_PATH = GENERIC_ENTITY_PATH + "/delete"; private final Client client; private final List closeables = new LinkedList<>(); @@ -96,20 +95,20 @@ public EagleServiceBaseClient(String host, int port, String basePath, String use client.addFilter(new com.sun.jersey.api.client.filter.GZIPContentEncodingFilter()); } - public 
EagleServiceBaseClient(String host, int port, String basePath){ + public EagleServiceBaseClient(String host, int port, String basePath) { this(host, port, basePath, null); } - public EagleServiceBaseClient(String host, int port, String username, String password){ + public EagleServiceBaseClient(String host, int port, String username, String password) { this(host, port, DEFAULT_BASE_PATH, username, password); } - public Client getJerseyClient(){ + public Client getJerseyClient() { return client; } - public EagleServiceBaseClient(String host, int port){ - this(host,port,DEFAULT_BASE_PATH); + public EagleServiceBaseClient(String host, int port) { + this(host, port, DEFAULT_BASE_PATH); } protected final StringBuilder buildBathPath() { @@ -129,23 +128,25 @@ protected static String marshall(List entities) throws JsonMappingException, return mapper.writeValueAsString(entities); } - protected Map> groupEntitiesByService(List entities) throws EagleServiceClientException { - Map> serviceEntityMap = new HashMap>(); - if(LOG.isDebugEnabled()) LOG.debug("Grouping entities by service name"); - for(E entity: entities){ - if(entity == null) { + protected Map> groupEntitiesByService(List entities) throws EagleServiceClientException { + Map> serviceEntityMap = new HashMap>(); + if (LOG.isDebugEnabled()) { + LOG.debug("Grouping entities by service name"); + } + for (E entity : entities) { + if (entity == null) { LOG.warn("Skip null entity"); continue; } try { EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(entity.getClass()); - if(entityDefinition == null){ - throw new EagleServiceClientException("Failed to find entity definition of class: "+entity.getClass()); + if (entityDefinition == null) { + throw new EagleServiceClientException("Failed to find entity definition of class: " + entity.getClass()); } String serviceName = entityDefinition.getService(); List bucket = serviceEntityMap.get(serviceName); - if(bucket == null){ + if (bucket == 
null) { bucket = new LinkedList(); serviceEntityMap.put(serviceName, bucket); } @@ -169,34 +170,34 @@ public SearchRequestBuilder search(String query) { return new SearchRequestBuilder(this).query(query); } - protected void register(Closeable closeable){ + protected void register(Closeable closeable) { this.closeables.add(closeable); } @Override public MetricSender metric(String metricName) { - MetricSender metricGenerator = new MetricSender(this,metricName); + MetricSender metricGenerator = new MetricSender(this, metricName); this.register(metricGenerator); return metricGenerator; } - protected WebResource getWebResource(String relativePath){ + protected WebResource getWebResource(String relativePath) { return this.getJerseyClient().resource(this.getBaseEndpoint() + relativePath); } - protected AsyncWebResource getAsyncWebResource(String relativePath){ + protected AsyncWebResource getAsyncWebResource(String relativePath) { return this.getJerseyClient().asyncResource(this.getBaseEndpoint() + relativePath); } protected WebResource.Builder putAuthHeaderIfNeeded(WebResource.Builder r) { if (username != null && password != null) { - r.header(SecurityConstants.AUTHORIZATION, SecurityConstants.BASIC_AUTHORIZATION_HEADER_PREFIX + Base64.encode(username + ":" + password)); + r.header(SecurityConstants.AUTHORIZATION, SecurityConstants.BASIC_AUTHORIZATION_HEADER_PREFIX + Base64.encode(username + ":" + password)); } return r; } /** - * Send HTTP POST request with entities and serviceName + * Send HTTP POST request with entities and serviceName. 
* * @param resourceURL * @param entities @@ -207,16 +208,17 @@ protected WebResource.Builder putAuthHeaderIfNeeded(WebResource.Builder r) { * @throws IOException */ @SuppressWarnings("unchecked") - protected GenericServiceAPIResponseEntity postEntitiesWithService(String resourceURL, List entities,String serviceName) throws JsonMappingException, JsonGenerationException, IOException { + protected GenericServiceAPIResponseEntity postEntitiesWithService(String resourceURL, List entities, String serviceName) throws JsonMappingException, + JsonGenerationException, IOException { final String json = marshall(entities); final WebResource r = getWebResource(resourceURL); - return putAuthHeaderIfNeeded(r.queryParam(SERVICE_NAME,serviceName).accept(DEFAULT_MEDIA_TYPE)) - .header(CONTENT_TYPE, DEFAULT_HTTP_HEADER_CONTENT_TYPE) - .post(GenericServiceAPIResponseEntity.class, json); + return putAuthHeaderIfNeeded(r.queryParam(SERVICE_NAME, serviceName).accept(DEFAULT_MEDIA_TYPE)) + .header(CONTENT_TYPE, DEFAULT_HTTP_HEADER_CONTENT_TYPE) + .post(GenericServiceAPIResponseEntity.class, json); } /** - * Send HTTP PUT request with entities and serviceName + * Send HTTP PUT request with entities and serviceName. 
* * @param resourceURL * @param entities @@ -227,12 +229,13 @@ protected GenericServiceAPIResponseEntity postEntitiesWithService(String * @throws IOException */ @SuppressWarnings("unchecked") - protected GenericServiceAPIResponseEntity putEntitiesWithService(String resourceURL, List entities,String serviceName) throws JsonMappingException, JsonGenerationException, IOException { + protected GenericServiceAPIResponseEntity putEntitiesWithService(String resourceURL, List entities, String serviceName) throws JsonMappingException, + JsonGenerationException, IOException { final String json = marshall(entities); final WebResource r = getWebResource(resourceURL); - return putAuthHeaderIfNeeded(r.queryParam(SERVICE_NAME,serviceName).accept(DEFAULT_MEDIA_TYPE)) - .header(CONTENT_TYPE, DEFAULT_HTTP_HEADER_CONTENT_TYPE) - .put(GenericServiceAPIResponseEntity.class, json); + return putAuthHeaderIfNeeded(r.queryParam(SERVICE_NAME, serviceName).accept(DEFAULT_MEDIA_TYPE)) + .header(CONTENT_TYPE, DEFAULT_HTTP_HEADER_CONTENT_TYPE) + .put(GenericServiceAPIResponseEntity.class, json); } @@ -246,8 +249,8 @@ protected String getServiceNameByClass(Class e throw new EagleServiceClientException(e); } - if(entityDefinition == null){ - throw new EagleServiceClientException("cannot find entity definition of class "+entityClass); + if (entityDefinition == null) { + throw new EagleServiceClientException("cannot find entity definition of class " + entityClass); } return entityDefinition.getService(); } @@ -255,7 +258,7 @@ protected String getServiceNameByClass(Class e @Override public BatchSender batch(int batchSize) { - BatchSender batchSender = new BatchSender(this,batchSize); + BatchSender batchSender = new BatchSender(this, batchSize); this.register(batchSender); return batchSender; } @@ -269,7 +272,7 @@ public EagleServiceAsyncClient async() { @Override public ConcurrentSender parallel(int parallelNum) { - ConcurrentSender concurrentSender = new ConcurrentSender(this,parallelNum); + 
ConcurrentSender concurrentSender = new ConcurrentSender(this, parallelNum); this.register(concurrentSender); return concurrentSender; } @@ -279,6 +282,11 @@ public GenericServiceAPIResponseEntity de return delete(entities, getServiceNameByClass(entityClass)); } + @Override + public DeleteRequestBuilder delete() { + return new DeleteRequestBuilder(this); + } + @Override public GenericServiceAPIResponseEntity create(List entities, Class entityClass) throws IOException, EagleServiceClientException { return create(entities, getServiceNameByClass(entityClass)); @@ -291,8 +299,10 @@ public GenericServiceAPIResponseEntity up @Override public void close() throws IOException { - if(!this.isStopped) { - if(LOG.isDebugEnabled()) LOG.debug("Client is closing"); + if (!this.isStopped) { + if (LOG.isDebugEnabled()) { + LOG.debug("Client is closing"); + } for (Closeable closeable : this.closeables) { try { closeable.close(); @@ -305,13 +315,11 @@ public void close() throws IOException { this.isStopped = true; } - @Override - public DeleteRequestBuilder delete() { - return new DeleteRequestBuilder(this); - } - protected void checkNotNull(Object obj,String name) throws EagleServiceClientException{ - if(obj == null) throw new EagleServiceClientException(name+" should not be null but given"); + protected void checkNotNull(Object obj, String name) throws EagleServiceClientException { + if (obj == null) { + throw new EagleServiceClientException(name + " should not be null but given"); + } } @Override diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceClientImpl.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceClientImpl.java index 912f1f7df9..7f187124f7 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceClientImpl.java +++ 
b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/EagleServiceClientImpl.java @@ -16,12 +16,12 @@ */ package org.apache.eagle.service.client.impl; +import com.sun.jersey.api.client.WebResource; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity; import org.apache.eagle.service.client.EagleServiceClientException; import org.apache.eagle.service.client.EagleServiceConnector; import org.apache.eagle.service.client.EagleServiceSingleEntityQueryRequest; -import com.sun.jersey.api.client.WebResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,32 +31,32 @@ import java.util.Map; public class EagleServiceClientImpl extends EagleServiceBaseClient { - private final static Logger LOG = LoggerFactory.getLogger(EagleServiceClientImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(EagleServiceClientImpl.class); - public EagleServiceClientImpl(String host, int port){ + public EagleServiceClientImpl(String host, int port) { super(host, port); } - public EagleServiceClientImpl(EagleServiceConnector connector){ + public EagleServiceClientImpl(EagleServiceConnector connector) { this(connector.getEagleServiceHost(), connector.getEagleServicePort(), connector.getUsername(), connector.getPassword()); } - public EagleServiceClientImpl(String host, int port, String username, String password){ + public EagleServiceClientImpl(String host, int port, String username, String password) { super(host, port, username, password); } - public EagleServiceClientImpl(String host, int port, String basePath, String username, String password){ + public EagleServiceClientImpl(String host, int port, String basePath, String username, String password) { super(host, port, basePath, username, password); } - private String getWholePath(String urlString){ - return getBaseEndpoint() + urlString; + private String getWholePath(String urlString) { + return 
getBaseEndpoint() + urlString; } @Override - public GenericServiceAPIResponseEntity create(List entities, String serviceName) throws IOException,EagleServiceClientException { - checkNotNull(serviceName,"serviceName"); - checkNotNull(entities,"entities"); + public GenericServiceAPIResponseEntity create(List entities, String serviceName) throws IOException, EagleServiceClientException { + checkNotNull(serviceName, "serviceName"); + checkNotNull(entities, "entities"); final GenericServiceAPIResponseEntity response; response = postEntitiesWithService(GENERIC_ENTITY_PATH, entities, serviceName); @@ -68,18 +68,20 @@ public GenericServiceAPIResponseEntity cr @Override public GenericServiceAPIResponseEntity create(List entities) throws IOException, EagleServiceClientException { - checkNotNull(entities,"entities"); + checkNotNull(entities, "entities"); - Map> serviceEntityMap = groupEntitiesByService(entities); - if(LOG.isDebugEnabled()) LOG.debug("Creating entities for "+serviceEntityMap.keySet().size()+" services"); + Map> serviceEntityMap = groupEntitiesByService(entities); + if (LOG.isDebugEnabled()) { + LOG.debug("Creating entities for " + serviceEntityMap.keySet().size() + " services"); + } List createdKeys = new LinkedList(); - for(Map.Entry> entry: serviceEntityMap.entrySet()){ - GenericServiceAPIResponseEntity response = create(entry.getValue(),entry.getKey()); - if(!response.isSuccess()){ - throw new IOException("Service side exception: "+response.getException()); - }else if(response.getObj()!=null){ + for (Map.Entry> entry : serviceEntityMap.entrySet()) { + GenericServiceAPIResponseEntity response = create(entry.getValue(), entry.getKey()); + if (!response.isSuccess()) { + throw new IOException("Service side exception: " + response.getException()); + } else if (response.getObj() != null) { createdKeys.addAll(response.getObj()); } } @@ -91,18 +93,20 @@ public GenericServiceAPIResponseEntity cr @Override public GenericServiceAPIResponseEntity delete(List entities) 
throws IOException, EagleServiceClientException { - checkNotNull(entities,"entities"); + checkNotNull(entities, "entities"); - Map> serviceEntityMap = groupEntitiesByService(entities); - if(LOG.isDebugEnabled()) LOG.debug("Creating entities for "+serviceEntityMap.keySet().size()+" services"); + Map> serviceEntityMap = groupEntitiesByService(entities); + if (LOG.isDebugEnabled()) { + LOG.debug("Creating entities for " + serviceEntityMap.keySet().size() + " services"); + } List deletedKeys = new LinkedList(); - for(Map.Entry> entry: serviceEntityMap.entrySet()){ + for (Map.Entry> entry : serviceEntityMap.entrySet()) { GenericServiceAPIResponseEntity response = delete(entry.getValue(), entry.getKey()); - if(!response.isSuccess()){ - LOG.error("Got service exception: "+response.getException()); + if (!response.isSuccess()) { + LOG.error("Got service exception: " + response.getException()); throw new IOException(response.getException()); - }else if(response.getObj()!=null){ + } else if (response.getObj() != null) { deletedKeys.addAll(response.getObj()); } } @@ -114,60 +118,64 @@ public GenericServiceAPIResponseEntity de @SuppressWarnings("unchecked") @Override - public GenericServiceAPIResponseEntity delete(List entities, String serviceName) throws IOException,EagleServiceClientException { - checkNotNull(entities,"entities"); - checkNotNull(serviceName,"serviceName"); + public GenericServiceAPIResponseEntity delete(List entities, String serviceName) throws IOException, EagleServiceClientException { + checkNotNull(entities, "entities"); + checkNotNull(serviceName, "serviceName"); - return postEntitiesWithService(GENERIC_ENTITY_DELETE_PATH,entities,serviceName); + return postEntitiesWithService(GENERIC_ENTITY_DELETE_PATH, entities, serviceName); } @SuppressWarnings("unchecked") @Override - public GenericServiceAPIResponseEntity delete(EagleServiceSingleEntityQueryRequest request) throws IOException,EagleServiceClientException { + public GenericServiceAPIResponseEntity 
delete(EagleServiceSingleEntityQueryRequest request) throws IOException, EagleServiceClientException { String queryString = request.getQueryParameterString(); StringBuilder sb = new StringBuilder(); sb.append(GENERIC_ENTITY_PATH); sb.append("?"); sb.append(queryString); - final String urlString = sb.toString(); + final String urlString = sb.toString(); - if(!this.silence) LOG.info("Going to delete by querying service: " + getWholePath(urlString)); + if (!this.silence) { + LOG.info("Going to delete by querying service: " + getWholePath(urlString)); + } WebResource r = getWebResource(urlString); return putAuthHeaderIfNeeded(r.accept(DEFAULT_MEDIA_TYPE) - .header(CONTENT_TYPE, DEFAULT_HTTP_HEADER_CONTENT_TYPE)) - .delete(GenericServiceAPIResponseEntity.class); + .header(CONTENT_TYPE, DEFAULT_HTTP_HEADER_CONTENT_TYPE)) + .delete(GenericServiceAPIResponseEntity.class); } @SuppressWarnings("unchecked") @Override public GenericServiceAPIResponseEntity deleteById(List ids, String serviceName) throws EagleServiceClientException, IOException { - checkNotNull(serviceName,"serviceName"); - checkNotNull(ids,"ids"); + checkNotNull(serviceName, "serviceName"); + checkNotNull(ids, "ids"); final String json = marshall(ids); final WebResource r = getWebResource(GENERIC_ENTITY_DELETE_PATH); - return putAuthHeaderIfNeeded(r.queryParam(SERVICE_NAME,serviceName) - .queryParam(DELETE_BY_ID, "true") - .accept(DEFAULT_MEDIA_TYPE)) - .header(CONTENT_TYPE, DEFAULT_HTTP_HEADER_CONTENT_TYPE) - .post(GenericServiceAPIResponseEntity.class, json); + return putAuthHeaderIfNeeded(r.queryParam(SERVICE_NAME, serviceName) + .queryParam(DELETE_BY_ID, "true") + .accept(DEFAULT_MEDIA_TYPE)) + .header(CONTENT_TYPE, DEFAULT_HTTP_HEADER_CONTENT_TYPE) + .post(GenericServiceAPIResponseEntity.class, json); } @Override public GenericServiceAPIResponseEntity update(List entities) throws IOException, EagleServiceClientException { - checkNotNull(entities,"entities"); + checkNotNull(entities, "entities"); - Map> 
serviceEntityMap = groupEntitiesByService(entities); - if(LOG.isDebugEnabled()) LOG.debug("Updating entities for "+serviceEntityMap.keySet().size()+" services"); + Map> serviceEntityMap = groupEntitiesByService(entities); + if (LOG.isDebugEnabled()) { + LOG.debug("Updating entities for " + serviceEntityMap.keySet().size() + " services"); + } List createdKeys = new LinkedList(); - for(Map.Entry> entry: serviceEntityMap.entrySet()){ + for (Map.Entry> entry : serviceEntityMap.entrySet()) { GenericServiceAPIResponseEntity response = update(entry.getValue(), entry.getKey()); - if(!response.isSuccess()){ - throw new IOException("Got service exception when updating service "+entry.getKey()+" : "+response.getException()); - }else{ - if(response.getObj()!=null) { + if (!response.isSuccess()) { + throw new IOException("Got service exception when updating service " + entry.getKey() + " : " + response.getException()); + } else { + if (response.getObj() != null) { createdKeys.addAll(response.getObj()); } } @@ -181,10 +189,10 @@ public GenericServiceAPIResponseEntity up @Override public GenericServiceAPIResponseEntity update(List entities, String serviceName) throws IOException, EagleServiceClientException { - checkNotNull(entities,"entities"); - checkNotNull(serviceName,"serviceName"); + checkNotNull(entities, "entities"); + checkNotNull(serviceName, "serviceName"); - return putEntitiesWithService(GENERIC_ENTITY_PATH,entities,serviceName); + return putEntitiesWithService(GENERIC_ENTITY_PATH, entities, serviceName); } @Override @@ -195,11 +203,13 @@ public GenericServiceAPIResponseEntity search(EagleService sb.append(GENERIC_ENTITY_PATH); sb.append("?"); sb.append(queryString); - final String urlString = sb.toString(); - if(!this.silence) LOG.info("Going to query service: " + getWholePath(urlString)); + final String urlString = sb.toString(); + if (!this.silence) { + LOG.info("Going to query service: " + getWholePath(urlString)); + } WebResource r = getWebResource(urlString); 
return putAuthHeaderIfNeeded(r.accept(DEFAULT_MEDIA_TYPE)) - .header(CONTENT_TYPE, DEFAULT_HTTP_HEADER_CONTENT_TYPE) - .get(GenericServiceAPIResponseEntity.class); + .header(CONTENT_TYPE, DEFAULT_HTTP_HEADER_CONTENT_TYPE) + .get(GenericServiceAPIResponseEntity.class); } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/MetricSender.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/MetricSender.java index dce5045f81..a2bb5d4881 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/MetricSender.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/MetricSender.java @@ -17,8 +17,8 @@ package org.apache.eagle.service.client.impl; import org.apache.eagle.log.entity.GenericMetricEntity; -import org.apache.eagle.service.client.IEagleServiceClient; import org.apache.eagle.service.client.EagleServiceClientException; +import org.apache.eagle.service.client.IEagleServiceClient; import java.io.IOException; import java.util.HashMap; @@ -29,7 +29,7 @@ public class MetricSender extends BatchSender { private Map tags; public MetricSender(IEagleServiceClient client, String metricName) { - super(client,1); + super(client, 1); this.metricName = metricName; } @@ -38,12 +38,12 @@ public MetricSender batch(int batchSize) { return this; } - public MetricSender tags(Map tags){ + public MetricSender tags(Map tags) { this.tags = tags; return this; } - public MetricSender send(String metricName,long timestamp,Map tags,double ...values) throws IOException, EagleServiceClientException { + public MetricSender send(String metricName, long timestamp, Map tags, double... 
values) throws IOException, EagleServiceClientException { GenericMetricEntity metric = new GenericMetricEntity(); metric.setPrefix(metricName); metric.setValue(values); @@ -53,23 +53,25 @@ public MetricSender send(String metricName,long timestamp,Map tag return this; } - public MetricSender send(Long timestamp,Map tags,double ...values) throws IOException, EagleServiceClientException { - return this.send(this.metricName,timestamp,tags,values); + public MetricSender send(Long timestamp, Map tags, double... values) throws IOException, EagleServiceClientException { + return this.send(this.metricName, timestamp, tags, values); + } + + public MetricSender send(Long timestamp, double... values) throws IOException, EagleServiceClientException { + return this.send(timestamp, new HashMap(this.tags), values); } - public MetricSender send(Long timestamp, double ... values) throws IOException, EagleServiceClientException { - return this.send(timestamp,new HashMap(this.tags),values); + /* + public EagleServiceMetricSender send(String metricName,Map tags,double ... values) throws IOException, EagleServiceClientException { + return this.send(metricName,System.currentTimeMillis(),tags,values); } -// public EagleServiceMetricSender send(String metricName,Map tags,double ... values) throws IOException, EagleServiceClientException { -// return this.send(metricName,System.currentTimeMillis(),tags,values); -// } -// -// public EagleServiceMetricSender send(Map tags,double ...values) throws IOException, EagleServiceClientException { -// return this.send(this.metricName,System.currentTimeMillis(),tags,values); -// } -// -// public EagleServiceMetricSender send(double ... 
values) throws IOException, EagleServiceClientException { -// return this.send(System.currentTimeMillis(), new HashMap(this.tags), values); -// } + public EagleServiceMetricSender send(Map tags,double ...values) throws IOException, EagleServiceClientException { + return this.send(this.metricName,System.currentTimeMillis(),tags,values); + } + + public EagleServiceMetricSender send(double ... values) throws IOException, EagleServiceClientException { + return this.send(System.currentTimeMillis(), new HashMap(this.tags), values); + } + */ } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/SearchRequestBuilder.java b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/SearchRequestBuilder.java index 331043cfeb..924010fabc 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/SearchRequestBuilder.java +++ b/eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/SearchRequestBuilder.java @@ -16,11 +16,11 @@ */ package org.apache.eagle.service.client.impl; +import org.apache.eagle.common.DateTimeUtil; import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity; -import org.apache.eagle.service.client.IEagleServiceClient; import org.apache.eagle.service.client.EagleServiceClientException; import org.apache.eagle.service.client.EagleServiceSingleEntityQueryRequest; -import org.apache.eagle.common.DateTimeUtil; +import org.apache.eagle.service.client.IEagleServiceClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,63 +30,63 @@ public class SearchRequestBuilder { private final EagleServiceSingleEntityQueryRequest request; private final IEagleServiceClient client; - private final static Logger LOG = LoggerFactory.getLogger(SearchRequestBuilder.class); + private static final Logger LOG = LoggerFactory.getLogger(SearchRequestBuilder.class); 
public SearchRequestBuilder(IEagleServiceClient client) { this.request = new EagleServiceSingleEntityQueryRequest(); this.client = client; } - public SearchRequestBuilder query(String query){ + public SearchRequestBuilder query(String query) { try { - this.request.setQuery(URLEncoder.encode(query,"UTF-8")); + this.request.setQuery(URLEncoder.encode(query, "UTF-8")); } catch (UnsupportedEncodingException e) { - LOG.error(e.getMessage(),e); + LOG.error(e.getMessage(), e); } return this; } - public SearchRequestBuilder startRowkey(String startRowkey){ + public SearchRequestBuilder startRowkey(String startRowkey) { this.request.setStartRowkey(startRowkey); return this; } - public SearchRequestBuilder pageSize(int pageSize){ + public SearchRequestBuilder pageSize(int pageSize) { this.request.setPageSize(pageSize); return this; } - public SearchRequestBuilder startTime(long startTime){ + public SearchRequestBuilder startTime(long startTime) { this.request.setStartTime(startTime); return this; } - public SearchRequestBuilder startTime(String startTime){ + public SearchRequestBuilder startTime(String startTime) { this.request.setStartTime(DateTimeUtil.humanDateToMillisecondsWithoutException(startTime)); return this; } - public SearchRequestBuilder endTime(long endTime){ + public SearchRequestBuilder endTime(long endTime) { this.request.setEndTime(endTime); return this; } - public SearchRequestBuilder endTime(String endTime){ + public SearchRequestBuilder endTime(String endTime) { this.request.setEndTime(DateTimeUtil.humanDateToMillisecondsWithoutException(endTime)); return this; } - public SearchRequestBuilder treeAgg(boolean treeAgg){ + public SearchRequestBuilder treeAgg(boolean treeAgg) { this.request.setTreeAgg(treeAgg); return this; } - public SearchRequestBuilder metricName(String metricName){ + public SearchRequestBuilder metricName(String metricName) { this.request.setMetricName(metricName); return this; } - public SearchRequestBuilder filterIfMissing(Boolean 
filterIfMissing){ + public SearchRequestBuilder filterIfMissing(Boolean filterIfMissing) { this.request.setFilterIfMissing(filterIfMissing); return this; } diff --git a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/Base.java b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/Base.java index 1f54f00b9f..cf6333199b 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/Base.java +++ b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/Base.java @@ -21,10 +21,10 @@ //@JsonTypeInfo(use=JsonTypeInfo.Id.CLASS, include=JsonTypeInfo.As.PROPERTY, property="@class") -@JsonTypeInfo(use=JsonTypeInfo.Id.NAME ) -@JsonSubTypes({ - @JsonSubTypes.Type(value=ClassA.class, name="ClassA"), - @JsonSubTypes.Type(value=ClassB.class, name="ClassB") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME) +@JsonSubTypes( { + @JsonSubTypes.Type(value = ClassA.class, name = "ClassA"), + @JsonSubTypes.Type(value = ClassB.class, name = "ClassB") }) public abstract class Base { diff --git a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClassA.java b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClassA.java index 60158fc5ce..1731753975 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClassA.java +++ b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClassA.java @@ -22,14 +22,14 @@ @JsonTypeName("ClassA") public class ClassA extends Base { - private int a; + private int a; - public int getA() { - return a; - } + public int getA() { + return a; + } + + public void setA(int a) { + this.a = a; + } - public void setA(int a) { - this.a = a; - } - } diff --git a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClassB.java 
b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClassB.java index faa20bafb6..87c98fcd98 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClassB.java +++ b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClassB.java @@ -21,14 +21,14 @@ @JsonTypeName("ClassB") public class ClassB extends Base { - private String b; + private String b; - public String getB() { - return b; - } + public String getB() { + return b; + } + + public void setB(String b) { + this.b = b; + } - public void setB(String b) { - this.b = b; - } - } diff --git a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClientTestBase.java b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClientTestBase.java index ac16b93536..7e8041ceb3 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClientTestBase.java +++ b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/ClientTestBase.java @@ -19,14 +19,14 @@ import org.apache.eagle.service.hbase.EmbeddedHbase; public class ClientTestBase { - - //protected static EmbeddedServer server; - protected static EmbeddedHbase hbase; - //@BeforeClass - public static void startup() throws Exception { - //hbase = EmbeddedHbase.getInstance(); - //String webappDirLocation = "../../../eagle-webservice/target/eagle-service"; - //server = EmbeddedServer.getInstance(webappDirLocation); - } + //protected static EmbeddedServer server; + protected static EmbeddedHbase hbase; + + //@BeforeClass + public static void startup() throws Exception { + //hbase = EmbeddedHbase.getInstance(); + //String webappDirLocation = "../../../eagle-webservice/target/eagle-service"; + //server = EmbeddedServer.getInstance(webappDirLocation); + } } diff --git 
a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestEagleServiceClientImpl.java b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestEagleServiceClientImpl.java index eab33a79f5..30ab60fc85 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestEagleServiceClientImpl.java +++ b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestEagleServiceClientImpl.java @@ -19,9 +19,11 @@ import org.apache.eagle.log.entity.GenericMetricEntity; import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity; import org.apache.eagle.log.entity.test.TestTimeSeriesAPIEntity; -import org.apache.eagle.service.client.impl.EagleServiceClientImpl; import org.apache.eagle.service.client.impl.ConcurrentSender; -import org.junit.*; +import org.apache.eagle.service.client.impl.EagleServiceClientImpl; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; import java.io.IOException; import java.util.*; @@ -31,19 +33,19 @@ @Ignore("Depends on eagle server, should be used for integration test") public class TestEagleServiceClientImpl extends ClientTestBase { - private IEagleServiceClient client = new EagleServiceClientImpl("localhost",5050); + private IEagleServiceClient client = new EagleServiceClientImpl("localhost", 5050); @Test public void testCreateAndSearch() throws IOException, EagleServiceClientException, IllegalAccessException, InstantiationException { List entities = new ArrayList(); - for(int i=0;i<100;i++){ + for (int i = 0; i < 100; i++) { TestTimeSeriesAPIEntity entity = new TestTimeSeriesAPIEntity(); entity.setTimestamp(System.currentTimeMillis()); entity.setTags(new HashMap() {{ put("cluster", "cluster4ut"); put("datacenter", "datacenter4ut"); - put("timestampStr",System.currentTimeMillis()+""); + put("timestampStr", System.currentTimeMillis() + ""); }}); entity.setField1(1); 
entity.setField2(1); @@ -58,22 +60,22 @@ public void testCreateAndSearch() throws IOException, EagleServiceClientExceptio GenericServiceAPIResponseEntity response = client.create(entities); assert response.isSuccess(); - response = client.create(entities,TestTimeSeriesAPIEntity.class); + response = client.create(entities, TestTimeSeriesAPIEntity.class); assert response.isSuccess(); - response = client.create(entities,"TestTimeSeriesAPIEntity"); + response = client.create(entities, "TestTimeSeriesAPIEntity"); assert response.isSuccess(); response = client.search("TestTimeSeriesAPIEntity[]{*}") - .startTime(0) - .endTime(System.currentTimeMillis() + 25 * 3600 * 1000) - .pageSize(1000) - .send(); + .startTime(0) + .endTime(System.currentTimeMillis() + 25 * 3600 * 1000) + .pageSize(1000) + .send(); - assert response.isSuccess(); + assert response.isSuccess(); assert response.getObj().size() > 0; } - private TestTimeSeriesAPIEntity newEntity(){ + private TestTimeSeriesAPIEntity newEntity() { TestTimeSeriesAPIEntity entity = new TestTimeSeriesAPIEntity(); entity.setTimestamp(System.currentTimeMillis()); entity.setTags(new HashMap() {{ @@ -94,7 +96,7 @@ private TestTimeSeriesAPIEntity newEntity(){ @Test public void testUpdate() throws IOException, EagleServiceClientException, IllegalAccessException, InstantiationException { List entities = new ArrayList(); - for(int i=0;i<100;i++){ + for (int i = 0; i < 100; i++) { TestTimeSeriesAPIEntity entity = new TestTimeSeriesAPIEntity(); entity.setTimestamp(System.currentTimeMillis()); entity.setTags(new HashMap() {{ @@ -120,10 +122,10 @@ public void testUpdate() throws IOException, EagleServiceClientException, Illega assert response.isSuccess(); response = client.search("TestTimeSeriesAPIEntity[]{*}") - .startTime(0) - .endTime(System.currentTimeMillis() + 25 * 3600 * 1000) - .pageSize(1000) - .send(); + .startTime(0) + .endTime(System.currentTimeMillis() + 25 * 3600 * 1000) + .pageSize(1000) + .send(); assert 
response.isSuccess(); assert response.getObj().size() > 0; @@ -132,7 +134,7 @@ public void testUpdate() throws IOException, EagleServiceClientException, Illega @Test public void testDelete() throws IOException, EagleServiceClientException { List entities = new ArrayList(); - for(int i=0;i<100;i++){ + for (int i = 0; i < 100; i++) { TestTimeSeriesAPIEntity entity = new TestTimeSeriesAPIEntity(); entity.setTimestamp(System.currentTimeMillis()); entity.setTags(new HashMap() {{ @@ -159,18 +161,18 @@ public void testDelete() throws IOException, EagleServiceClientException { assert response.isSuccess(); response = client.delete() - .byId(Arrays.asList("30RR1H___rOqxUr5M_sR-g5RxZlmldR_9eQ49A")) - .serviceName("TestTimeSeriesAPIEntity") - .send(); + .byId(Arrays.asList("30RR1H___rOqxUr5M_sR-g5RxZlmldR_9eQ49A")) + .serviceName("TestTimeSeriesAPIEntity") + .send(); assert response.isSuccess(); response = client.delete() - .byQuery("TestTimeSeriesAPIEntity[]{*}") - .startTime(0) - .endTime(System.currentTimeMillis()) - .pageSize(1000) - .send(); + .byQuery("TestTimeSeriesAPIEntity[]{*}") + .startTime(0) + .endTime(System.currentTimeMillis()) + .pageSize(1000) + .send(); assert response.isSuccess(); } @@ -179,54 +181,54 @@ public void testDelete() throws IOException, EagleServiceClientException { public void testMetricsSender() throws IOException, EagleServiceClientException { List entities = new ArrayList(); - Map tags = new HashMap() {{ + Map tags = new HashMap() {{ put("cluster", "cluster4ut"); put("datacenter", "datacenter4ut"); }}; - for(int i=0;i<100;i++){ + for (int i = 0; i < 100; i++) { GenericMetricEntity entity = new GenericMetricEntity(); entity.setTimestamp(System.currentTimeMillis()); entity.setTags(tags); - entity.setValue(new double[]{1.234}); + entity.setValue(new double[] {1.234}); entity.setPrefix("unit.test.metrics"); entities.add(entity); } GenericServiceAPIResponseEntity response = client.create(entities); assert response.isSuccess(); - response = 
client.create(entities,GenericMetricEntity.class); + response = client.create(entities, GenericMetricEntity.class); assert response.isSuccess(); - response = client.create(entities,GenericMetricEntity.GENERIC_METRIC_SERVICE); + response = client.create(entities, GenericMetricEntity.GENERIC_METRIC_SERVICE); assert response.isSuccess(); client.metric("unit.test.metrics") - .batch(5) - .tags(tags) - .send("unit.test.anothermetrics", System.currentTimeMillis(), tags, 0.1, 0.2, 0.3) - .send(System.currentTimeMillis(), 0.1) - .send(System.currentTimeMillis(),0.1,0.2) - .send(System.currentTimeMillis(),0.1,0.2,0.3) - .send(System.currentTimeMillis(),tags,0.1,0.2,0.3) - .send("unit.test.anothermetrics",System.currentTimeMillis(),tags,0.1,0.2,0.3) - .flush(); + .batch(5) + .tags(tags) + .send("unit.test.anothermetrics", System.currentTimeMillis(), tags, 0.1, 0.2, 0.3) + .send(System.currentTimeMillis(), 0.1) + .send(System.currentTimeMillis(), 0.1, 0.2) + .send(System.currentTimeMillis(), 0.1, 0.2, 0.3) + .send(System.currentTimeMillis(), tags, 0.1, 0.2, 0.3) + .send("unit.test.anothermetrics", System.currentTimeMillis(), tags, 0.1, 0.2, 0.3) + .flush(); GenericServiceAPIResponseEntity metricResponse = client.search("GenericMetricService[@cluster=\"cluster4ut\" AND @datacenter = \"datacenter4ut\"]{*}") - .startTime(0) - .endTime(System.currentTimeMillis()+24 * 3600 * 1000) - .metricName("unit.test.metrics") - .pageSize(1000) - .send(); + .startTime(0) + .endTime(System.currentTimeMillis() + 24 * 3600 * 1000) + .metricName("unit.test.metrics") + .pageSize(1000) + .send(); List metricEntities = metricResponse.getObj(); assert metricEntities != null; assert metricResponse.isSuccess(); GenericServiceAPIResponseEntity metricAggResponse = client.search("GenericMetricService[@cluster=\"cluster4ut\" AND @datacenter = \"datacenter4ut\"]<@cluster>{sum(value)}") - .startTime(0) - .endTime(System.currentTimeMillis()+24 * 3600 * 1000) - .metricName("unit.test.metrics") - .pageSize(1000) 
- .send(); + .startTime(0) + .endTime(System.currentTimeMillis() + 24 * 3600 * 1000) + .metricName("unit.test.metrics") + .pageSize(1000) + .send(); List aggResult = metricAggResponse.getObj(); Assert.assertNotNull(aggResult); Assert.assertTrue(metricAggResponse.isSuccess()); @@ -237,9 +239,9 @@ public void testMetricsSender() throws IOException, EagleServiceClientException @Test public void testBatchSender() throws IOException, EagleServiceClientException { client.batch(2) - .send(newEntity()) - .send(newEntity()) - .send(newEntity()); + .send(newEntity()) + .send(newEntity()) + .send(newEntity()); client.close(); } @@ -248,21 +250,21 @@ public void testAsyncSender() throws IOException, EagleServiceClientException, E EagleServiceAsyncClient asyncClient = client.async(); Future> future1 = - asyncClient.create(Arrays.asList(newEntity())); + asyncClient.create(Arrays.asList(newEntity())); GenericServiceAPIResponseEntity response1 = future1.get(); Assert.assertTrue(response1.isSuccess()); Future> future2 = - asyncClient.update(Arrays.asList(newEntity())); + asyncClient.update(Arrays.asList(newEntity())); GenericServiceAPIResponseEntity response2 = future2.get(); Assert.assertTrue(response2.isSuccess()); Future> future3 = - asyncClient.delete(Arrays.asList(newEntity())); + asyncClient.delete(Arrays.asList(newEntity())); GenericServiceAPIResponseEntity response3 = future3.get(); @@ -275,25 +277,25 @@ public void testAsyncSender() throws IOException, EagleServiceClientException, E public void testParallelSender() throws IOException, EagleServiceClientException, InterruptedException { // Case #1: ConcurrentSender concurrentSender = client - .parallel(10) - .batchSize(30) - .batchInterval(1000); + .parallel(10) + .batchSize(30) + .batchInterval(1000); int num = 1000; - for(int i=0; i< num;i++) { + for (int i = 0; i < num; i++) { concurrentSender.send(Arrays.asList(newEntity())); } // Case #2: ConcurrentSender concurrentSender2 = client - .parallel(10) - .batchSize(20) - 
.batchInterval(3); + .parallel(10) + .batchSize(20) + .batchInterval(3); int num2 = 50; - for(int i=0; i< num2;i++) { + for (int i = 0; i < num2; i++) { concurrentSender2.send(Arrays.asList(newEntity())); Thread.sleep(1); } @@ -304,17 +306,18 @@ public void testParallelSender() throws IOException, EagleServiceClientException public void testSearch() throws EagleServiceClientException, IOException { GenericServiceAPIResponseEntity response = - client.search("TestTimeSeriesAPIEntity[]{*}").startTime(0).endTime(System.currentTimeMillis()+1000).pageSize(1000).send(); + client.search("TestTimeSeriesAPIEntity[]{*}").startTime(0).endTime(System.currentTimeMillis() + 1000).pageSize(1000).send(); Assert.assertTrue(response.isSuccess()); GenericServiceAPIResponseEntity response2 = - client.search("TestTimeSeriesAPIEntity[]<@cluster>{count}").startTime(0).endTime(System.currentTimeMillis()+1000).pageSize(1000).send(); + client.search("TestTimeSeriesAPIEntity[]<@cluster>{count}").startTime(0).endTime(System.currentTimeMillis() + 1000).pageSize(1000).send(); Assert.assertTrue(response2.isSuccess()); GenericServiceAPIResponseEntity response3 = - client.search("GenericMetricService[@cluster = \"cluster4ut\" AND @datacenter = \"datacenter4ut\"]{*}").metricName("unit.test.metrics").startTime(0).endTime(System.currentTimeMillis()+1000).pageSize(1000).send(); + client.search("GenericMetricService[@cluster = \"cluster4ut\" AND @datacenter = \"datacenter4ut\"]{*}").metricName("unit.test.metrics").startTime(0).endTime(System.currentTimeMillis() + + 1000).pageSize(1000).send(); Assert.assertTrue(response3.isSuccess()); } diff --git a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestIEagleServiceClient.java b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestIEagleServiceClient.java index a144118cf6..1ef92cf168 100644 --- 
a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestIEagleServiceClient.java +++ b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestIEagleServiceClient.java @@ -30,8 +30,9 @@ public class TestIEagleServiceClient extends ClientTestBase { IEagleServiceClient client; + //@Before - public void setUp(){ + public void setUp() { client = new EagleServiceClientImpl("localhost", EagleConfigFactory.load().getServicePort()); } @@ -47,11 +48,11 @@ public void testCreate() throws IOException, EagleServiceClientException, Illega client = new EagleServiceClientImpl("localhost", EagleConfigFactory.load().getServicePort()); List metricEntityList = new ArrayList(); GenericServiceAPIResponseEntity unTypedResponse = client.create(metricEntityList); - GenericServiceAPIResponseEntity weakTypedResponse = client.create(metricEntityList,GenericMetricEntity.GENERIC_METRIC_SERVICE); - GenericServiceAPIResponseEntity strongTypedResponse = client.create(metricEntityList,GenericMetricEntity.class); + GenericServiceAPIResponseEntity weakTypedResponse = client.create(metricEntityList, GenericMetricEntity.GENERIC_METRIC_SERVICE); + GenericServiceAPIResponseEntity strongTypedResponse = client.create(metricEntityList, GenericMetricEntity.class); GenericServiceAPIResponseEntity weakTypedSearchResponse = client.search("").send(); - if(weakTypedSearchResponse!=null) { + if (weakTypedSearchResponse != null) { Class typedClazz = weakTypedSearchResponse.getType(); List typedEntities = weakTypedSearchResponse.getObj(); } diff --git a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestJackson.java b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestJackson.java index bc0ca57978..45a41b5b33 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestJackson.java +++ 
b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/client/TestJackson.java @@ -16,47 +16,47 @@ */ package org.apache.eagle.service.client; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - import org.codehaus.jackson.JsonGenerationException; import org.codehaus.jackson.map.JsonMappingException; import org.codehaus.jackson.map.ObjectMapper; import org.junit.Assert; import org.junit.Test; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + // http://wiki.fasterxml.com/JacksonPolymorphicDeserialization public class TestJackson { - @Test - public void testBase() throws JsonGenerationException, JsonMappingException, IOException { - List objs = new ArrayList(); - ClassA a = new ClassA(); - a.setA(1); - ClassB b = new ClassB(); - b.setB("2"); - - objs.add(a); - objs.add(b); - - ObjectMapper om = new ObjectMapper(); - om.enableDefaultTyping(); + @Test + public void testBase() throws JsonGenerationException, JsonMappingException, IOException { + List objs = new ArrayList(); + ClassA a = new ClassA(); + a.setA(1); + ClassB b = new ClassB(); + b.setB("2"); + + objs.add(a); + objs.add(b); + + ObjectMapper om = new ObjectMapper(); + om.enableDefaultTyping(); // om.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL); - String value = om.writeValueAsString(objs); - - System.out.println("value = " + value); - - @SuppressWarnings("rawtypes") - List result = om.readValue(value, ArrayList.class); - System.out.println("size = " + result.size()); - Object obj1 = result.get(0); - Object obj2 = result.get(1); - - Assert.assertEquals("ClassA", obj1.getClass().getSimpleName()); - Assert.assertEquals(1, ((ClassA)obj1).getA()); - Assert.assertEquals("ClassB", obj2.getClass().getSimpleName()); - Assert.assertEquals("2", ((ClassB)obj2).getB()); - - } + String value = om.writeValueAsString(objs); + + System.out.println("value = " + value); + + @SuppressWarnings("rawtypes") + List result = 
om.readValue(value, ArrayList.class); + System.out.println("size = " + result.size()); + Object obj1 = result.get(0); + Object obj2 = result.get(1); + + Assert.assertEquals("ClassA", obj1.getClass().getSimpleName()); + Assert.assertEquals(1, ((ClassA) obj1).getA()); + Assert.assertEquals("ClassB", obj2.getClass().getSimpleName()); + Assert.assertEquals("2", ((ClassB) obj2).getB()); + + } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/jackson/TestJacksonMarshalling.java b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/jackson/TestJacksonMarshalling.java index b5689c75ec..06f8aa9f13 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/jackson/TestJacksonMarshalling.java +++ b/eagle-core/eagle-query/eagle-client-base/src/test/java/org/apache/eagle/service/jackson/TestJacksonMarshalling.java @@ -16,134 +16,136 @@ */ package org.apache.eagle.service.jackson; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Assert; - import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.map.ObjectMapper; +import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + public class TestJacksonMarshalling { - private static Logger LOG = LoggerFactory.getLogger(TestJacksonMarshalling.class); - - - - @Test - public void testJSonArrayMarshalling(){ - String[] array = {"cluster", "datacenter", "rack", "hostname"}; - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - String result = null; - try{ - result = mapper.writeValueAsString(array); - }catch(Exception ex){ - LOG.error("Cannot marshall", ex); - Assert.fail("cannot marshall an String array"); - } - 
Assert.assertEquals("[\"cluster\",\"datacenter\",\"rack\",\"hostname\"]", result); - } - - - static class Pojo{ - private String field1; - private String field2; - public String getField1() { - return field1; - } - public void setField1(String field1) { - this.field1 = field1; - } - public String getField2() { - return field2; - } - public void setField2(String field2) { - this.field2 = field2; - } - } - - @Test - public void testPojoMarshalling(){ - Pojo p = new Pojo(); - p.setField1("field1"); - p.setField2("field2"); - - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - String result = null; - try{ - result = mapper.writeValueAsString(p); - }catch(Exception ex){ - LOG.error("Cannot marshall", ex); - Assert.fail("Cannot marshall a Pojo"); - } - System.out.println(result); - Assert.assertEquals("{\"field1\":\"field1\",\"field2\":\"field2\"}", result); - } - - @Test - public void testPojoArrayMashalling(){ - Pojo[] ps = new Pojo[2]; - ps[0] = new Pojo(); - ps[0].setField1("0_field1"); - ps[0].setField2("0_field2"); - ps[1] = new Pojo(); - ps[1].setField1("1_field1"); - ps[1].setField2("1_field2"); - - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - String result = null; - try{ - result = mapper.writeValueAsString(ps); - }catch(Exception ex){ - LOG.error("Cannot marshall", ex); - Assert.fail("Cannot marshall a Pojo array"); - } - System.out.println(result); - Assert.assertEquals("[{\"field1\":\"0_field1\",\"field2\":\"0_field2\"},{\"field1\":\"1_field1\",\"field2\":\"1_field2\"}]", result); - } - - @Test - public void testComplexMapMarshalling(){ - Map, String> map = new HashMap, String>(); - map.put(Arrays.asList("cluster1","dc1"), "123"); - map.put(Arrays.asList("cluster1","dc1"), "456"); - - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - String result = null; - try{ - result = mapper.writeValueAsString(map); - }catch(Exception 
ex){ - LOG.error("Cannot marshall", ex); - Assert.fail("Cannot marshall a complex map"); - } - System.out.println(result); - } - - @Test - public void testMapMapMarshalling(){ - Map> map = new HashMap>(); - Map childmap1 = new HashMap(); - childmap1.put("dc1", "123"); - childmap1.put("dc1", "456"); - map.put("cluster1", childmap1); - - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - String result = null; - try{ - result = mapper.writeValueAsString(map); - }catch(Exception ex){ - LOG.error("Cannot marshall", ex); - Assert.fail("Cannot marshall a complex map"); - } - System.out.println(result); - } + private static Logger LOG = LoggerFactory.getLogger(TestJacksonMarshalling.class); + + + @Test + public void testJSonArrayMarshalling() { + String[] array = {"cluster", "datacenter", "rack", "hostname"}; + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + String result = null; + try { + result = mapper.writeValueAsString(array); + } catch (Exception ex) { + LOG.error("Cannot marshall", ex); + Assert.fail("cannot marshall an String array"); + } + Assert.assertEquals("[\"cluster\",\"datacenter\",\"rack\",\"hostname\"]", result); + } + + + static class Pojo { + private String field1; + private String field2; + + public String getField1() { + return field1; + } + + public void setField1(String field1) { + this.field1 = field1; + } + + public String getField2() { + return field2; + } + + public void setField2(String field2) { + this.field2 = field2; + } + } + + @Test + public void testPojoMarshalling() { + Pojo p = new Pojo(); + p.setField1("field1"); + p.setField2("field2"); + + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + String result = null; + try { + result = mapper.writeValueAsString(p); + } catch (Exception ex) { + LOG.error("Cannot marshall", ex); + Assert.fail("Cannot marshall a Pojo"); + } + System.out.println(result); + 
Assert.assertEquals("{\"field1\":\"field1\",\"field2\":\"field2\"}", result); + } + + @Test + public void testPojoArrayMashalling() { + Pojo[] ps = new Pojo[2]; + ps[0] = new Pojo(); + ps[0].setField1("0_field1"); + ps[0].setField2("0_field2"); + ps[1] = new Pojo(); + ps[1].setField1("1_field1"); + ps[1].setField2("1_field2"); + + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + String result = null; + try { + result = mapper.writeValueAsString(ps); + } catch (Exception ex) { + LOG.error("Cannot marshall", ex); + Assert.fail("Cannot marshall a Pojo array"); + } + System.out.println(result); + Assert.assertEquals("[{\"field1\":\"0_field1\",\"field2\":\"0_field2\"},{\"field1\":\"1_field1\",\"field2\":\"1_field2\"}]", result); + } + + @Test + public void testComplexMapMarshalling() { + Map, String> map = new HashMap, String>(); + map.put(Arrays.asList("cluster1", "dc1"), "123"); + map.put(Arrays.asList("cluster1", "dc1"), "456"); + + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + String result = null; + try { + result = mapper.writeValueAsString(map); + } catch (Exception ex) { + LOG.error("Cannot marshall", ex); + Assert.fail("Cannot marshall a complex map"); + } + System.out.println(result); + } + + @Test + public void testMapMapMarshalling() { + Map> map = new HashMap>(); + Map childmap1 = new HashMap(); + childmap1.put("dc1", "123"); + childmap1.put("dc1", "456"); + map.put("cluster1", childmap1); + + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + String result = null; + try { + result = mapper.writeValueAsString(map); + } catch (Exception ex) { + LOG.error("Cannot marshall", ex); + Assert.fail("Cannot marshall a complex map"); + } + System.out.println(result); + } } diff --git a/eagle-core/eagle-query/eagle-client-base/src/test/resources/log4j.properties 
b/eagle-core/eagle-query/eagle-client-base/src/test/resources/log4j.properties index d59ded6b43..9c6875d8d4 100644 --- a/eagle-core/eagle-query/eagle-client-base/src/test/resources/log4j.properties +++ b/eagle-core/eagle-query/eagle-client-base/src/test/resources/log4j.properties @@ -12,9 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - log4j.rootLogger=INFO, stdout - # standard output log4j.appender.stdout=org.apache.log4j.ConsoleAppender log4j.appender.stdout.layout=org.apache.log4j.PatternLayout diff --git a/eagle-core/eagle-query/eagle-entity-base/pom.xml b/eagle-core/eagle-query/eagle-entity-base/pom.xml index 017375c493..467c6f8999 100755 --- a/eagle-core/eagle-query/eagle-entity-base/pom.xml +++ b/eagle-core/eagle-query/eagle-entity-base/pom.xml @@ -100,4 +100,16 @@ mockito-all + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + true + true + + + + diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityContext.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityContext.java index 17b3fdb37b..14245df181 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityContext.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityContext.java @@ -20,21 +20,21 @@ import java.util.Map; public class EntityContext { - private Map context; + private Map context; - public Map getContext() { - return context; - } - - public EntityContext() { - this.context = new HashMap<>(); - } - - protected EntityContext(EntityContext context) { - this.context = new HashMap<>(context.context); - } - - public EntityContext cloneEntity() { - return new EntityContext(this); - } + public Map getContext() { + return context; + } + + 
public EntityContext() { + this.context = new HashMap<>(); + } + + protected EntityContext(EntityContext context) { + this.context = new HashMap<>(context.context); + } + + public EntityContext cloneEntity() { + return new EntityContext(this); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityJsonModule.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityJsonModule.java index fb86fa63a6..fcc8d41ba8 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityJsonModule.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityJsonModule.java @@ -21,7 +21,7 @@ import java.util.Map; public class EntityJsonModule extends SimpleModule { - public EntityJsonModule(){ - addSerializer(Map.Entry.class,new MapEntrySerializer()); + public EntityJsonModule() { + addSerializer(Map.Entry.class, new MapEntrySerializer()); } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/MapEntrySerializer.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/MapEntrySerializer.java index 4cebbf6c42..ad917f671c 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/MapEntrySerializer.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/MapEntrySerializer.java @@ -31,8 +31,8 @@ public class MapEntrySerializer extends JsonSerializer { @Override public void serialize(Map.Entry entry, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonProcessingException { jsonGenerator.writeStartObject(); - jsonGenerator.writeObjectField(KEY_FIELD,entry.getKey()); - jsonGenerator.writeObjectField(VALUE_FIELD,entry.getValue()); + 
jsonGenerator.writeObjectField(KEY_FIELD, entry.getKey()); + jsonGenerator.writeObjectField(VALUE_FIELD, entry.getValue()); jsonGenerator.writeEndObject(); } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/NoSuchRowException.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/NoSuchRowException.java index 3304bea707..658c20ad7c 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/NoSuchRowException.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/NoSuchRowException.java @@ -16,14 +16,14 @@ */ package org.apache.eagle.log.base.taggedlog; -public class NoSuchRowException extends RuntimeException{ - static final long serialVersionUID = -4538233994503905943L; +public class NoSuchRowException extends RuntimeException { + static final long serialVersionUID = -4538233994503905943L; - public NoSuchRowException(){ - super(); - } - - public NoSuchRowException(String s){ - super(s); - } + public NoSuchRowException() { + super(); + } + + public NoSuchRowException(String s) { + super(s); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/RowkeyAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/RowkeyAPIEntity.java index d72c35a62a..a8b416fcb3 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/RowkeyAPIEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/RowkeyAPIEntity.java @@ -16,66 +16,78 @@ */ package org.apache.eagle.log.base.taggedlog; -import java.util.Map; - import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; 
+import java.util.Map; @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) @XmlType(propOrder = {"success", "exception", "prefixHashCode", "timestamp", "humanTime", "tagNameHashValueHashMap", "fieldNameValueMap"}) public class RowkeyAPIEntity { - boolean success; - String exception; - int prefixHashCode; - long timestamp; - String humanTime; - Map tagNameHashValueHashMap; - Map fieldNameValueMap; - - public boolean isSuccess() { - return success; - } - public void setSuccess(boolean success) { - this.success = success; - } - public String getException() { - return exception; - } - public void setException(String exception) { - this.exception = exception; - } - public String getHumanTime() { - return humanTime; - } - public void setHumanTime(String humanTime) { - this.humanTime = humanTime; - } - public int getPrefixHashCode() { - return prefixHashCode; - } - public void setPrefixHashCode(int prefixHashcode) { - this.prefixHashCode = prefixHashcode; - } - public long getTimestamp() { - return timestamp; - } - public void setTimestamp(long timestamp) { - this.timestamp = timestamp; - } - public Map getTagNameHashValueHashMap() { - return tagNameHashValueHashMap; - } - public void setTagNameHashValueHashMap( - Map tagNameHashValueHashMap) { - this.tagNameHashValueHashMap = tagNameHashValueHashMap; - } - public Map getFieldNameValueMap() { - return fieldNameValueMap; - } - public void setFieldNameValueMap(Map fieldNameValueMap) { - this.fieldNameValueMap = fieldNameValueMap; - } + boolean success; + String exception; + int prefixHashCode; + long timestamp; + String humanTime; + Map tagNameHashValueHashMap; + Map fieldNameValueMap; + + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getException() { + return exception; + } + + public void setException(String exception) { + this.exception = exception; + } + + public String getHumanTime() { + return humanTime; + } + + public 
void setHumanTime(String humanTime) { + this.humanTime = humanTime; + } + + public int getPrefixHashCode() { + return prefixHashCode; + } + + public void setPrefixHashCode(int prefixHashcode) { + this.prefixHashCode = prefixHashcode; + } + + public long getTimestamp() { + return timestamp; + } + + public void setTimestamp(long timestamp) { + this.timestamp = timestamp; + } + + public Map getTagNameHashValueHashMap() { + return tagNameHashValueHashMap; + } + + public void setTagNameHashValueHashMap( + Map tagNameHashValueHashMap) { + this.tagNameHashValueHashMap = tagNameHashValueHashMap; + } + + public Map getFieldNameValueMap() { + return fieldNameValueMap; + } + + public void setFieldNameValueMap(Map fieldNameValueMap) { + this.fieldNameValueMap = fieldNameValueMap; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogAPIEntity.java index b396b06b33..f5589b17bc 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogAPIEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogAPIEntity.java @@ -16,9 +16,6 @@ */ package org.apache.eagle.log.base.taggedlog; -import org.apache.eagle.common.DateTimeUtil; -import org.apache.eagle.log.entity.meta.EntityDefinitionManager; - import com.fasterxml.jackson.annotation.JsonFilter; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; @@ -28,6 +25,8 @@ import com.fasterxml.jackson.databind.ser.PropertyWriter; import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter; import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; +import org.apache.eagle.common.DateTimeUtil; +import org.apache.eagle.log.entity.meta.EntityDefinitionManager; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -72,7 +71,7 @@ public Map getExp() { // track what qualifiers are changed private Set modifiedProperties = new HashSet(); protected PropertyChangeSupport pcs - = new PropertyChangeSupport(this); + = new PropertyChangeSupport(this); public Map getSerializeAlias() { @@ -197,7 +196,7 @@ public void serializeAsField(Object pojo, JsonGenerator jgen, SerializerProvider String writerName = writer.getName(); if (modified.contains(writerName) || basePropertyNames.contains(writerName)) { if ((!entity.isSerializeVerbose() && verboseFields.contains(writerName)) - || (timestamp.equals(writerName) && !EntityDefinitionManager.isTimeSeries(entity.getClass()))) { + || (timestamp.equals(writerName) && !EntityDefinitionManager.isTimeSeries(entity.getClass()))) { // log skip if (LOG.isDebugEnabled()) { LOG.debug("skip field"); diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogObjectMapper.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogObjectMapper.java index 1df1c0d800..fd9592b9f3 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogObjectMapper.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogObjectMapper.java @@ -19,17 +19,19 @@ import java.util.Map; public interface TaggedLogObjectMapper { - /** - * when read, business logic should convert schema-less key/value into business object based on its own schema - * @param entity - * @param qualifierValues - */ - public void populateQualifierValues(TaggedLogAPIEntity entity, Map qualifierValues); - - /** - * when write, business logic should convert business object to schema-less key value - * @param entity - * @return - */ - public Map createQualifierValues(TaggedLogAPIEntity entity); + /** + * when read, business logic should convert schema-less key/value into business 
object based on its own schema. + * + * @param entity + * @param qualifierValues + */ + public void populateQualifierValues(TaggedLogAPIEntity entity, Map qualifierValues); + + /** + * when write, business logic should convert business object to schema-less key value. + * + * @param entity + * @return + */ + public Map createQualifierValues(TaggedLogAPIEntity entity); } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/AbstractHBaseLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/AbstractHBaseLogReader.java index 916706ffdd..24bbb306bb 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/AbstractHBaseLogReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/AbstractHBaseLogReader.java @@ -16,11 +16,13 @@ */ package org.apache.eagle.log.entity; -import org.apache.eagle.common.config.EagleConfigFactory; -import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.common.ByteUtil; import org.apache.eagle.common.EagleBase64Wrapper; -import org.apache.hadoop.hbase.client.*; +import org.apache.eagle.common.config.EagleConfigFactory; +import org.apache.eagle.log.entity.meta.EntityDefinition; +import org.apache.hadoop.hbase.client.HTableFactory; +import org.apache.hadoop.hbase.client.HTableInterface; +import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; @@ -35,206 +37,208 @@ /** * HBase Log Reader basic initialization: *

    - *
  1. Open HBase connection to target HBase table
  2. - *
  3. Generate HBase filter,start and stop row key, output qualifier and Scan
  4. - *
  5. onOpen(HTableInterface,Scan): Callback abstract method
  6. - *
  7. close: Close HBase connection
  8. + *
  9. Open HBase connection to target HBase table
  10. + *
  11. Generate HBase filter,start and stop row key, output qualifier and Scan
  12. + *
  13. onOpen(HTableInterface,Scan): Callback abstract method
  14. + *
  15. close: Close HBase connection.
  16. *
* * @param Reader entity class type - * */ public abstract class AbstractHBaseLogReader implements LogReader { - private static Logger LOG = LoggerFactory.getLogger(AbstractHBaseLogReader.class); - - protected byte[][] qualifiers; - private HTableInterface tbl; - private byte[] startKey; - private byte[] stopKey; - protected Map> searchTags; - private Filter filter; - private Date startTime; - private Date endTime; - -// protected ResultScanner rs; - private boolean isOpen = false; - - /** - * TODO it's ugly that both _ed and prefix fields can hold prefix information, - * prefix field should be in precedence over _ed - */ - private String _prefix; - protected EntityDefinition _ed; - - public AbstractHBaseLogReader(EntityDefinition ed, List partitions, Date startTime, Date endTime, - Filter filter, String lastScanKey, byte[][] outputQualifiers){ - this(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers, null); - } - /** - * This constructor supports partition. - * - * @param ed entity definition - * @param partitions partition values, which is sorted in partition definition order. TODO: in future we need to support - * multiple values for one partition field - * @param startTime start time of the query - * @param endTime end time of the query - * @param filter filter for the hbase scan - * @param lastScanKey the key of last scan - * @param outputQualifiers the bytes of output qualifier names - * @param prefix can be populated from outside world specifically for generic metric reader - */ - public AbstractHBaseLogReader(EntityDefinition ed, List partitions, Date startTime, Date endTime, - Filter filter, String lastScanKey, byte[][] outputQualifiers, String prefix){ - this.startTime = startTime; - this.endTime = endTime; - this._ed = ed; - if (_ed.getPartitions() != null) { - if (partitions == null || _ed.getPartitions().length != partitions.size()) { - throw new IllegalArgumentException("Invalid argument. 
Entity " + ed.getClass().getSimpleName() + " defined " - + "partitions, but argument partitions is null or number of partition values are different!"); - } - } - /** - * decide prefix field value - */ - if(prefix == null || prefix.isEmpty()){ - this._prefix = _ed.getPrefix(); - }else{ - this._prefix = prefix; - } - this.qualifiers = outputQualifiers; - this.filter = filter; - - this.startKey = buildRowKey(this._prefix, partitions, startTime); - - - /** - * startTime should be inclusive, -128 is max value for hbase Bytes comparison, see PureJavaComparer.compareTo - * as an alternative, we can use startTime-1000 and endTime-1000 to make sure startTime is inclusive and endTime is exclusive - */ - this.startKey = ByteUtil.concat(this.startKey, new byte[] {-1, -1,-1,-1}); - if (lastScanKey == null) { - this.stopKey = buildRowKey(this._prefix, partitions, endTime); - // endTime should be exclusive - this.stopKey = ByteUtil.concat(this.stopKey, new byte[] {-1,-1,-1,-1,-1}); - } else { - // build stop key - this.stopKey = EagleBase64Wrapper.decode(lastScanKey); - // TODO to-be-fixed, probably it's an issue because contacting 1 is not - // enough for lexicographical sorting - this.stopKey = ByteUtil.concat(this.stopKey, new byte[] { 1 }); - } - } - - /** - * TODO If the required field is null for a row, then this row will not be fetched. 
That could be a problem for counting - * Need another version of read to strictly get the number of rows which will return all the columns for a column family - */ - @Override - public void open() throws IOException { - if (isOpen) - return; // silently return - try { - tbl = EagleConfigFactory.load().getHTable(_ed.getTable()); - } catch (RuntimeException ex) { - throw new IOException(ex); - } - - Scan s1 = new Scan(); - // reverse timestamp, startRow is stopKey, and stopRow is startKey - s1.setStartRow(stopKey); - s1.setStopRow(startKey); - s1.setFilter(filter); - // TODO the # of cached rows should be minimum of (pagesize and 100) - int cs = EagleConfigFactory.load().getHBaseClientScanCacheSize(); - s1.setCaching(cs); - // TODO not optimized for all applications - s1.setCacheBlocks(true) - ; - // scan specified columnfamily and qualifiers - if(this.qualifiers == null) { - // Filter all - s1.addFamily(_ed.getColumnFamily().getBytes()); - }else{ - for (byte[] qualifier : qualifiers) { - s1.addColumn(_ed.getColumnFamily().getBytes(), qualifier); - } - } - // TODO: Work around https://issues.apache.org/jira/browse/HBASE-2198. More graceful implementation should use SingleColumnValueExcludeFilter, - // but it's complicated in current implementation. - workaroundHBASE2198(s1, filter); - if (LOG.isDebugEnabled()) { - LOG.debug(s1.toString()); - } -// rs = tbl.getScanner(s1); - this.onOpen(tbl,s1); - isOpen = true; - } - - /** - * HBase table connection callback function - * - * @param tbl HBase table connection - * @param scan HBase scan - * @throws IOException - */ - protected abstract void onOpen(HTableInterface tbl,Scan scan) throws IOException; - - /** - *

History

- *
    - *
  • Nov 19th, 2014: Fix for out put all qualifiers
  • - *
- * @param s1 - * @param filter - */ - protected void workaroundHBASE2198(Scan s1, Filter filter) { - if (filter instanceof SingleColumnValueFilter) { - if(this.qualifiers == null){ - s1.addFamily(((SingleColumnValueFilter) filter).getFamily()); - }else { - s1.addColumn(((SingleColumnValueFilter) filter).getFamily(), ((SingleColumnValueFilter) filter).getQualifier()); - } - return; - } - if (filter instanceof FilterList) { - for (Filter f : ((FilterList)filter).getFilters()) { - workaroundHBASE2198(s1, f); - } - } - } - - /** - *

Close:

- * 1. release current table connection - * - * @throws IOException - */ - @Override - public void close() throws IOException { - if(tbl != null){ - new HTableFactory().releaseHTableInterface(tbl); - } -// if(rs != null){ -// rs.close(); -// } - } - - private static byte[] buildRowKey(String prefix, List partitions, Date t){ - final int length = (partitions == null) ? (4 + 8) : (4 + 8 + partitions.size() * 4); - final byte[] key = new byte[length]; - int offset = 0; - ByteUtil.intToBytes(prefix.hashCode(), key, offset); - offset += 4; - if (partitions != null) { - for (String partition : partitions) { - ByteUtil.intToBytes(partition.hashCode(), key, offset); - offset += 4; - } - } - // reverse timestamp - long ts = Long.MAX_VALUE - t.getTime(); - ByteUtil.longToBytes(ts, key, offset); - return key; - } + private static Logger LOG = LoggerFactory.getLogger(AbstractHBaseLogReader.class); + + protected byte[][] qualifiers; + private HTableInterface tbl; + private byte[] startKey; + private byte[] stopKey; + protected Map> searchTags; + private Filter filter; + private Date startTime; + private Date endTime; + + // protected ResultScanner rs; + private boolean isOpen = false; + + /** + * TODO it's ugly that both _ed and prefix fields can hold prefix information, + * prefix field should be in precedence over _ed. + */ + private String prefix; + protected EntityDefinition entityDefinition; + + public AbstractHBaseLogReader(EntityDefinition ed, List partitions, Date startTime, Date endTime, + Filter filter, String lastScanKey, byte[][] outputQualifiers) { + this(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers, null); + } + + /** + * This constructor supports partition. + * + * @param ed entity definition + * @param partitions partition values, which is sorted in partition definition order. 
TODO: in future we need to support + * multiple values for one partition field + * @param startTime start time of the query + * @param endTime end time of the query + * @param filter filter for the hbase scan + * @param lastScanKey the key of last scan + * @param outputQualifiers the bytes of output qualifier names + * @param prefix can be populated from outside world specifically for generic metric reader + */ + public AbstractHBaseLogReader(EntityDefinition ed, List partitions, Date startTime, Date endTime, + Filter filter, String lastScanKey, byte[][] outputQualifiers, String prefix) { + this.startTime = startTime; + this.endTime = endTime; + this.entityDefinition = ed; + if (entityDefinition.getPartitions() != null) { + if (partitions == null || entityDefinition.getPartitions().length != partitions.size()) { + throw new IllegalArgumentException("Invalid argument. Entity " + ed.getClass().getSimpleName() + " defined " + + "partitions, but argument partitions is null or number of partition values are different!"); + } + } + /** + * decide prefix field value + */ + if (prefix == null || prefix.isEmpty()) { + this.prefix = entityDefinition.getPrefix(); + } else { + this.prefix = prefix; + } + this.qualifiers = outputQualifiers; + this.filter = filter; + + this.startKey = buildRowKey(this.prefix, partitions, startTime); + + + /** + * startTime should be inclusive, -128 is max value for hbase Bytes comparison, see PureJavaComparer.compareTo + * as an alternative, we can use startTime-1000 and endTime-1000 to make sure startTime is inclusive and endTime is exclusive + */ + this.startKey = ByteUtil.concat(this.startKey, new byte[] {-1, -1, -1, -1}); + if (lastScanKey == null) { + this.stopKey = buildRowKey(this.prefix, partitions, endTime); + // endTime should be exclusive + this.stopKey = ByteUtil.concat(this.stopKey, new byte[] {-1, -1, -1, -1, -1}); + } else { + // build stop key + this.stopKey = EagleBase64Wrapper.decode(lastScanKey); + // TODO to-be-fixed, 
probably it's an issue because contacting 1 is not + // enough for lexicographical sorting + this.stopKey = ByteUtil.concat(this.stopKey, new byte[] {1}); + } + } + + /** + * TODO If the required field is null for a row, then this row will not be fetched. That could be a problem for counting + * Need another version of read to strictly get the number of rows which will return all the columns for a column family + */ + @Override + public void open() throws IOException { + if (isOpen) { + return; // silently return + } + try { + tbl = EagleConfigFactory.load().getHTable(entityDefinition.getTable()); + } catch (RuntimeException ex) { + throw new IOException(ex); + } + + Scan s1 = new Scan(); + // reverse timestamp, startRow is stopKey, and stopRow is startKey + s1.setStartRow(stopKey); + s1.setStopRow(startKey); + s1.setFilter(filter); + // TODO the # of cached rows should be minimum of (pagesize and 100) + int cs = EagleConfigFactory.load().getHBaseClientScanCacheSize(); + s1.setCaching(cs); + // TODO not optimized for all applications + s1.setCacheBlocks(true) + ; + // scan specified columnfamily and qualifiers + if (this.qualifiers == null) { + // Filter all + s1.addFamily(entityDefinition.getColumnFamily().getBytes()); + } else { + for (byte[] qualifier : qualifiers) { + s1.addColumn(entityDefinition.getColumnFamily().getBytes(), qualifier); + } + } + // TODO: Work around https://issues.apache.org/jira/browse/HBASE-2198. More graceful implementation should use SingleColumnValueExcludeFilter, + // but it's complicated in current implementation. + workaroundHBASE2198(s1, filter); + if (LOG.isDebugEnabled()) { + LOG.debug(s1.toString()); + } + // rs = tbl.getScanner(s1); + this.onOpen(tbl, s1); + isOpen = true; + } + + /** + * HBase table connection callback function. + * + * @param tbl HBase table connection + * @param scan HBase scan + * @throws IOException + */ + protected abstract void onOpen(HTableInterface tbl, Scan scan) throws IOException; + + /** + *

History

+ *
    + *
  • Nov 19th, 2014: Fix for out put all qualifiers.
  • + *
+ * + * @param s1 + * @param filter + */ + protected void workaroundHBASE2198(Scan s1, Filter filter) { + if (filter instanceof SingleColumnValueFilter) { + if (this.qualifiers == null) { + s1.addFamily(((SingleColumnValueFilter) filter).getFamily()); + } else { + s1.addColumn(((SingleColumnValueFilter) filter).getFamily(), ((SingleColumnValueFilter) filter).getQualifier()); + } + return; + } + if (filter instanceof FilterList) { + for (Filter f : ((FilterList) filter).getFilters()) { + workaroundHBASE2198(s1, f); + } + } + } + + /** + *

Close:

+ * 1. release current table connection. + * + * @throws IOException + */ + @Override + public void close() throws IOException { + if (tbl != null) { + new HTableFactory().releaseHTableInterface(tbl); + } + // if(rs != null){ + // rs.close(); + // } + } + + private static byte[] buildRowKey(String prefix, List partitions, Date t) { + final int length = (partitions == null) ? (4 + 8) : (4 + 8 + partitions.size() * 4); + final byte[] key = new byte[length]; + int offset = 0; + ByteUtil.intToBytes(prefix.hashCode(), key, offset); + offset += 4; + if (partitions != null) { + for (String partition : partitions) { + ByteUtil.intToBytes(partition.hashCode(), key, offset); + offset += 4; + } + } + // reverse timestamp + long ts = Long.MAX_VALUE - t.getTime(); + ByteUtil.longToBytes(ts, key, offset); + return key; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/BaseEntityRepository.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/BaseEntityRepository.java index 71253da5c6..f7de525acd 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/BaseEntityRepository.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/BaseEntityRepository.java @@ -18,9 +18,9 @@ import org.apache.eagle.log.entity.repo.EntityRepository; -public class BaseEntityRepository extends EntityRepository { +public class BaseEntityRepository extends EntityRepository { - public BaseEntityRepository() { - entitySet.add(GenericMetricEntity.class); - } + public BaseEntityRepository() { + entitySet.add(GenericMetricEntity.class); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityCreationListener.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityCreationListener.java index 4ad8959f10..b0bb0b4aea 100644 --- 
a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityCreationListener.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityCreationListener.java @@ -19,8 +19,8 @@ import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; /** - * Interface to notify creation event of an entity + * Interface to notify creation event of an entity. */ public interface EntityCreationListener { - public void entityCreated(TaggedLogAPIEntity entity) throws Exception; + public void entityCreated(TaggedLogAPIEntity entity) throws Exception; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityQualifierUtils.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityQualifierUtils.java index 6e5cb5ca96..55613f26f2 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityQualifierUtils.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityQualifierUtils.java @@ -16,11 +16,11 @@ */ package org.apache.eagle.log.entity; +import org.apache.commons.lang.StringEscapeUtils; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; import org.apache.eagle.log.entity.meta.EntitySerDeser; import org.apache.eagle.log.entity.meta.Qualifier; -import org.apache.commons.lang.StringEscapeUtils; import org.apache.hadoop.hbase.KeyValue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,248 +34,265 @@ import java.util.regex.Pattern; public class EntityQualifierUtils { - private final static Logger LOG = LoggerFactory.getLogger(EntityQualifierUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(EntityQualifierUtils.class); + + public static Map keyValuesToMap(List row, EntityDefinition ed) { + Map result = new HashMap(); + for (KeyValue kv : row) { + 
String qualifierName = new String(kv.getQualifier()); + if (!ed.isTag(qualifierName)) { + Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName); + if (qualifier == null) { + qualifier = ed.getQualifierNameMap().get(qualifierName); + } + qualifierName = qualifier.getDisplayName(); + Object value = qualifier.getSerDeser().deserialize(kv.getValue()); + result.put(qualifierName, value); + } else { + result.put(qualifierName, new String(kv.getValue())); + } + } + return result; + } - public static Map keyValuesToMap(List row,EntityDefinition ed){ - Map result = new HashMap(); - for(KeyValue kv:row){ - String qualifierName = new String(kv.getQualifier()); - if(!ed.isTag(qualifierName)){ - Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName); - if(qualifier == null){ - qualifier = ed.getQualifierNameMap().get(qualifierName); - } - qualifierName = qualifier.getDisplayName(); - Object value = qualifier.getSerDeser().deserialize(kv.getValue()); - result.put(qualifierName,value); - }else{ - result.put(qualifierName,new String(kv.getValue())); - } - } - return result; - } + public static Map keyValuesToDoubleMap(List row, EntityDefinition ed) { + Map result = new HashMap(); + for (KeyValue kv : row) { + String qualifierName = new String(kv.getQualifier()); + if (!ed.isTag(qualifierName)) { + Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName); + if (qualifier == null) { + qualifier = ed.getQualifierNameMap().get(qualifierName); + } + qualifierName = qualifier.getDisplayName(); + Object value = qualifier.getSerDeser().deserialize(kv.getValue()); + result.put(qualifierName, convertObjToDouble(value)); + } else { + result.put(qualifierName, Double.NaN); + } + } + return result; + } - public static Map keyValuesToDoubleMap(List row,EntityDefinition ed){ - Map result = new HashMap(); - for(KeyValue kv:row){ - String qualifierName = new String(kv.getQualifier()); - if(!ed.isTag(qualifierName)){ - Qualifier qualifier = 
ed.getDisplayNameMap().get(qualifierName); - if(qualifier == null){ - qualifier = ed.getQualifierNameMap().get(qualifierName); - } - qualifierName = qualifier.getDisplayName(); - Object value = qualifier.getSerDeser().deserialize(kv.getValue()); - result.put(qualifierName,convertObjToDouble(value)); - }else{ - result.put(qualifierName,Double.NaN); - } - } - return result; - } + /** + * Map[Display Name,Double Value]. + * + * @param map + * @param ed + * @return + */ + public static Map bytesMapToDoubleMap(Map map, EntityDefinition ed) { + Map result = new HashMap(); + for (Map.Entry entry : map.entrySet()) { + String qualifierName = entry.getKey(); + Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName); + if (qualifier == null) { + qualifier = ed.getQualifierNameMap().get(qualifierName); + } + if (qualifier != null && entry.getValue() != null) { + qualifierName = qualifier.getDisplayName(); + Object value = qualifier.getSerDeser().deserialize(entry.getValue()); + result.put(qualifierName, convertObjToDouble(value)); + } else { + result.put(qualifierName, null); + } + } + return result; + } - /** - * Map[Display Name,Double Value] - * - * @param map - * @param ed - * @return - */ - public static Map bytesMapToDoubleMap(Map map,EntityDefinition ed){ - Map result = new HashMap(); - for(Map.Entry entry:map.entrySet()){ - String qualifierName = entry.getKey(); - Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName); - if(qualifier == null) qualifier = ed.getQualifierNameMap().get(qualifierName); - if(qualifier!=null && entry.getValue()!=null) { - qualifierName = qualifier.getDisplayName(); - Object value = qualifier.getSerDeser().deserialize(entry.getValue()); - result.put(qualifierName, convertObjToDouble(value)); - }else{ - result.put(qualifierName,null); - } - } - return result; - } + public static byte[] toBytes(EntityDefinition ed, String qualifierName, String qualifierValueInStr) { + // Get field type from entity class + // and skip for 
not-found fields query expression + Object typedValue = null; + EntitySerDeser serDeser = null; + if (ed.isTag(qualifierName)) { + typedValue = qualifierValueInStr; + serDeser = EntityDefinitionManager.getSerDeser(String.class); + } else { + try { + Field field = ed.getEntityClass().getDeclaredField(qualifierName); + Class fieldType = field.getType(); + serDeser = EntityDefinitionManager.getSerDeser(fieldType); + if (serDeser == null) { + throw new IllegalArgumentException("Can't find EntitySerDeser for field: " + qualifierName + "'s type: " + fieldType + + ", so the field is not supported to be filtered yet"); + } + typedValue = convertStringToObject(qualifierValueInStr, fieldType); + } catch (NoSuchFieldException ex) { + // Handle the field not found exception in caller + LOG.error("Field " + qualifierName + " not found in " + ed.getEntityClass()); + throw new IllegalArgumentException("Field " + qualifierName + " not found in " + ed.getEntityClass(), ex); + } + } + return serDeser.serialize(typedValue); + } - public static byte[] toBytes(EntityDefinition ed, String qualifierName, String qualifierValueInStr){ - // Get field type from entity class - // and skip for not-found fields query expression - Object typedValue = null; - EntitySerDeser serDeser = null; - if(ed.isTag(qualifierName)){ - typedValue = qualifierValueInStr; - serDeser = EntityDefinitionManager.getSerDeser(String.class); - }else{ - try{ - Field field = ed.getEntityClass().getDeclaredField(qualifierName); - Class fieldType = field.getType(); - serDeser = EntityDefinitionManager.getSerDeser(fieldType); - if(serDeser == null){ - throw new IllegalArgumentException("Can't find EntitySerDeser for field: "+ qualifierName +"'s type: "+fieldType - +", so the field is not supported to be filtered yet"); - } - typedValue = convertStringToObject(qualifierValueInStr, fieldType); - } catch (NoSuchFieldException ex) { - // Handle the field not found exception in caller - LOG.error("Field " + qualifierName + " not 
found in " + ed.getEntityClass()); - throw new IllegalArgumentException("Field "+qualifierName+" not found in "+ed.getEntityClass(),ex); - } - } - return serDeser.serialize(typedValue); - } + public static Class getType(EntityDefinition ed, String qualifierName) { + Field field; + try { + field = ed.getEntityClass().getDeclaredField(qualifierName); + } catch (NoSuchFieldException e) { + if (LOG.isDebugEnabled()) { + LOG.debug("Field " + qualifierName + " not found in " + ed.getEntityClass()); + } + return null; + } + return field.getType(); + } - public static Class getType(EntityDefinition ed, String qualifierName) { - Field field; - try { - field = ed.getEntityClass().getDeclaredField(qualifierName); - } catch (NoSuchFieldException e) { - if(LOG.isDebugEnabled()) LOG.debug("Field "+qualifierName+" not found in "+ed.getEntityClass()); - return null; - } - return field.getType(); - } + /** + * Not support negative numeric value: + * - http://en.wikipedia.org/wiki/Double-precision_floating-point_format + * + * @param value + * @param type + * @return + */ + public static Object convertStringToObject(String value, Class type) { + Object obj = null; + try { + if (String.class.equals(type)) { + obj = value; + } + if (Long.class.equals(type) || long.class.equals(type)) { + obj = Long.parseLong(value); + // if((Long) obj < 0) throw new IllegalArgumentException("Don't support negative Long yet: "+obj); + } else if (Integer.class.equals(type) || int.class.equals(type)) { + obj = Integer.parseInt(value); + // if((Integer) obj < 0) throw new IllegalArgumentException("Don't support negative Integer yet: "+obj); + } else if (Double.class.equals(type) || double.class.equals(type)) { + obj = Double.parseDouble(value); + // if((Double) obj < 0) throw new IllegalArgumentException("Don't support negative Double yet: "+obj); + } else if (Float.class.equals(type) || float.class.equals(type)) { + obj = Float.parseFloat(value); + // if((Double) obj < 0) throw new 
IllegalArgumentException("Don't support negative Float yet: "+obj); + } else if (Boolean.class.equals(type) || boolean.class.equals(type)) { + obj = Boolean.valueOf(value); + } + if (obj != null) { + return obj; + } + } catch (NumberFormatException ex) { + throw new IllegalArgumentException("Fail to convert string: " + value + " into type of " + type, ex); + } - /** - * Not support negative numeric value: - * - http://en.wikipedia.org/wiki/Double-precision_floating-point_format - * - * @param value - * @param type - * @return - */ - public static Object convertStringToObject(String value, Class type){ - Object obj = null; - try{ - if(String.class.equals(type)){ - obj = value; - }if(Long.class.equals(type) || long.class.equals(type)){ - obj = Long.parseLong(value); - // if((Long) obj < 0) throw new IllegalArgumentException("Don't support negative Long yet: "+obj); - }else if(Integer.class.equals(type) || int.class.equals(type)){ - obj = Integer.parseInt(value); - // if((Integer) obj < 0) throw new IllegalArgumentException("Don't support negative Integer yet: "+obj); - }else if(Double.class.equals(type) || double.class.equals(type)){ - obj = Double.parseDouble(value); - // if((Double) obj < 0) throw new IllegalArgumentException("Don't support negative Double yet: "+obj); - }else if(Float.class.equals(type) || float.class.equals(type)){ - obj = Float.parseFloat(value); - // if((Double) obj < 0) throw new IllegalArgumentException("Don't support negative Float yet: "+obj); - }else if(Boolean.class.equals(type) || boolean.class.equals(type)) { - obj = Boolean.valueOf(value); - } - if(obj != null) return obj; - }catch (NumberFormatException ex){ - throw new IllegalArgumentException("Fail to convert string: "+value +" into type of "+type,ex); - } + throw new IllegalArgumentException("Fail to convert string: " + value + " into type of " + type + ", illegal type: " + type); + } - throw new IllegalArgumentException("Fail to convert string: "+value +" into type of "+type+", 
illegal type: "+type); - } + /** + * @param obj + * @return double value, otherwise Double.NaN + */ + public static double convertObjToDouble(Object obj) { + if (Long.class.equals(obj.getClass()) || long.class.equals(obj.getClass())) { + Long _value = (Long) obj; + return _value.doubleValue(); + } else if (Integer.class.equals(obj.getClass()) || int.class.equals(obj.getClass())) { + Integer _value = (Integer) obj; + return _value.doubleValue(); + } else if (Double.class.equals(obj.getClass()) || double.class.equals(obj.getClass())) { + return (Double) obj; + } else if (Float.class.equals(obj.getClass()) || float.class.equals(obj.getClass())) { + Float _value = (Float) obj; + return _value.doubleValue(); + } else if (Short.class.equals(obj.getClass()) || short.class.equals(obj.getClass())) { + Float _value = (Float) obj; + return _value.doubleValue(); + } else if (Byte.class.equals(obj.getClass()) || byte.class.equals(obj.getClass())) { + Byte _value = (Byte) obj; + return _value.doubleValue(); + } + LOG.warn("Failed to convert object " + obj.toString() + " in type of " + obj.getClass() + " to double"); + return Double.NaN; + } - /** - * - * @param obj - * @return double value, otherwise Double.NaN - */ - public static double convertObjToDouble(Object obj){ - if(Long.class.equals(obj.getClass()) || long.class.equals(obj.getClass())){ - Long _value = (Long) obj; - return _value.doubleValue(); - }else if(Integer.class.equals(obj.getClass()) || int.class.equals(obj.getClass())){ - Integer _value = (Integer) obj; - return _value.doubleValue(); - }else if(Double.class.equals(obj.getClass()) || double.class.equals(obj.getClass())) { - return (Double) obj; - }else if(Float.class.equals(obj.getClass()) || float.class.equals(obj.getClass())) { - Float _value = (Float) obj; - return _value.doubleValue(); - }else if(Short.class.equals(obj.getClass()) || short.class.equals(obj.getClass())) { - Float _value = (Float) obj; - return _value.doubleValue(); - }else 
if(Byte.class.equals(obj.getClass()) || byte.class.equals(obj.getClass())) { - Byte _value = (Byte) obj; - return _value.doubleValue(); - } - LOG.warn("Failed to convert object " + obj.toString() + " in type of " + obj.getClass() + " to double"); - return Double.NaN; - } + /** + * Parse List String as Set without duplicate items + * Support: + *
    + *
  • normal string: ("a","b") => ["a","b"]
  • + *
  • number: (1.5,"b") => [1.5,"b"]
  • + *
  • inner string comma: ("va,lue","value",",") => ["va,lue","value",","]
  • + *
  • inner escaped chars: ("va\"lue","value") => ["va\"lue","value"]
  • + *
  • some bad formats list: ("va"lue","value") => ["va\"lue","value"]
  • + *
+ * Warning: it will not throw exception if the format is not strictly valid. + * @param listValue in format (item1,item2,...) + * @return + */ + public static List parseList(String listValue) { + Matcher matcher = SET_PATTERN.matcher(listValue); + if (matcher.find()) { + String content = matcher.group(1); + List result = new ArrayList(); + StringBuilder str = null; + STATE state = null; + char last = 0; + for (char c : content.toCharArray()) { + if (str == null) { + str = new StringBuilder(); + } + if (c == DOUBLE_QUOTE && last != SLASH) { + // Open or Close String + if (state == STATE.STRING) { + state = null; + } else { + state = STATE.STRING; + } + } else if (c == COMMA && state != STATE.STRING) { + result.add(unescape(str.toString())); + str = null; + last = c; + continue; + } + last = c; + str.append(c); + } + if (str != null) { + result.add(unescape(str.toString())); + } + return result; + } else { + LOG.error("Invalid list value: " + listValue); + throw new IllegalArgumentException("Invalid format of list value: " + listValue + ", must be in format: (item1,item2,...)"); + } + } - /** - * Parse List String as Set without duplicate items - * - *

- * Support: - *
    - *
  • normal string: ("a","b") => ["a","b"]
  • - *
  • number: (1.5,"b") => [1.5,"b"]
  • - *
  • inner string comma: ("va,lue","value",",") => ["va,lue","value",","]
  • - *
  • inner escaped chars: ("va\"lue","value") => ["va\"lue","value"]
  • - *
  • some bad formats list: ("va"lue","value") => ["va\"lue","value"]
  • - *
- * - * Warning: it will not throw exception if the format is not strictly valid - * - * @param listValue in format (item1,item2,...) - * @return - */ - public static List parseList(String listValue){ - Matcher matcher = SET_PATTERN.matcher(listValue); - if(matcher.find()){ - String content = matcher.group(1); - List result = new ArrayList(); - StringBuilder str = null; - STATE state = null; - char last = 0; - for(char c: content.toCharArray()){ - if(str == null) str = new StringBuilder(); - if(c == DOUBLE_QUOTE && last != SLASH){ - // Open or Close String - if(state == STATE.STRING) - state = null; - else state = STATE.STRING; - }else if(c == COMMA && state != STATE.STRING){ - result.add(unescape(str.toString())); - str = null; - last = c; - continue; - } - last = c; - str.append(c); - } - if(str!=null) result.add(unescape(str.toString())); - return result; - }else{ - LOG.error("Invalid list value: " + listValue); - throw new IllegalArgumentException("Invalid format of list value: "+listValue+", must be in format: (item1,item2,...)"); - } - } + private static String unescape(String str) { + int start = 0; + int end = str.length(); + if (str.startsWith("\"")) { + start = start + 1; + } + if (str.endsWith("\"")) { + end = end - 1; + } + str = str.substring(start, end); + return StringEscapeUtils.unescapeJava(str); + } - private static String unescape(String str){ - int start=0,end = str.length(); - if(str.startsWith("\"")) start = start +1; - if(str.endsWith("\"")) end = end -1; - str = str.substring(start,end); - return StringEscapeUtils.unescapeJava(str); - } + private static final Pattern SET_PATTERN = Pattern.compile("^\\((.*)\\)$"); + private static final char COMMA = ','; + private static final char DOUBLE_QUOTE = '"'; + private static final char SLASH = '\\'; - private final static Pattern SET_PATTERN = Pattern.compile("^\\((.*)\\)$"); - private final static char COMMA = ','; - private final static char DOUBLE_QUOTE = '"'; - private final static char SLASH = 
'\\'; - private static enum STATE{ STRING } + private static enum STATE { + STRING + } + // TODO: NOT FINISHED + private static final Map ESCAPE_REGEXP = new HashMap() { + { + this.put("\\.", "\\\\."); + } + }; -// TODO: NOT FINISHED -// private final static Map ESCAPE_REGEXP=new HashMap(){{ -// this.put("\\.","\\\\."); -// }}; -// -// public static String escapeRegExp(String value) { -// String _value = value; -// for(Map.Entry entry:ESCAPE_REGEXP.entrySet()){ -// _value = _value.replace(entry.getKey(),entry.getValue()); -// } -// return _value; -// } + /*public static String escapeRegExp(String value) { + String _value = value; + for(Map.Entry entry:ESCAPE_REGEXP.entrySet()){ + _value = _value.replace(entry.getKey(),entry.getValue()); + } + return _value; + }*/ } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityUniq.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityUniq.java index 36e1e0b21b..0066a0d002 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityUniq.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityUniq.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ /** - * + * */ package org.apache.eagle.log.entity; @@ -24,44 +24,45 @@ import java.util.Map.Entry; -/** - * @since Sep 12, 2014 - */ public class EntityUniq { - - public Map tags; - public Long timestamp; - public long createdTime; // for cache removal; - - public EntityUniq(Map tags, long timestamp) { - this.tags = new HashMap(tags); - this.timestamp = timestamp; - this.createdTime = System.currentTimeMillis(); - } - - @Override - public boolean equals(Object obj) { - if (obj instanceof EntityUniq) { - EntityUniq au = (EntityUniq) obj; - if (tags.size() != au.tags.size()) return false; - for (Entry keyValue : au.tags.entrySet()) { - boolean keyExist = tags.containsKey(keyValue.getKey()); - if ( !keyExist || !tags.get(keyValue.getKey()).equals(keyValue.getValue())) { - return false; - } - } - if (!timestamp.equals(au.timestamp)) return false; - return true; - } - return false; - } - - @Override - public int hashCode() { - int hashCode = 0; - for (String value : tags.values()) { - hashCode ^= value.hashCode(); - } - return hashCode ^= timestamp.hashCode(); - } + + public Map tags; + public Long timestamp; + public long createdTime; // for cache removal; + + public EntityUniq(Map tags, long timestamp) { + this.tags = new HashMap(tags); + this.timestamp = timestamp; + this.createdTime = System.currentTimeMillis(); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof EntityUniq) { + EntityUniq au = (EntityUniq) obj; + if (tags.size() != au.tags.size()) { + return false; + } + for (Entry keyValue : au.tags.entrySet()) { + boolean keyExist = tags.containsKey(keyValue.getKey()); + if (!keyExist || !tags.get(keyValue.getKey()).equals(keyValue.getValue())) { + return false; + } + } + if (!timestamp.equals(au.timestamp)) { + return false; + } + return true; + } + return false; + } + + @Override + public int hashCode() { + int hashCode = 0; + for (String value : tags.values()) { + hashCode ^= value.hashCode(); + } + return hashCode ^= 
timestamp.hashCode(); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericCreateAPIResponseEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericCreateAPIResponseEntity.java index 6ffa621a11..f2a26d858f 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericCreateAPIResponseEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericCreateAPIResponseEntity.java @@ -16,42 +16,46 @@ */ package org.apache.eagle.log.entity; -import java.util.List; +import org.codehaus.jackson.annotate.JsonIgnoreProperties; +import org.codehaus.jackson.map.annotate.JsonSerialize; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; - -import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import org.codehaus.jackson.map.annotate.JsonSerialize; +import java.util.List; @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) @XmlType(propOrder = {"success", "exception", "encodedRowkeys"}) -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) -@JsonIgnoreProperties(ignoreUnknown=true) -public class GenericCreateAPIResponseEntity{ - private boolean success; - private String exception; - private List encodedRowkeys; - - public List getEncodedRowkeys() { - return encodedRowkeys; - } - public void setEncodedRowkeys(List encodedRowkeys) { - this.encodedRowkeys = encodedRowkeys; - } - public boolean isSuccess() { - return success; - } - public void setSuccess(boolean success) { - this.success = success; - } - public String getException() { - return exception; - } - public void setException(String exception) { - this.exception = exception; - } +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) +@JsonIgnoreProperties(ignoreUnknown = true) +public 
class GenericCreateAPIResponseEntity { + private boolean success; + private String exception; + private List encodedRowkeys; + + public List getEncodedRowkeys() { + return encodedRowkeys; + } + + public void setEncodedRowkeys(List encodedRowkeys) { + this.encodedRowkeys = encodedRowkeys; + } + + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getException() { + return exception; + } + + public void setException(String exception) { + this.exception = exception; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityBatchReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityBatchReader.java index 9c42ab2ced..3b2d7e18bd 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityBatchReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityBatchReader.java @@ -23,36 +23,41 @@ import java.util.ArrayList; import java.util.List; -public class GenericEntityBatchReader implements EntityCreationListener{ - private static final Logger LOG = LoggerFactory.getLogger(GenericEntityBatchReader.class); - - private List entities = new ArrayList(); - private StreamReader reader; - - public GenericEntityBatchReader(String serviceName, SearchCondition condition) throws InstantiationException, IllegalAccessException{ - reader = new GenericEntityStreamReader(serviceName, condition); - reader.register(this); - } - - public GenericEntityBatchReader(StreamReader reader) throws InstantiationException, IllegalAccessException{ - this.reader = reader; - reader.register(this); - } - - public long getLastTimestamp() { - return reader.getLastTimestamp(); - } - public long getFirstTimestamp(){ return reader.getFirstTimestamp();} - - @Override - public void 
entityCreated(TaggedLogAPIEntity entity){ - entities.add(entity); - } - - @SuppressWarnings("unchecked") - public List read() throws Exception{ - if(LOG.isDebugEnabled()) LOG.debug("Start reading as batch mode"); - reader.readAsStream(); - return (List)entities; - } +public class GenericEntityBatchReader implements EntityCreationListener { + private static final Logger LOG = LoggerFactory.getLogger(GenericEntityBatchReader.class); + + private List entities = new ArrayList(); + private StreamReader reader; + + public GenericEntityBatchReader(String serviceName, SearchCondition condition) throws InstantiationException, IllegalAccessException { + reader = new GenericEntityStreamReader(serviceName, condition); + reader.register(this); + } + + public GenericEntityBatchReader(StreamReader reader) throws InstantiationException, IllegalAccessException { + this.reader = reader; + reader.register(this); + } + + public long getLastTimestamp() { + return reader.getLastTimestamp(); + } + + public long getFirstTimestamp() { + return reader.getFirstTimestamp(); + } + + @Override + public void entityCreated(TaggedLogAPIEntity entity) { + entities.add(entity); + } + + @SuppressWarnings("unchecked") + public List read() throws Exception { + if (LOG.isDebugEnabled()) { + LOG.debug("Start reading as batch mode"); + } + reader.readAsStream(); + return (List) entities; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityScanStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityScanStreamReader.java index 3f97e7807b..da9087026c 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityScanStreamReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityScanStreamReader.java @@ -16,11 +16,11 @@ */ package org.apache.eagle.log.entity; +import 
org.apache.eagle.common.DateTimeUtil; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.meta.EntityConstants; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; -import org.apache.eagle.common.DateTimeUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,102 +28,103 @@ import java.util.Date; public class GenericEntityScanStreamReader extends StreamReader { - private static final Logger LOG = LoggerFactory.getLogger(GenericEntityScanStreamReader.class); - - private EntityDefinition entityDef; - private SearchCondition condition; - private String prefix; - private long lastTimestamp = 0; - private long firstTimestamp = 0; - - public GenericEntityScanStreamReader(String serviceName, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException{ - this.prefix = prefix; - checkNotNull(serviceName, "serviceName"); - this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName); - checkNotNull(entityDef, "EntityDefinition"); - this.condition = condition; - } + private static final Logger LOG = LoggerFactory.getLogger(GenericEntityScanStreamReader.class); + + private EntityDefinition entityDef; + private SearchCondition condition; + private String prefix; + private long lastTimestamp = 0; + private long firstTimestamp = 0; + + public GenericEntityScanStreamReader(String serviceName, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException { + this.prefix = prefix; + checkNotNull(serviceName, "serviceName"); + this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName); + checkNotNull(entityDef, "EntityDefinition"); + this.condition = condition; + } + + public GenericEntityScanStreamReader(EntityDefinition entityDef, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException { + this.prefix = prefix; 
+ checkNotNull(entityDef, "entityDef"); + this.entityDef = entityDef; + checkNotNull(entityDef, "EntityDefinition"); + this.condition = condition; + } + + public long getLastTimestamp() { + return lastTimestamp; + } - public GenericEntityScanStreamReader(EntityDefinition entityDef, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException{ - this.prefix = prefix; - checkNotNull(entityDef, "entityDef"); - this.entityDef = entityDef; - checkNotNull(entityDef, "EntityDefinition"); - this.condition = condition; - } - - public long getLastTimestamp() { - return lastTimestamp; - } - - private void checkNotNull(Object o, String message){ - if(o == null){ - throw new IllegalArgumentException(message + " should not be null"); - } - } + private void checkNotNull(Object o, String message) { + if (o == null) { + throw new IllegalArgumentException(message + " should not be null"); + } + } - public EntityDefinition getEntityDefinition() { - return entityDef; - } + public EntityDefinition getEntityDefinition() { + return entityDef; + } - public SearchCondition getSearchCondition() { - return condition; - } + public SearchCondition getSearchCondition() { + return condition; + } - @Override - public void readAsStream() throws Exception{ - Date start = null; - Date end = null; - // shortcut to avoid read when pageSize=0 - if(condition.getPageSize() <= 0){ - return; // return nothing - } - // Process the time range if needed - if(entityDef.isTimeSeries()){ - start = DateTimeUtil.humanDateToDate(condition.getStartTime()); - end = DateTimeUtil.humanDateToDate(condition.getEndTime()); - }else{ - start = DateTimeUtil.humanDateToDate(EntityConstants.FIXED_READ_START_HUMANTIME); - end = DateTimeUtil.humanDateToDate(EntityConstants.FIXED_READ_END_HUMANTIME); - } - byte[][] outputQualifiers = null; - if(!condition.isOutputAll()) { - // Generate the output qualifiers - outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef, 
condition.getOutputFields()); - } - HBaseLogReader2 reader = new HBaseLogReader2(entityDef, condition.getPartitionValues(), start, end, condition.getFilter(), condition.getStartRowkey(), outputQualifiers, this.prefix); - try{ - reader.open(); - InternalLog log; - int count = 0; - while ((log = reader.read()) != null) { - TaggedLogAPIEntity entity = HBaseInternalLogHelper.buildEntity(log, entityDef); - if (lastTimestamp < entity.getTimestamp()) { - lastTimestamp = entity.getTimestamp(); - } - if(firstTimestamp > entity.getTimestamp() || firstTimestamp == 0){ - firstTimestamp = entity.getTimestamp(); - } + @Override + public void readAsStream() throws Exception { + Date start = null; + Date end = null; + // shortcut to avoid read when pageSize=0 + if (condition.getPageSize() <= 0) { + return; // return nothing + } + // Process the time range if needed + if (entityDef.isTimeSeries()) { + start = DateTimeUtil.humanDateToDate(condition.getStartTime()); + end = DateTimeUtil.humanDateToDate(condition.getEndTime()); + } else { + start = DateTimeUtil.humanDateToDate(EntityConstants.FIXED_READ_START_HUMANTIME); + end = DateTimeUtil.humanDateToDate(EntityConstants.FIXED_READ_END_HUMANTIME); + } + byte[][] outputQualifiers = null; + if (!condition.isOutputAll()) { + // Generate the output qualifiers + outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef, condition.getOutputFields()); + } + HBaseLogReader2 reader = new HBaseLogReader2(entityDef, condition.getPartitionValues(), start, end, condition.getFilter(), condition.getStartRowkey(), outputQualifiers, this.prefix); + try { + reader.open(); + InternalLog log; + int count = 0; + while ((log = reader.read()) != null) { + TaggedLogAPIEntity entity = HBaseInternalLogHelper.buildEntity(log, entityDef); + if (lastTimestamp < entity.getTimestamp()) { + lastTimestamp = entity.getTimestamp(); + } + if (firstTimestamp > entity.getTimestamp() || firstTimestamp == 0) { + firstTimestamp = entity.getTimestamp(); + } - 
entity.setSerializeVerbose(condition.isOutputVerbose()); - entity.setSerializeAlias(condition.getOutputAlias()); + entity.setSerializeVerbose(condition.isOutputVerbose()); + entity.setSerializeAlias(condition.getOutputAlias()); - for(EntityCreationListener l : _listeners){ - l.entityCreated(entity); - } - if(++count == condition.getPageSize()) - break; - } - }catch(IOException ioe){ - LOG.error("Fail reading log", ioe); - throw ioe; - }finally{ - reader.close(); - } - } + for (EntityCreationListener l : listeners) { + l.entityCreated(entity); + } + if (++count == condition.getPageSize()) { + break; + } + } + } catch (IOException ioe) { + LOG.error("Fail reading log", ioe); + throw ioe; + } finally { + reader.close(); + } + } - @Override - public long getFirstTimestamp() { - return this.firstTimestamp; - } + @Override + public long getFirstTimestamp() { + return this.firstTimestamp; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReader.java index c3d916ec14..1e4d029377 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReader.java @@ -29,97 +29,97 @@ import java.util.List; public class GenericEntityStreamReader extends StreamReader { - private static final Logger LOG = LoggerFactory.getLogger(GenericEntityStreamReader.class); - - private EntityDefinition entityDef; - private SearchCondition condition; - private String prefix; - private StreamReader readerAfterPlan; - - public GenericEntityStreamReader(String serviceName, SearchCondition condition) throws InstantiationException, IllegalAccessException{ - this(serviceName, condition, null); - } - - public GenericEntityStreamReader(EntityDefinition 
entityDef, SearchCondition condition) throws InstantiationException, IllegalAccessException{ - this(entityDef, condition, entityDef.getPrefix()); - } - - public GenericEntityStreamReader(String serviceName, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException{ - this.prefix = prefix; - checkNotNull(serviceName, "serviceName"); - this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName); - checkNotNull(entityDef, "EntityDefinition"); - this.condition = condition; - this.readerAfterPlan = selectQueryReader(); - } - - public GenericEntityStreamReader(EntityDefinition entityDef, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException{ - this.prefix = prefix; - checkNotNull(entityDef, "entityDef"); - this.entityDef = entityDef; - checkNotNull(entityDef, "EntityDefinition"); - this.condition = condition; - this.readerAfterPlan = selectQueryReader(); - } - - private void checkNotNull(Object o, String message){ - if(o == null){ - throw new IllegalArgumentException(message + " should not be null"); - } - } - - public EntityDefinition getEntityDefinition() { - return entityDef; - } - - public SearchCondition getSearchCondition() { - return condition; - } - - @Override - public void readAsStream() throws Exception{ - readerAfterPlan._listeners.addAll(this._listeners); - readerAfterPlan.readAsStream(); - } - - private StreamReader selectQueryReader() throws InstantiationException, IllegalAccessException { - final ORExpression query = condition.getQueryExpression(); - IndexDefinition[] indexDefs = entityDef.getIndexes(); + private static final Logger LOG = LoggerFactory.getLogger(GenericEntityStreamReader.class); + + private EntityDefinition entityDef; + private SearchCondition condition; + private String prefix; + private StreamReader readerAfterPlan; + + public GenericEntityStreamReader(String serviceName, SearchCondition condition) throws InstantiationException, 
IllegalAccessException { + this(serviceName, condition, null); + } + + public GenericEntityStreamReader(EntityDefinition entityDef, SearchCondition condition) throws InstantiationException, IllegalAccessException { + this(entityDef, condition, entityDef.getPrefix()); + } + + public GenericEntityStreamReader(String serviceName, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException { + this.prefix = prefix; + checkNotNull(serviceName, "serviceName"); + this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName); + checkNotNull(entityDef, "EntityDefinition"); + this.condition = condition; + this.readerAfterPlan = selectQueryReader(); + } + + public GenericEntityStreamReader(EntityDefinition entityDef, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException { + this.prefix = prefix; + checkNotNull(entityDef, "entityDef"); + this.entityDef = entityDef; + checkNotNull(entityDef, "EntityDefinition"); + this.condition = condition; + this.readerAfterPlan = selectQueryReader(); + } + + private void checkNotNull(Object o, String message) { + if (o == null) { + throw new IllegalArgumentException(message + " should not be null"); + } + } + + public EntityDefinition getEntityDefinition() { + return entityDef; + } + + public SearchCondition getSearchCondition() { + return condition; + } + + @Override + public void readAsStream() throws Exception { + readerAfterPlan.listeners.addAll(this.listeners); + readerAfterPlan.readAsStream(); + } + + private StreamReader selectQueryReader() throws InstantiationException, IllegalAccessException { + final ORExpression query = condition.getQueryExpression(); + IndexDefinition[] indexDefs = entityDef.getIndexes(); // Index just works with query condition - if (indexDefs != null && condition.getQueryExpression()!=null) { - List rowkeys = new ArrayList<>(); - for (IndexDefinition index : indexDefs) { - // Check unique index first - if 
(index.isUnique()) { - final IndexDefinition.IndexType type = index.canGoThroughIndex(query, rowkeys); - if (!IndexDefinition.IndexType.NON_INDEX.equals(type)) { - LOG.info("Selectd query unique index " + index.getIndexName() + " for query: " + condition.getQueryExpression()); - return new UniqueIndexStreamReader(index, condition, rowkeys); - } - } - } - for (IndexDefinition index : indexDefs) { - // Check non-clustered index - if (!index.isUnique()) { - final IndexDefinition.IndexType type = index.canGoThroughIndex(query, rowkeys); - if (!IndexDefinition.IndexType.NON_INDEX.equals(type)) { - LOG.info("Selectd query non clustered index " + index.getIndexName() + " for query: " + condition.getQueryExpression().toString()); - return new NonClusteredIndexStreamReader(index, condition, rowkeys); - } - } - } - } - return new GenericEntityScanStreamReader(entityDef, condition, this.prefix); - } - - @Override - public long getLastTimestamp() { - return readerAfterPlan.getLastTimestamp(); - } - - @Override - public long getFirstTimestamp() { - return readerAfterPlan.getFirstTimestamp(); - } + if (indexDefs != null && condition.getQueryExpression() != null) { + List rowkeys = new ArrayList<>(); + for (IndexDefinition index : indexDefs) { + // Check unique index first + if (index.isUnique()) { + final IndexDefinition.IndexType type = index.canGoThroughIndex(query, rowkeys); + if (!IndexDefinition.IndexType.NON_INDEX.equals(type)) { + LOG.info("Selectd query unique index " + index.getIndexName() + " for query: " + condition.getQueryExpression()); + return new UniqueIndexStreamReader(index, condition, rowkeys); + } + } + } + for (IndexDefinition index : indexDefs) { + // Check non-clustered index + if (!index.isUnique()) { + final IndexDefinition.IndexType type = index.canGoThroughIndex(query, rowkeys); + if (!IndexDefinition.IndexType.NON_INDEX.equals(type)) { + LOG.info("Selectd query non clustered index " + index.getIndexName() + " for query: " + 
condition.getQueryExpression().toString()); + return new NonClusteredIndexStreamReader(index, condition, rowkeys); + } + } + } + } + return new GenericEntityScanStreamReader(entityDef, condition, this.prefix); + } + + @Override + public long getLastTimestamp() { + return readerAfterPlan.getLastTimestamp(); + } + + @Override + public long getFirstTimestamp() { + return readerAfterPlan.getFirstTimestamp(); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReaderMT.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReaderMT.java index 1946d6ce3e..8d7ee0c928 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReaderMT.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReaderMT.java @@ -16,136 +16,138 @@ */ package org.apache.eagle.log.entity; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.Future; - +import org.apache.eagle.common.DateTimeUtil; import org.apache.eagle.common.config.EagleConfigFactory; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.eagle.common.DateTimeUtil; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.Future; /** * multi-threading stream readers which only applies to time-series entity where we split the query into * different time range - * - * When this class is used together with list query or aggregate query, be aware that the query's behavior could - * be changed for example pageSize does not work well, output sequence is not determined + * + *

When this class is used together with list query or aggregate query, be aware that the query's behavior could + * be changed for example pageSize does not work well, output sequence is not determined. */ -public class GenericEntityStreamReaderMT extends StreamReader{ - private static final Logger LOG = LoggerFactory.getLogger(GenericEntityStreamReaderMT.class); - private List readers = new ArrayList(); - - public GenericEntityStreamReaderMT(String serviceName, SearchCondition condition, int numThreads) throws Exception{ - checkIsTimeSeries(serviceName); - checkNumThreads(numThreads); - long queryStartTime = DateTimeUtil.humanDateToSeconds(condition.getStartTime())*1000; - long queryEndTime = DateTimeUtil.humanDateToSeconds(condition.getEndTime())*1000; - long subStartTime = queryStartTime; - long subEndTime = 0; - long interval = (queryEndTime-queryStartTime) / numThreads; - for(int i=0; i= 1"); - } - } - - /** - * default to 2 threads - * @param serviceName - * @param condition - */ - public GenericEntityStreamReaderMT(String serviceName, SearchCondition condition) throws Exception{ - this(serviceName, condition, 2); - } - - @Override - public void readAsStream() throws Exception{ - // populate listeners to all readers - for(EntityCreationListener l : _listeners){ - for(GenericEntityStreamReader r : readers){ - r.register(l); - } - } +public class GenericEntityStreamReaderMT extends StreamReader { + private static final Logger LOG = LoggerFactory.getLogger(GenericEntityStreamReaderMT.class); + private List readers = new ArrayList(); + + public GenericEntityStreamReaderMT(String serviceName, SearchCondition condition, int numThreads) throws Exception { + checkIsTimeSeries(serviceName); + checkNumThreads(numThreads); + long queryStartTime = DateTimeUtil.humanDateToSeconds(condition.getStartTime()) * 1000; + long queryEndTime = DateTimeUtil.humanDateToSeconds(condition.getEndTime()) * 1000; + long subStartTime = queryStartTime; + long subEndTime = 0; + long interval 
= (queryEndTime - queryStartTime) / numThreads; + for (int i = 0; i < numThreads; i++) { + // split search condition by time range + subStartTime = queryStartTime + i * interval; + if (i == numThreads - 1) { + subEndTime = queryEndTime; + } else { + subEndTime = subStartTime + interval; + } + String strStartTime = DateTimeUtil.millisecondsToHumanDateWithSeconds(subStartTime); + String strEndTime = DateTimeUtil.millisecondsToHumanDateWithSeconds(subEndTime); + SearchCondition sc = new SearchCondition(condition); + sc.setStartTime(strStartTime); + sc.setEndTime(strEndTime); + GenericEntityStreamReader reader = new GenericEntityStreamReader(serviceName, sc); + readers.add(reader); + } + } + + private void checkIsTimeSeries(String serviceName) throws Exception { + EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName); + if (!ed.isTimeSeries()) { + throw new IllegalArgumentException("Multi-threading stream reader must be applied to time series table"); + } + } + + private void checkNumThreads(int numThreads) { + if (numThreads <= 0) { + throw new IllegalArgumentException("Multi-threading stream reader must have numThreads >= 1"); + } + } + + /** + * default to 2 threads. 
+ * + * @param serviceName + * @param condition + */ + public GenericEntityStreamReaderMT(String serviceName, SearchCondition condition) throws Exception { + this(serviceName, condition, 2); + } + + @Override + public void readAsStream() throws Exception { + // populate listeners to all readers + for (EntityCreationListener l : listeners) { + for (GenericEntityStreamReader r : readers) { + r.register(l); + } + } + + List> futures = new ArrayList>(); + for (GenericEntityStreamReader r : readers) { + SingleReader reader = new SingleReader(r); + Future readFuture = EagleConfigFactory.load().getExecutor().submit(reader); + futures.add(readFuture); + } + + // join threads and check exceptions + for (Future future : futures) { + try { + future.get(); + } catch (Exception ex) { + LOG.error("Error in read", ex); + throw ex; + } + } + } + + private static class SingleReader implements Callable { + private GenericEntityStreamReader reader; + + public SingleReader(GenericEntityStreamReader reader) { + this.reader = reader; + } - List> futures = new ArrayList>(); - for(GenericEntityStreamReader r : readers){ - SingleReader reader = new SingleReader(r); - Future readFuture = EagleConfigFactory.load().getExecutor().submit(reader); - futures.add(readFuture); - } - - // join threads and check exceptions - for(Future future : futures){ - try{ - future.get(); - }catch(Exception ex){ - LOG.error("Error in read", ex); - throw ex; - } - } - } - - private static class SingleReader implements Callable{ - private GenericEntityStreamReader reader; - public SingleReader(GenericEntityStreamReader reader){ - this.reader = reader; - } - @Override - public Void call() throws Exception{ - reader.readAsStream(); - return null; - } - } + @Override + public Void call() throws Exception { + reader.readAsStream(); + return null; + } + } - @Override - public long getLastTimestamp() { - long lastTimestamp = 0; - for (GenericEntityStreamReader reader : readers) { - if (lastTimestamp < 
reader.getLastTimestamp()) { - lastTimestamp = reader.getLastTimestamp(); - } - } - return lastTimestamp; - } + @Override + public long getLastTimestamp() { + long lastTimestamp = 0; + for (GenericEntityStreamReader reader : readers) { + if (lastTimestamp < reader.getLastTimestamp()) { + lastTimestamp = reader.getLastTimestamp(); + } + } + return lastTimestamp; + } - @Override - public long getFirstTimestamp() { - long firstTimestamp = 0; - for (GenericEntityStreamReader reader : readers) { - if (firstTimestamp > reader.getLastTimestamp() || firstTimestamp == 0) { - firstTimestamp = reader.getLastTimestamp(); - } - } - return firstTimestamp; - } + @Override + public long getFirstTimestamp() { + long firstTimestamp = 0; + for (GenericEntityStreamReader reader : readers) { + if (firstTimestamp > reader.getLastTimestamp() || firstTimestamp == 0) { + firstTimestamp = reader.getLastTimestamp(); + } + } + return firstTimestamp; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityWriter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityWriter.java index 5c8b12dab2..2c7aa45997 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityWriter.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityWriter.java @@ -16,10 +16,10 @@ */ package org.apache.eagle.log.entity; +import org.apache.eagle.common.EagleBase64Wrapper; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; -import org.apache.eagle.common.EagleBase64Wrapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -27,52 +27,47 @@ import java.util.List; public class GenericEntityWriter { - private static final Logger LOG = 
LoggerFactory.getLogger(GenericEntityWriter.class); - private EntityDefinition entityDef; + private static final Logger LOG = LoggerFactory.getLogger(GenericEntityWriter.class); + private EntityDefinition entityDef; + + public GenericEntityWriter(String serviceName) throws InstantiationException, IllegalAccessException { + this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName); + checkNotNull(entityDef, "serviceName"); + } + + public GenericEntityWriter(EntityDefinition entityDef) throws InstantiationException, IllegalAccessException { + this.entityDef = entityDef; + checkNotNull(entityDef, "serviceName"); + } - public GenericEntityWriter(String serviceName) throws InstantiationException, IllegalAccessException{ - this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName); - checkNotNull(entityDef, "serviceName"); - } + private void checkNotNull(Object o, String message) { + if (o == null) { + throw new IllegalArgumentException(message + " should not be null"); + } + } - public GenericEntityWriter(EntityDefinition entityDef) throws InstantiationException, IllegalAccessException{ - this.entityDef = entityDef; - checkNotNull(entityDef, "serviceName"); - } - - private void checkNotNull(Object o, String message) { - if(o == null){ - throw new IllegalArgumentException(message + " should not be null"); - } - } + public List write(List entities) throws Exception { + HBaseLogWriter writer = new HBaseLogWriter(entityDef.getTable(), entityDef.getColumnFamily()); + List rowkeys = new ArrayList(entities.size()); + List logs = new ArrayList(entities.size()); - /** - * @param entities - * @return row keys - * @throws Exception - */ - public List write(List entities) throws Exception{ - HBaseLogWriter writer = new HBaseLogWriter(entityDef.getTable(), entityDef.getColumnFamily()); - List rowkeys = new ArrayList(entities.size()); - List logs = new ArrayList(entities.size()); - - try{ - writer.open(); - for(TaggedLogAPIEntity entity : 
entities){ - final InternalLog entityLog = HBaseInternalLogHelper.convertToInternalLog(entity, entityDef); - logs.add(entityLog); - } - List bRowkeys = writer.write(logs); - for (byte[] rowkey : bRowkeys) { - rowkeys.add(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey)); - } + try { + writer.open(); + for (TaggedLogAPIEntity entity : entities) { + final InternalLog entityLog = HBaseInternalLogHelper.convertToInternalLog(entity, entityDef); + logs.add(entityLog); + } + List bRowkeys = writer.write(logs); + for (byte[] rowkey : bRowkeys) { + rowkeys.add(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey)); + } - }catch(Exception ex){ - LOG.error("fail writing tagged log", ex); - throw ex; - }finally{ - writer.close(); - } - return rowkeys; - } + } catch (Exception ex) { + LOG.error("fail writing tagged log", ex); + throw ex; + } finally { + writer.close(); + } + return rowkeys; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntity.java index b7260f8fd6..afbd2d32dd 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntity.java @@ -22,32 +22,32 @@ /** * GenericMetricEntity should use prefix field which is extended from TaggedLogAPIEntity as metric name - * metric name is used to partition the metric tables + * metric name is used to partition the metric tables. 
*/ -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) @Table("eagle_metric") @ColumnFamily("f") @Prefix(GenericMetricEntity.GENERIC_METRIC_PREFIX_PLACE_HOLDER) @Service(GenericMetricEntity.GENERIC_METRIC_SERVICE) @TimeSeries(true) -@Metric(interval=60000) +@Metric(interval = 60000) @ServicePath(path = "/metric") // TODO: -@Tags({"site","application","policyId","alertExecutorId", "streamName","source","partitionSeq"}) +@Tags( {"site", "application", "policyId", "alertExecutorId", "streamName", "source", "partitionSeq"}) public class GenericMetricEntity extends TaggedLogAPIEntity { - public static final String GENERIC_METRIC_SERVICE = "GenericMetricService"; - public static final String GENERIC_METRIC_PREFIX_PLACE_HOLDER = "GENERIC_METRIC_PREFIX_PLACEHODLER"; - public static final String VALUE_FIELD ="value"; + public static final String GENERIC_METRIC_SERVICE = "GenericMetricService"; + public static final String GENERIC_METRIC_PREFIX_PLACE_HOLDER = "GENERIC_METRIC_PREFIX_PLACEHODLER"; + public static final String VALUE_FIELD = "value"; - @Column("a") - private double[] value; + @Column("a") + private double[] value; - public double[] getValue() { - return value; - } + public double[] getValue() { + return value; + } - public void setValue(double[] value) { - this.value = value; - pcs.firePropertyChange("value", null, null); - } + public void setValue(double[] value) { + this.value = value; + pcs.firePropertyChange("value", null, null); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityBatchReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityBatchReader.java index 84b02ae051..61c8721721 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityBatchReader.java +++ 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityBatchReader.java @@ -23,32 +23,36 @@ import java.util.ArrayList; import java.util.List; -public class GenericMetricEntityBatchReader implements EntityCreationListener{ - private static final Logger LOG = LoggerFactory.getLogger(GenericEntityBatchReader.class); - - private List entities = new ArrayList(); - private GenericEntityStreamReader reader; - - public GenericMetricEntityBatchReader(String metricName, SearchCondition condition) throws Exception{ - reader = new GenericEntityStreamReader(GenericMetricEntity.GENERIC_METRIC_SERVICE, condition, metricName); - } - - public long getLastTimestamp() { - return reader.getLastTimestamp(); - } - public long getFirstTimestamp() { - return reader.getFirstTimestamp(); - } - @Override - public void entityCreated(TaggedLogAPIEntity entity){ - entities.add(entity); - } - - @SuppressWarnings("unchecked") - public List read() throws Exception{ - if(LOG.isDebugEnabled()) LOG.debug("Start reading as batch mode"); - reader.register(this); - reader.readAsStream(); - return (List)entities; - } +public class GenericMetricEntityBatchReader implements EntityCreationListener { + private static final Logger LOG = LoggerFactory.getLogger(GenericEntityBatchReader.class); + + private List entities = new ArrayList(); + private GenericEntityStreamReader reader; + + public GenericMetricEntityBatchReader(String metricName, SearchCondition condition) throws Exception { + reader = new GenericEntityStreamReader(GenericMetricEntity.GENERIC_METRIC_SERVICE, condition, metricName); + } + + public long getLastTimestamp() { + return reader.getLastTimestamp(); + } + + public long getFirstTimestamp() { + return reader.getFirstTimestamp(); + } + + @Override + public void entityCreated(TaggedLogAPIEntity entity) { + entities.add(entity); + } + + @SuppressWarnings("unchecked") + public List read() throws Exception { + if (LOG.isDebugEnabled()) { + 
LOG.debug("Start reading as batch mode"); + } + reader.register(this); + reader.readAsStream(); + return (List) entities; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityDecompactionStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityDecompactionStreamReader.java index b1dd64c84f..6eca07ac1e 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityDecompactionStreamReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityDecompactionStreamReader.java @@ -16,83 +16,85 @@ */ package org.apache.eagle.log.entity; +import org.apache.eagle.common.DateTimeUtil; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; -import org.apache.eagle.common.DateTimeUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.text.ParseException; -public class GenericMetricEntityDecompactionStreamReader extends StreamReader implements EntityCreationListener{ - @SuppressWarnings("unused") - private static final Logger LOG = LoggerFactory.getLogger(GenericMetricEntityDecompactionStreamReader.class); - private GenericEntityStreamReader reader; - private EntityDefinition ed; - private String serviceName = GenericMetricEntity.GENERIC_METRIC_SERVICE; - private long start; - private long end; - private GenericMetricShadowEntity single = new GenericMetricShadowEntity(); - - /** - * it makes sense that serviceName should not be provided while metric name should be provided as prefix - * @param metricName - * @param condition - * @throws InstantiationException - * @throws IllegalAccessException - * @throws ParseException - */ - public GenericMetricEntityDecompactionStreamReader(String 
metricName, SearchCondition condition) throws InstantiationException, IllegalAccessException, ParseException{ - ed = EntityDefinitionManager.getEntityByServiceName(serviceName); - checkIsMetric(ed); - reader = new GenericEntityStreamReader(serviceName, condition, metricName); - start = DateTimeUtil.humanDateToSeconds(condition.getStartTime())*1000; - end = DateTimeUtil.humanDateToSeconds(condition.getEndTime())*1000; - } - - private void checkIsMetric(EntityDefinition ed){ - if(ed.getMetricDefinition() == null) - throw new IllegalArgumentException("Only metric entity comes here"); - } - - @Override - public void entityCreated(TaggedLogAPIEntity entity) throws Exception{ - GenericMetricEntity e = (GenericMetricEntity)entity; - double[] value = e.getValue(); - if(value != null) { - int count =value.length; - @SuppressWarnings("unused") - Class cls = ed.getMetricDefinition().getSingleTimestampEntityClass(); - for (int i = 0; i < count; i++) { - long ts = entity.getTimestamp() + i * ed.getMetricDefinition().getInterval(); - // exclude those entity which is not within the time range in search condition. 
[start, end) - if (ts < start || ts >= end) { - continue; - } - single.setTimestamp(ts); - single.setTags(entity.getTags()); - single.setValue(e.getValue()[i]); - for (EntityCreationListener l : _listeners) { - l.entityCreated(single); - } - } - } - } - - @Override - public void readAsStream() throws Exception{ - reader.register(this); - reader.readAsStream(); - } +public class GenericMetricEntityDecompactionStreamReader extends StreamReader implements EntityCreationListener { + @SuppressWarnings("unused") + private static final Logger LOG = LoggerFactory.getLogger(GenericMetricEntityDecompactionStreamReader.class); + private GenericEntityStreamReader reader; + private EntityDefinition ed; + private String serviceName = GenericMetricEntity.GENERIC_METRIC_SERVICE; + private long start; + private long end; + private GenericMetricShadowEntity single = new GenericMetricShadowEntity(); + + /** + * it makes sense that serviceName should not be provided while metric name should be provided as prefix. 
+ * + * @param metricName + * @param condition + * @throws InstantiationException + * @throws IllegalAccessException + * @throws ParseException + */ + public GenericMetricEntityDecompactionStreamReader(String metricName, SearchCondition condition) throws InstantiationException, IllegalAccessException, ParseException { + ed = EntityDefinitionManager.getEntityByServiceName(serviceName); + checkIsMetric(ed); + reader = new GenericEntityStreamReader(serviceName, condition, metricName); + start = DateTimeUtil.humanDateToSeconds(condition.getStartTime()) * 1000; + end = DateTimeUtil.humanDateToSeconds(condition.getEndTime()) * 1000; + } + + private void checkIsMetric(EntityDefinition ed) { + if (ed.getMetricDefinition() == null) { + throw new IllegalArgumentException("Only metric entity comes here"); + } + } + + @Override + public void entityCreated(TaggedLogAPIEntity entity) throws Exception { + GenericMetricEntity e = (GenericMetricEntity) entity; + double[] value = e.getValue(); + if (value != null) { + int count = value.length; + @SuppressWarnings("unused") + Class cls = ed.getMetricDefinition().getSingleTimestampEntityClass(); + for (int i = 0; i < count; i++) { + long ts = entity.getTimestamp() + i * ed.getMetricDefinition().getInterval(); + // exclude those entity which is not within the time range in search condition. 
[start, end) + if (ts < start || ts >= end) { + continue; + } + single.setTimestamp(ts); + single.setTags(entity.getTags()); + single.setValue(e.getValue()[i]); + for (EntityCreationListener l : listeners) { + l.entityCreated(single); + } + } + } + } + + @Override + public void readAsStream() throws Exception { + reader.register(this); + reader.readAsStream(); + } - @Override - public long getLastTimestamp() { - return reader.getLastTimestamp(); - } + @Override + public long getLastTimestamp() { + return reader.getLastTimestamp(); + } - @Override - public long getFirstTimestamp() { - return reader.getFirstTimestamp(); - } + @Override + public long getFirstTimestamp() { + return reader.getFirstTimestamp(); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricShadowEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricShadowEntity.java index acd1290a00..537677b4dd 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricShadowEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricShadowEntity.java @@ -19,16 +19,16 @@ import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; /** - * just a shadow class to avoid dynamically create the class and instantiate using reflection + * just a shadow class to avoid dynamically create the class and instantiate using reflection. 
*/ public class GenericMetricShadowEntity extends TaggedLogAPIEntity { - private double value; + private double value; - public double getValue() { - return value; - } + public double getValue() { + return value; + } - public void setValue(double value) { - this.value = value; - } + public void setValue(double value) { + this.value = value; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntity.java index 6869c7cd74..49f4ce56ed 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntity.java @@ -29,30 +29,31 @@ import java.util.Map; /** - * The only GenericServiceAPIResponseEntity for both client and server side + * The only GenericServiceAPIResponseEntity for both client and server side. * * @see GenericServiceAPIResponseEntityDeserializer */ @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) -@XmlType(propOrder = {"success","exception","meta","type","obj"}) -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@XmlType(propOrder = {"success", "exception", "meta", "type", "obj"}) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) @JsonDeserialize(using = GenericServiceAPIResponseEntityDeserializer.class) -@JsonIgnoreProperties(ignoreUnknown=true) -public class GenericServiceAPIResponseEntity{ +@JsonIgnoreProperties(ignoreUnknown = true) +public class GenericServiceAPIResponseEntity { /** - * Please use primitive type of value in meta as possible + * Please use primitive type of value in meta as possible. 
*/ - private Map meta; - private boolean success; - private String exception; + private Map meta; + private boolean success; + private String exception; private List obj; private Class type; - public GenericServiceAPIResponseEntity(){ + public GenericServiceAPIResponseEntity() { // default constructor } - public GenericServiceAPIResponseEntity(Class type){ + + public GenericServiceAPIResponseEntity(Class type) { this.setType(type); } @@ -72,7 +73,7 @@ public void setObj(List obj) { this.obj = obj; } - public void setObj(List obj,Class type) { + public void setObj(List obj, Class type) { this.setObj(obj); this.setType(type); } @@ -82,19 +83,19 @@ public Class getType() { } /** - * Set the first object's class as type + * Set the first object's class as type. */ @SuppressWarnings("unused") - public void setTypeByObj(){ - for(T t:this.obj){ - if(this.type == null && t!=null){ + public void setTypeByObj() { + for (T t : this.obj) { + if (this.type == null && t != null) { this.type = (Class) t.getClass(); } } } /** - * can explicitly change type class + * can explicitly change type class. 
* * @param type */ @@ -102,17 +103,19 @@ public void setType(Class type) { this.type = type; } - public boolean isSuccess() { - return success; - } - public void setSuccess(boolean success) { - this.success = success; - } - public String getException() { - return exception; - } - - public void setException(Exception exceptionObj){ + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getException() { + return exception; + } + + public void setException(Exception exceptionObj) { this.exception = EagleExceptionWrapper.wrap(exceptionObj); } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntityDeserializer.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntityDeserializer.java index 836295b41d..42dd9b432d 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntityDeserializer.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntityDeserializer.java @@ -27,17 +27,18 @@ import com.google.common.base.Preconditions; import java.io.IOException; -import java.util.*; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + -/** - * @since 3/18/15 - */ public class GenericServiceAPIResponseEntityDeserializer extends JsonDeserializer { - private final static String META_FIELD="meta"; - private final static String SUCCESS_FIELD="success"; - private final static String EXCEPTION_FIELD="exception"; - private final static String OBJ_FIELD="obj"; - private final static String TYPE_FIELD="type"; + private static final String META_FIELD = "meta"; + private static final String SUCCESS_FIELD = "success"; + private static final String EXCEPTION_FIELD = "exception"; + 
private static final String OBJ_FIELD = "obj"; + private static final String TYPE_FIELD = "type"; @Override public GenericServiceAPIResponseEntity deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { @@ -45,40 +46,40 @@ public GenericServiceAPIResponseEntity deserialize(JsonParser jp, Deserializatio ObjectCodec objectCodec = jp.getCodec(); JsonNode rootNode = jp.getCodec().readTree(jp); - if(rootNode.isObject()){ - Iterator> fields = rootNode.fields(); + if (rootNode.isObject()) { + Iterator> fields = rootNode.fields(); JsonNode objNode = null; - while(fields.hasNext()){ - Map.Entry field = fields.next(); - if (META_FIELD.equals(field.getKey()) && field.getValue() != null) + while (fields.hasNext()) { + Map.Entry field = fields.next(); + if (META_FIELD.equals(field.getKey()) && field.getValue() != null) { entity.setMeta(objectCodec.readValue(field.getValue().traverse(), Map.class)); - else if(SUCCESS_FIELD.equals(field.getKey()) && field.getValue() != null){ + } else if (SUCCESS_FIELD.equals(field.getKey()) && field.getValue() != null) { entity.setSuccess(field.getValue().booleanValue()); - }else if(EXCEPTION_FIELD.equals(field.getKey()) && field.getValue() != null){ + } else if (EXCEPTION_FIELD.equals(field.getKey()) && field.getValue() != null) { entity.setException(new Exception(field.getValue().textValue())); - }else if(TYPE_FIELD.endsWith(field.getKey()) && field.getValue() != null){ - Preconditions.checkNotNull(field.getValue().textValue(),"Response type class is null"); + } else if (TYPE_FIELD.endsWith(field.getKey()) && field.getValue() != null) { + Preconditions.checkNotNull(field.getValue().textValue(), "Response type class is null"); try { entity.setType(Class.forName(field.getValue().textValue())); } catch (ClassNotFoundException e) { throw new IOException(e); } - }else if(OBJ_FIELD.equals(field.getKey()) && field.getValue() != null){ + } else if (OBJ_FIELD.equals(field.getKey()) && field.getValue() != 
null) { objNode = field.getValue(); } } - if(objNode!=null) { - JavaType collectionType=null; + if (objNode != null) { + JavaType collectionType = null; if (entity.getType() != null) { collectionType = TypeFactory.defaultInstance().constructCollectionType(LinkedList.class, entity.getType()); - }else{ + } else { collectionType = TypeFactory.defaultInstance().constructCollectionType(LinkedList.class, Map.class); } List obj = objectCodec.readValue(objNode.traverse(), collectionType); entity.setObj(obj); } - }else{ + } else { throw new IOException("root node is not object"); } return entity; diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseInternalLogHelper.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseInternalLogHelper.java index 7a38033690..4df4006e0a 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseInternalLogHelper.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseInternalLogHelper.java @@ -16,12 +16,12 @@ */ package org.apache.eagle.log.entity; +import org.apache.eagle.common.ByteUtil; +import org.apache.eagle.common.EagleBase64Wrapper; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.log.expression.ExpressionParser; import org.apache.eagle.log.entity.meta.*; +import org.apache.eagle.log.expression.ExpressionParser; import org.apache.eagle.query.parser.TokenConstant; -import org.apache.eagle.common.ByteUtil; -import org.apache.eagle.common.EagleBase64Wrapper; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Result; import org.slf4j.Logger; @@ -30,216 +30,221 @@ import java.util.*; public class HBaseInternalLogHelper { - private final static Logger LOG = LoggerFactory.getLogger(HBaseInternalLogHelper.class); - - private static final EntitySerDeserializer ENTITY_SERDESER = new 
EntitySerDeserializer(); - - /** - * - * @param ed - * @param r - * @param qualifiers if null, return all qualifiers defined in ed - * @return - */ - public static InternalLog parse(EntityDefinition ed, Result r, byte[][] qualifiers) { - final byte[] row = r.getRow(); - // skip the first 4 bytes : prefix - final int offset = (ed.getPartitions() == null) ? (4) : (4 + ed.getPartitions().length * 4); - long timestamp = ByteUtil.bytesToLong(row, offset); - // reverse timestamp - timestamp = Long.MAX_VALUE - timestamp; - final byte[] family = ed.getColumnFamily().getBytes(); - final Map allQualifierValues = new HashMap(); - - if (qualifiers != null) { - int count = qualifiers.length; - final byte[][] values = new byte[count][]; - for (int i = 0; i < count; i++) { - // TODO if returned value is null, it means no this column for this row, so why set null to the object? - values[i] = r.getValue(family, qualifiers[i]); - allQualifierValues.put(new String(qualifiers[i]), values[i]); - } - }else{ - // return all qualifiers - for(KeyValue kv:r.list()){ - byte[] qualifier = kv.getQualifier(); - byte[] value = kv.getValue(); - allQualifierValues.put(new String(qualifier),value); - } - } - final InternalLog log = buildObject(ed, row, timestamp, allQualifierValues); - return log; - } - - /** - * - * @param ed - * @param row - * @param timestamp - * @param allQualifierValues Map < Qualifier name (not display name),Value in bytes array > - * @return - */ - public static InternalLog buildObject(EntityDefinition ed, byte[] row, long timestamp, Map allQualifierValues) { - InternalLog log = new InternalLog(); - String myRow = EagleBase64Wrapper.encodeByteArray2URLSafeString(row); - log.setEncodedRowkey(myRow); - log.setPrefix(ed.getPrefix()); - log.setTimestamp(timestamp); - - Map logQualifierValues = new HashMap(); - Map logTags = new HashMap(); - Map extra = null; - - Map doubleMap = null; - // handle with metric - boolean isMetricEntity = 
GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(ed.getService()); - double[] metricValueArray = null; - - for (Map.Entry entry : allQualifierValues.entrySet()) { - if (ed.isTag(entry.getKey())) { - if (entry.getValue() != null) { - logTags.put(entry.getKey(), new String(entry.getValue())); - }else if (TokenConstant.isExpression(entry.getKey())){ - if(doubleMap == null) doubleMap = EntityQualifierUtils.bytesMapToDoubleMap(allQualifierValues, ed); - // Caculate expression based fields - String expression = TokenConstant.parseExpressionContent(entry.getKey()); - if (extra == null) extra = new HashMap(); - - // Evaluation expression as output based on entity - // ----------------------------------------------- - // 1) Firstly, check whether is metric entity and expression requires value and also value is not number (i.e. double[]) - // 2) Treat all required fields as double, if not number, then set result as NaN - - try { - ExpressionParser parser = ExpressionParser.parse(expression); - boolean isRequiringValue = parser.getDependentFields().contains(GenericMetricEntity.VALUE_FIELD); - - if(isMetricEntity && isRequiringValue && doubleMap.get(GenericMetricEntity.VALUE_FIELD)!=null - && Double.isNaN(doubleMap.get(GenericMetricEntity.VALUE_FIELD))) // EntityQualifierUtils will convert non-number field into Double.NaN - { - // if dependent fields require "value" - // and value exists but value's type is double[] instead of double - - // handle with metric value array based expression - // lazily extract metric value as double array if required - if(metricValueArray == null){ - // if(allQualifierValues.containsKey(GenericMetricEntity.VALUE_FIELD)){ - Qualifier qualifier = ed.getDisplayNameMap().get(GenericMetricEntity.VALUE_FIELD); - EntitySerDeser serDeser = qualifier.getSerDeser(); - if(serDeser instanceof DoubleArraySerDeser){ - byte[] value = allQualifierValues.get(qualifier.getQualifierName()); - if(value !=null ) metricValueArray = (double[]) 
serDeser.deserialize(value); - } - // } - } - - if(metricValueArray!=null){ - double[] resultBucket = new double[metricValueArray.length]; - Map _doubleMap = new HashMap(doubleMap); - _doubleMap.remove(entry.getKey()); - for(int i=0;i< resultBucket.length;i++) { - _doubleMap.put(GenericMetricEntity.VALUE_FIELD, metricValueArray[i]); - resultBucket[i]= parser.eval(_doubleMap); - } - extra.put(expression,resultBucket); - }else{ - LOG.warn("Failed convert metric value into double[] type which is required by expression: "+expression); - // if require value in double[] is NaN - double value = parser.eval(doubleMap); - extra.put(expression, value); - } - }else { - double value = parser.eval(doubleMap); - extra.put(expression, value); - // LOG.info("DEBUG: "+entry.getKey()+" = "+ value); - } - } catch (Exception e) { - LOG.error("Failed to eval expression "+expression+", exception: "+e.getMessage(),e); - } - } - } else { - logQualifierValues.put(entry.getKey(),entry.getValue()); - } - } - log.setQualifierValues(logQualifierValues); - log.setTags(logTags); - log.setExtraValues(extra); - return log; - } - - public static TaggedLogAPIEntity buildEntity(InternalLog log, EntityDefinition entityDef) throws Exception { - Map qualifierValues = log.getQualifierValues(); - TaggedLogAPIEntity entity = ENTITY_SERDESER.readValue(qualifierValues, entityDef); - if (entity.getTags() == null && log.getTags() != null) { - entity.setTags(log.getTags()); - } - entity.setExp(log.getExtraValues()); - entity.setTimestamp(log.getTimestamp()); - entity.setEncodedRowkey(log.getEncodedRowkey()); - entity.setPrefix(log.getPrefix()); - return entity; - } - - public static List buildEntities(List logs, EntityDefinition entityDef) throws Exception { - final List result = new ArrayList(logs.size()); - for (InternalLog log : logs) { - result.add(buildEntity(log, entityDef)); - } - return result; - } - - public static byte[][] getOutputQualifiers(EntityDefinition entityDef, List outputFields) { - final 
byte[][] result = new byte[outputFields.size()][]; - int index = 0; - for(String field : outputFields){ - // convert displayName to qualifierName - Qualifier q = entityDef.getDisplayNameMap().get(field); - if(q == null){ // for tag case - result[index++] = field.getBytes(); - }else{ // for qualifier case - result[index++] = q.getQualifierName().getBytes(); - } - } - return result; - } - - public static InternalLog convertToInternalLog(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception { - final InternalLog log = new InternalLog(); - final Map inputTags = entity.getTags(); - final Map tags = new TreeMap(); - if(inputTags!=null) { - for (Map.Entry entry : inputTags.entrySet()) { - tags.put(entry.getKey(), entry.getValue()); - } - } - log.setTags(tags); - if(entityDef.isTimeSeries()){ - log.setTimestamp(entity.getTimestamp()); - }else{ - log.setTimestamp(EntityConstants.FIXED_WRITE_TIMESTAMP); // set timestamp to MAX, then actually stored 0 - } - - // For Metric entity, prefix is populated along with entity instead of EntityDefinition - if(entity.getPrefix() != null && !entity.getPrefix().isEmpty()){ - log.setPrefix(entity.getPrefix()); - }else{ - log.setPrefix(entityDef.getPrefix()); - } - - log.setPartitions(entityDef.getPartitions()); - EntitySerDeserializer des = new EntitySerDeserializer(); - log.setQualifierValues(des.writeValue(entity, entityDef)); - - final IndexDefinition[] indexDefs = entityDef.getIndexes(); - if (indexDefs != null) { - final List indexRowkeys = new ArrayList(); - for (int i = 0; i < indexDefs.length; ++i) { - final IndexDefinition indexDef = indexDefs[i]; - final byte[] indexRowkey = indexDef.generateIndexRowkey(entity); - indexRowkeys.add(indexRowkey); - } - log.setIndexRowkeys(indexRowkeys); - } - return log; - } + private static final Logger LOG = LoggerFactory.getLogger(HBaseInternalLogHelper.class); + + private static final EntitySerDeserializer ENTITY_SERDESER = new EntitySerDeserializer(); + + /** + * parse. 
+ * @param ed + * @param r + * @param qualifiers if null, return all qualifiers defined in ed + * @return + */ + public static InternalLog parse(EntityDefinition ed, Result r, byte[][] qualifiers) { + final byte[] row = r.getRow(); + // skip the first 4 bytes : prefix + final int offset = (ed.getPartitions() == null) ? (4) : (4 + ed.getPartitions().length * 4); + long timestamp = ByteUtil.bytesToLong(row, offset); + // reverse timestamp + timestamp = Long.MAX_VALUE - timestamp; + final byte[] family = ed.getColumnFamily().getBytes(); + final Map allQualifierValues = new HashMap(); + + if (qualifiers != null) { + int count = qualifiers.length; + final byte[][] values = new byte[count][]; + for (int i = 0; i < count; i++) { + // TODO if returned value is null, it means no this column for this row, so why set null to the object? + values[i] = r.getValue(family, qualifiers[i]); + allQualifierValues.put(new String(qualifiers[i]), values[i]); + } + } else { + // return all qualifiers + for (KeyValue kv : r.list()) { + byte[] qualifier = kv.getQualifier(); + byte[] value = kv.getValue(); + allQualifierValues.put(new String(qualifier), value); + } + } + final InternalLog log = buildObject(ed, row, timestamp, allQualifierValues); + return log; + } + + /** + * build object. 
+ * @param ed + * @param row + * @param timestamp + * @param allQualifierValues Map < Qualifier name (not display name),Value in bytes array > + * @return + */ + public static InternalLog buildObject(EntityDefinition ed, byte[] row, long timestamp, Map allQualifierValues) { + InternalLog log = new InternalLog(); + String myRow = EagleBase64Wrapper.encodeByteArray2URLSafeString(row); + log.setEncodedRowkey(myRow); + log.setPrefix(ed.getPrefix()); + log.setTimestamp(timestamp); + + Map logQualifierValues = new HashMap(); + Map logTags = new HashMap(); + Map extra = null; + + Map doubleMap = null; + // handle with metric + boolean isMetricEntity = GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(ed.getService()); + double[] metricValueArray = null; + + for (Map.Entry entry : allQualifierValues.entrySet()) { + if (ed.isTag(entry.getKey())) { + if (entry.getValue() != null) { + logTags.put(entry.getKey(), new String(entry.getValue())); + } else if (TokenConstant.isExpression(entry.getKey())) { + if (doubleMap == null) { + doubleMap = EntityQualifierUtils.bytesMapToDoubleMap(allQualifierValues, ed); + } + // Caculate expression based fields + String expression = TokenConstant.parseExpressionContent(entry.getKey()); + if (extra == null) { + extra = new HashMap(); + } + + // Evaluation expression as output based on entity + // ----------------------------------------------- + // 1) Firstly, check whether is metric entity and expression requires value and also value is not number (i.e. 
double[]) + // 2) Treat all required fields as double, if not number, then set result as NaN + + try { + ExpressionParser parser = ExpressionParser.parse(expression); + boolean isRequiringValue = parser.getDependentFields().contains(GenericMetricEntity.VALUE_FIELD); + + if (isMetricEntity && isRequiringValue && doubleMap.get(GenericMetricEntity.VALUE_FIELD) != null + && Double.isNaN(doubleMap.get(GenericMetricEntity.VALUE_FIELD))) { // EntityQualifierUtils will convert non-number field into Double.NaN + // if dependent fields require "value" + // and value exists but value's type is double[] instead of double + + // handle with metric value array based expression + // lazily extract metric value as double array if required + if (metricValueArray == null) { + // if(allQualifierValues.containsKey(GenericMetricEntity.VALUE_FIELD)){ + Qualifier qualifier = ed.getDisplayNameMap().get(GenericMetricEntity.VALUE_FIELD); + EntitySerDeser serDeser = qualifier.getSerDeser(); + if (serDeser instanceof DoubleArraySerDeser) { + byte[] value = allQualifierValues.get(qualifier.getQualifierName()); + if (value != null) { + metricValueArray = (double[]) serDeser.deserialize(value); + } + } + // } + } + + if (metricValueArray != null) { + double[] resultBucket = new double[metricValueArray.length]; + Map _doubleMap = new HashMap(doubleMap); + _doubleMap.remove(entry.getKey()); + for (int i = 0; i < resultBucket.length; i++) { + _doubleMap.put(GenericMetricEntity.VALUE_FIELD, metricValueArray[i]); + resultBucket[i] = parser.eval(_doubleMap); + } + extra.put(expression, resultBucket); + } else { + LOG.warn("Failed convert metric value into double[] type which is required by expression: " + expression); + // if require value in double[] is NaN + double value = parser.eval(doubleMap); + extra.put(expression, value); + } + } else { + double value = parser.eval(doubleMap); + extra.put(expression, value); + // LOG.info("DEBUG: "+entry.getKey()+" = "+ value); + } + } catch (Exception e) { + 
LOG.error("Failed to eval expression " + expression + ", exception: " + e.getMessage(), e); + } + } + } else { + logQualifierValues.put(entry.getKey(), entry.getValue()); + } + } + log.setQualifierValues(logQualifierValues); + log.setTags(logTags); + log.setExtraValues(extra); + return log; + } + + public static TaggedLogAPIEntity buildEntity(InternalLog log, EntityDefinition entityDef) throws Exception { + Map qualifierValues = log.getQualifierValues(); + TaggedLogAPIEntity entity = ENTITY_SERDESER.readValue(qualifierValues, entityDef); + if (entity.getTags() == null && log.getTags() != null) { + entity.setTags(log.getTags()); + } + entity.setExp(log.getExtraValues()); + entity.setTimestamp(log.getTimestamp()); + entity.setEncodedRowkey(log.getEncodedRowkey()); + entity.setPrefix(log.getPrefix()); + return entity; + } + + public static List buildEntities(List logs, EntityDefinition entityDef) throws Exception { + final List result = new ArrayList(logs.size()); + for (InternalLog log : logs) { + result.add(buildEntity(log, entityDef)); + } + return result; + } + + public static byte[][] getOutputQualifiers(EntityDefinition entityDef, List outputFields) { + final byte[][] result = new byte[outputFields.size()][]; + int index = 0; + for (String field : outputFields) { + // convert displayName to qualifierName + Qualifier q = entityDef.getDisplayNameMap().get(field); + if (q == null) { // for tag case + result[index++] = field.getBytes(); + } else { // for qualifier case + result[index++] = q.getQualifierName().getBytes(); + } + } + return result; + } + + public static InternalLog convertToInternalLog(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception { + final InternalLog log = new InternalLog(); + final Map inputTags = entity.getTags(); + final Map tags = new TreeMap(); + if (inputTags != null) { + for (Map.Entry entry : inputTags.entrySet()) { + tags.put(entry.getKey(), entry.getValue()); + } + } + log.setTags(tags); + if 
(entityDef.isTimeSeries()) { + log.setTimestamp(entity.getTimestamp()); + } else { + log.setTimestamp(EntityConstants.FIXED_WRITE_TIMESTAMP); // set timestamp to MAX, then actually stored 0 + } + + // For Metric entity, prefix is populated along with entity instead of EntityDefinition + if (entity.getPrefix() != null && !entity.getPrefix().isEmpty()) { + log.setPrefix(entity.getPrefix()); + } else { + log.setPrefix(entityDef.getPrefix()); + } + + log.setPartitions(entityDef.getPartitions()); + EntitySerDeserializer des = new EntitySerDeserializer(); + log.setQualifierValues(des.writeValue(entity, entityDef)); + + final IndexDefinition[] indexDefs = entityDef.getIndexes(); + if (indexDefs != null) { + final List indexRowkeys = new ArrayList(); + for (int i = 0; i < indexDefs.length; ++i) { + final IndexDefinition indexDef = indexDefs[i]; + final byte[] indexRowkey = indexDef.generateIndexRowkey(entity); + indexRowkeys.add(indexRowkey); + } + log.setIndexRowkeys(indexRowkeys); + } + return log; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogReader2.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogReader2.java index c8b9a33c24..30e5df88ee 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogReader2.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogReader2.java @@ -28,59 +28,60 @@ import java.util.List; public class HBaseLogReader2 extends AbstractHBaseLogReader { - protected ResultScanner rs; + protected ResultScanner rs; - public HBaseLogReader2(EntityDefinition ed, List partitions, Date startTime, Date endTime, Filter filter, String lastScanKey, byte[][] outputQualifiers) { - super(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers); - } + public HBaseLogReader2(EntityDefinition ed, List partitions, Date startTime, Date endTime, 
Filter filter, String lastScanKey, byte[][] outputQualifiers) { + super(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers); + } - /** - * This constructor supports partition. - * - * @param ed entity definition - * @param partitions partition values, which is sorted in partition definition order. TODO: in future we need to support - * multiple values for one partition field - * @param startTime start time of the query - * @param endTime end time of the query - * @param filter filter for the hbase scan - * @param lastScanKey the key of last scan - * @param outputQualifiers the bytes of output qualifier names - * @param prefix can be populated from outside world specifically for generic metric reader - */ - public HBaseLogReader2(EntityDefinition ed, List partitions, Date startTime, Date endTime, Filter filter, String lastScanKey, byte[][] outputQualifiers, String prefix) { - super(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers, prefix); - } + /** + * This constructor supports partition. + * + * @param ed entity definition + * @param partitions partition values, which is sorted in partition definition order. 
TODO: in future we need to support + * multiple values for one partition field + * @param startTime start time of the query + * @param endTime end time of the query + * @param filter filter for the hbase scan + * @param lastScanKey the key of last scan + * @param outputQualifiers the bytes of output qualifier names + * @param prefix can be populated from outside world specifically for generic metric reader + */ + public HBaseLogReader2(EntityDefinition ed, List partitions, Date startTime, Date endTime, Filter filter, String lastScanKey, byte[][] outputQualifiers, String prefix) { + super(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers, prefix); + } - @Override - protected void onOpen(HTableInterface tbl, Scan scan) throws IOException { - rs = tbl.getScanner(scan); - } + @Override + protected void onOpen(HTableInterface tbl, Scan scan) throws IOException { + rs = tbl.getScanner(scan); + } - /** - *

Close:

- * 1. Call super.close(): release current table connection

- * 2. Close Scanner

- * - * @throws IOException - */ - @Override - public void close() throws IOException { - super.close(); - if(rs != null){ - rs.close(); - } - } + /** + *

Close:

+ * 1. Call super.close(): release current table connection + * 2. Close Scanner. + * + * @throws IOException + */ + @Override + public void close() throws IOException { + super.close(); + if (rs != null) { + rs.close(); + } + } - @Override - public InternalLog read() throws IOException { - if (rs == null) - throw new IllegalArgumentException( - "ResultScanner must be initialized before reading"); - InternalLog t = null; - Result r = rs.next(); - if (r != null) { - t = HBaseInternalLogHelper.parse(_ed, r, qualifiers); - } - return t; - } + @Override + public InternalLog read() throws IOException { + if (rs == null) { + throw new IllegalArgumentException( + "ResultScanner must be initialized before reading"); + } + InternalLog t = null; + Result r = rs.next(); + if (r != null) { + t = HBaseInternalLogHelper.parse(entityDefinition, r, qualifiers); + } + return t; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogWriter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogWriter.java index 059ee7fe9b..cd62f842b5 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogWriter.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogWriter.java @@ -16,11 +16,6 @@ */ package org.apache.eagle.log.entity; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - import org.apache.eagle.common.config.EagleConfigFactory; import org.apache.hadoop.hbase.client.HTableFactory; import org.apache.hadoop.hbase.client.HTableInterface; @@ -28,125 +23,131 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + public class HBaseLogWriter implements LogWriter { - private static Logger LOG = 
LoggerFactory.getLogger(HBaseLogWriter.class); - private static byte[] EMPTY_INDEX_QUALIFER_VALUE = "".getBytes(); - - private HTableInterface tbl; - private String table; - private String columnFamily; - - public HBaseLogWriter(String table, String columnFamily) { - // TODO assert for non-null of table and columnFamily - this.table = table; - this.columnFamily = columnFamily; - } - - @Override - public void open() throws IOException { - try{ - tbl = EagleConfigFactory.load().getHTable(this.table); -// LOGGER.info("HBase table " + table + " audo reflush is " + (tbl.isAutoFlush() ? "enabled" : "disabled")); - }catch(Exception ex){ - LOG.error("Cannot create htable", ex); - throw new IOException(ex); - } - } - - @Override - public void close() throws IOException { - if(tbl != null){ - new HTableFactory().releaseHTableInterface(tbl); - } - } - - @Override - public void flush() throws IOException { - tbl.flushCommits(); - } - - protected void populateColumnValues(Put p, InternalLog log){ - Map qualifierValues = log.getQualifierValues(); - // iterate all qualifierValues - for(Map.Entry entry : qualifierValues.entrySet()){ - p.add(columnFamily.getBytes(), entry.getKey().getBytes(), entry.getValue()); - } - - Map tags = log.getTags(); - // iterate all tags, each tag will be stored as a column qualifier - if(tags != null){ - for(Map.Entry entry : tags.entrySet()){ - // TODO need a consistent handling of null values - if(entry.getValue() != null) - p.add(columnFamily.getBytes(), entry.getKey().getBytes(), entry.getValue().getBytes()); - } - } - } - - /** - * TODO need think about if multi-PUT is necessary, by checking if autoFlush works - */ - @Override - public byte[] write(InternalLog log) throws IOException{ - final byte[] rowkey = RowkeyBuilder.buildRowkey(log); - final Put p = new Put(rowkey); - populateColumnValues(p, log); - tbl.put(p); - final List indexRowkeys = log.getIndexRowkeys(); - if (indexRowkeys != null) { - writeIndexes(rowkey, indexRowkeys); - } - return 
rowkey; - } - - /** - * TODO need think about if multi-PUT is necessary, by checking if autoFlush works - */ - public List write(List logs) throws IOException{ - final List puts = new ArrayList(logs.size()); - final List result = new ArrayList(logs.size()); - for (InternalLog log : logs) { - final byte[] rowkey = RowkeyBuilder.buildRowkey(log); - final Put p = new Put(rowkey); - populateColumnValues(p, log); - puts.add(p); - final List indexRowkeys = log.getIndexRowkeys(); - if (indexRowkeys != null) { - writeIndexes(rowkey, indexRowkeys, puts); - } - result.add(rowkey); - } - tbl.put(puts); - return result; - } - - @Override - public void updateByRowkey(byte[] rowkey, InternalLog log) throws IOException{ - Put p = new Put(rowkey); - populateColumnValues(p, log); - tbl.put(p); - final List indexRowkeys = log.getIndexRowkeys(); - if (indexRowkeys != null) { - writeIndexes(rowkey, indexRowkeys); - } - } - - private void writeIndexes(byte[] rowkey, List indexRowkeys) throws IOException { - for (byte[] indexRowkey : indexRowkeys) { - Put p = new Put(indexRowkey); - p.add(columnFamily.getBytes(), rowkey, EMPTY_INDEX_QUALIFER_VALUE); - tbl.put(p); - } - } - - private void writeIndexes(byte[] rowkey, List indexRowkeys, List puts) throws IOException { - for (byte[] indexRowkey : indexRowkeys) { - Put p = new Put(indexRowkey); - p.add(columnFamily.getBytes(), rowkey, EMPTY_INDEX_QUALIFER_VALUE); - puts.add(p); -// tbl.put(p); - } - } - - + private static Logger LOG = LoggerFactory.getLogger(HBaseLogWriter.class); + private static byte[] EMPTY_INDEX_QUALIFER_VALUE = "".getBytes(); + + private HTableInterface tbl; + private String table; + private String columnFamily; + + public HBaseLogWriter(String table, String columnFamily) { + // TODO assert for non-null of table and columnFamily + this.table = table; + this.columnFamily = columnFamily; + } + + @Override + public void open() throws IOException { + try { + tbl = EagleConfigFactory.load().getHTable(this.table); + // 
LOGGER.info("HBase table " + table + " audo reflush is " + (tbl.isAutoFlush() ? "enabled" : "disabled")); + } catch (Exception ex) { + LOG.error("Cannot create htable", ex); + throw new IOException(ex); + } + } + + @Override + public void close() throws IOException { + if (tbl != null) { + new HTableFactory().releaseHTableInterface(tbl); + } + } + + @Override + public void flush() throws IOException { + tbl.flushCommits(); + } + + protected void populateColumnValues(Put p, InternalLog log) { + Map qualifierValues = log.getQualifierValues(); + // iterate all qualifierValues + for (Map.Entry entry : qualifierValues.entrySet()) { + p.add(columnFamily.getBytes(), entry.getKey().getBytes(), entry.getValue()); + } + + Map tags = log.getTags(); + // iterate all tags, each tag will be stored as a column qualifier + if (tags != null) { + for (Map.Entry entry : tags.entrySet()) { + // TODO need a consistent handling of null values + if (entry.getValue() != null) { + p.add(columnFamily.getBytes(), entry.getKey().getBytes(), entry.getValue().getBytes()); + } + } + } + } + + /** + * TODO need think about if multi-PUT is necessary, by checking if autoFlush works. + */ + @Override + public byte[] write(InternalLog log) throws IOException { + final byte[] rowkey = RowkeyBuilder.buildRowkey(log); + final Put p = new Put(rowkey); + populateColumnValues(p, log); + tbl.put(p); + final List indexRowkeys = log.getIndexRowkeys(); + if (indexRowkeys != null) { + writeIndexes(rowkey, indexRowkeys); + } + return rowkey; + } + + /** + * TODO need think about if multi-PUT is necessary, by checking if autoFlush works. 
+ */ + public List write(List logs) throws IOException { + final List puts = new ArrayList(logs.size()); + final List result = new ArrayList(logs.size()); + for (InternalLog log : logs) { + final byte[] rowkey = RowkeyBuilder.buildRowkey(log); + final Put p = new Put(rowkey); + populateColumnValues(p, log); + puts.add(p); + final List indexRowkeys = log.getIndexRowkeys(); + if (indexRowkeys != null) { + writeIndexes(rowkey, indexRowkeys, puts); + } + result.add(rowkey); + } + tbl.put(puts); + return result; + } + + @Override + public void updateByRowkey(byte[] rowkey, InternalLog log) throws IOException { + Put p = new Put(rowkey); + populateColumnValues(p, log); + tbl.put(p); + final List indexRowkeys = log.getIndexRowkeys(); + if (indexRowkeys != null) { + writeIndexes(rowkey, indexRowkeys); + } + } + + private void writeIndexes(byte[] rowkey, List indexRowkeys) throws IOException { + for (byte[] indexRowkey : indexRowkeys) { + Put p = new Put(indexRowkey); + p.add(columnFamily.getBytes(), rowkey, EMPTY_INDEX_QUALIFER_VALUE); + tbl.put(p); + } + } + + private void writeIndexes(byte[] rowkey, List indexRowkeys, List puts) throws IOException { + for (byte[] indexRowkey : indexRowkeys) { + Put p = new Put(indexRowkey); + p.add(columnFamily.getBytes(), rowkey, EMPTY_INDEX_QUALIFER_VALUE); + puts.add(p); + // tbl.put(p); + } + } + + } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/InternalLog.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/InternalLog.java index 82766400a9..e31d4380c1 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/InternalLog.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/InternalLog.java @@ -22,118 +22,136 @@ import java.util.Map; /** - * TODO we should decouple BaseLog during write time and BaseLog during read time + * TODO we should decouple BaseLog during write 
time and BaseLog during read time. */ public class InternalLog { - private String encodedRowkey; - private String prefix; - private String[] partitions; - private long timestamp; - private Map qualifierValues; - - private Map extraValues; - private Map tags; - private Map> searchTags; - private List indexRowkeys; - - public String getEncodedRowkey() { - return encodedRowkey; - } - - public void setEncodedRowkey(String encodedRowkey) { - this.encodedRowkey = encodedRowkey; - } - - public Map getQualifierValues() { - return qualifierValues; - } - public void setQualifierValues(Map qualifierValues) { - this.qualifierValues = qualifierValues; - } - - public Map> getSearchTags() { - return searchTags; - } - public void setSearchTags(Map> searchTags) { - this.searchTags = searchTags; - } - public String getPrefix() { - return prefix; - } - public void setPrefix(String prefix) { - this.prefix = prefix; - } - public String[] getPartitions() { - return partitions; - } - public void setPartitions(String[] partitions) { - this.partitions = partitions; - } - public long getTimestamp() { - return timestamp; - } - public void setTimestamp(long timestamp) { - this.timestamp = timestamp; - } - public Map getTags() { - return tags; - } - public void setTags(Map tags) { - this.tags = tags; - } - public List getIndexRowkeys() { - return indexRowkeys; - } - public void setIndexRowkeys(List indexRowkeys) { - this.indexRowkeys = indexRowkeys; - } - public Map getExtraValues() { return extraValues; } - public void setExtraValues(Map extraValues) { this.extraValues = extraValues; } - - public String toString(){ - StringBuffer sb = new StringBuffer(); - sb.append(prefix); - sb.append("|"); - sb.append(DateTimeUtil.millisecondsToHumanDateWithMilliseconds(timestamp)); - sb.append("("); - sb.append(timestamp); - sb.append(")"); - sb.append("|searchTags:"); - if(searchTags != null){ - for(String tagkey : searchTags.keySet()){ - sb.append(tagkey); - sb.append('='); - List tagValues = 
searchTags.get(tagkey); - sb.append("("); - for(String tagValue : tagValues){ - sb.append(tagValue); - sb.append(","); - } - sb.append(")"); - sb.append(","); - } - } - sb.append("|tags:"); - if(tags != null){ - for(Map.Entry entry : tags.entrySet()){ - sb.append(entry.getKey()); - sb.append("="); - sb.append(entry.getValue()); - sb.append(","); - } - } - sb.append("|columns:"); - if(qualifierValues != null){ - for(String qualifier : qualifierValues.keySet()){ - byte[] value = qualifierValues.get(qualifier); - sb.append(qualifier); - sb.append("="); - if(value != null){ - sb.append(new String(value)); - } - sb.append(","); - } - } - return sb.toString(); - } + private String encodedRowkey; + private String prefix; + private String[] partitions; + private long timestamp; + private Map qualifierValues; + + private Map extraValues; + private Map tags; + private Map> searchTags; + private List indexRowkeys; + + public String getEncodedRowkey() { + return encodedRowkey; + } + + public void setEncodedRowkey(String encodedRowkey) { + this.encodedRowkey = encodedRowkey; + } + + public Map getQualifierValues() { + return qualifierValues; + } + + public void setQualifierValues(Map qualifierValues) { + this.qualifierValues = qualifierValues; + } + + public Map> getSearchTags() { + return searchTags; + } + + public void setSearchTags(Map> searchTags) { + this.searchTags = searchTags; + } + + public String getPrefix() { + return prefix; + } + + public void setPrefix(String prefix) { + this.prefix = prefix; + } + + public String[] getPartitions() { + return partitions; + } + + public void setPartitions(String[] partitions) { + this.partitions = partitions; + } + + public long getTimestamp() { + return timestamp; + } + + public void setTimestamp(long timestamp) { + this.timestamp = timestamp; + } + + public Map getTags() { + return tags; + } + + public void setTags(Map tags) { + this.tags = tags; + } + + public List getIndexRowkeys() { + return indexRowkeys; + } + + public void 
setIndexRowkeys(List indexRowkeys) { + this.indexRowkeys = indexRowkeys; + } + + public Map getExtraValues() { + return extraValues; + } + + public void setExtraValues(Map extraValues) { + this.extraValues = extraValues; + } + + public String toString() { + StringBuffer sb = new StringBuffer(); + sb.append(prefix); + sb.append("|"); + sb.append(DateTimeUtil.millisecondsToHumanDateWithMilliseconds(timestamp)); + sb.append("("); + sb.append(timestamp); + sb.append(")"); + sb.append("|searchTags:"); + if (searchTags != null) { + for (String tagkey : searchTags.keySet()) { + sb.append(tagkey); + sb.append('='); + List tagValues = searchTags.get(tagkey); + sb.append("("); + for (String tagValue : tagValues) { + sb.append(tagValue); + sb.append(","); + } + sb.append(")"); + sb.append(","); + } + } + sb.append("|tags:"); + if (tags != null) { + for (Map.Entry entry : tags.entrySet()) { + sb.append(entry.getKey()); + sb.append("="); + sb.append(entry.getValue()); + sb.append(","); + } + } + sb.append("|columns:"); + if (qualifierValues != null) { + for (String qualifier : qualifierValues.keySet()) { + byte[] value = qualifierValues.get(qualifier); + sb.append(qualifier); + sb.append("="); + if (value != null) { + sb.append(new String(value)); + } + sb.append(","); + } + } + return sb.toString(); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/ListQueryAPIResponseEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/ListQueryAPIResponseEntity.java index 15de94633e..dab4e186af 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/ListQueryAPIResponseEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/ListQueryAPIResponseEntity.java @@ -20,59 +20,74 @@ import org.codehaus.jackson.map.annotate.JsonSerialize; /** - * TODO: (hchen9) currently we disable firstTimestamp in response avoid breaking 
older client implementation, but we may need to remove "firstTimestamp" from @JsonIgnoreProperties(ignoreUnknown = true,value={"firstTimestamp"}) to enable the feature later + * TODO: (hchen9) currently we disable firstTimestamp in response avoid breaking older client implementation, but we may need to remove "firstTimestamp" from @JsonIgnoreProperties(ignoreUnknown = + * true,value={"firstTimestamp"}) to enable the feature later. */ -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) -@JsonIgnoreProperties(ignoreUnknown = true,value={"firstTimestamp"}) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) +@JsonIgnoreProperties(ignoreUnknown = true, value = {"firstTimestamp"}) public class ListQueryAPIResponseEntity { - private boolean success; - private String exception; - private int totalResults; - private long elapsedms; - private long lastTimestamp; - private long firstTimestamp; - public long getFirstTimestamp() { - return firstTimestamp; - } - public void setFirstTimestamp(long firstTimestamp) { - this.firstTimestamp = firstTimestamp; - } - private Object obj; - - public long getElapsedms() { - return elapsedms; - } - public void setElapsedms(long elapsedms) { - this.elapsedms = elapsedms; - } - public boolean isSuccess() { - return success; - } - public void setSuccess(boolean success) { - this.success = success; - } - public String getException() { - return exception; - } - public void setException(String exception) { - this.exception = exception; - } - public int getTotalResults() { - return totalResults; - } - public void setTotalResults(int totalResults) { - this.totalResults = totalResults; - } - public long getLastTimestamp() { - return lastTimestamp; - } - public void setLastTimestamp(long lastTimestamp) { - this.lastTimestamp = lastTimestamp; - } - public Object getObj() { - return obj; - } - public void setObj(Object obj) { - this.obj = obj; - } + private boolean success; + private String exception; + private int totalResults; + 
private long elapsedms; + private long lastTimestamp; + private long firstTimestamp; + + public long getFirstTimestamp() { + return firstTimestamp; + } + + public void setFirstTimestamp(long firstTimestamp) { + this.firstTimestamp = firstTimestamp; + } + + private Object obj; + + public long getElapsedms() { + return elapsedms; + } + + public void setElapsedms(long elapsedms) { + this.elapsedms = elapsedms; + } + + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getException() { + return exception; + } + + public void setException(String exception) { + this.exception = exception; + } + + public int getTotalResults() { + return totalResults; + } + + public void setTotalResults(int totalResults) { + this.totalResults = totalResults; + } + + public long getLastTimestamp() { + return lastTimestamp; + } + + public void setLastTimestamp(long lastTimestamp) { + this.lastTimestamp = lastTimestamp; + } + + public Object getObj() { + return obj; + } + + public void setObj(Object obj) { + this.obj = obj; + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogReader.java index da1e1ab50f..e9dc7a4bff 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogReader.java @@ -19,10 +19,10 @@ import java.io.Closeable; import java.io.IOException; -public interface LogReader extends Closeable{ - public void open() throws IOException; +public interface LogReader extends Closeable { + public void open() throws IOException; - public void close() throws IOException; - - public T read() throws IOException; + public void close() throws IOException; + + public T read() throws 
IOException; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogWriter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogWriter.java index 6ef4ee3bfe..32397815ff 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogWriter.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogWriter.java @@ -19,14 +19,14 @@ import java.io.Closeable; import java.io.IOException; -public interface LogWriter extends Closeable{ - public void flush() throws IOException; +public interface LogWriter extends Closeable { + public void flush() throws IOException; - public void open() throws IOException; + public void open() throws IOException; - public void close() throws IOException; + public void close() throws IOException; - public byte[] write(InternalLog log) throws IOException; - - public void updateByRowkey(byte[] rowkey, InternalLog log) throws IOException; + public byte[] write(InternalLog log) throws IOException; + + public void updateByRowkey(byte[] rowkey, InternalLog log) throws IOException; } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/MetricMetadataEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/MetricMetadataEntity.java index 9a58d35621..bfbad89ed7 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/MetricMetadataEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/MetricMetadataEntity.java @@ -16,102 +16,109 @@ */ package org.apache.eagle.log.entity; -import org.apache.eagle.log.entity.meta.Column; -import org.apache.eagle.log.entity.meta.ColumnFamily; -import org.apache.eagle.log.entity.meta.Indexes; -import org.apache.eagle.log.entity.meta.Service; -import 
org.apache.eagle.log.entity.meta.Index; -import org.codehaus.jackson.map.annotate.JsonSerialize; - import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.log.entity.meta.Prefix; -import org.apache.eagle.log.entity.meta.Table; -import org.apache.eagle.log.entity.meta.TimeSeries; +import org.apache.eagle.log.entity.meta.*; +import org.codehaus.jackson.map.annotate.JsonSerialize; -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) @Table("eagle_metric") @ColumnFamily("f") @Prefix("dmeta") @Service("MetricMetadataService") @TimeSeries(false) -@Indexes({ - @Index(name="Index_1_name", columns = { "name" }, unique = true) - }) +@Indexes( { + @Index(name = "Index_1_name", columns = {"name"}, unique = true) + }) public class MetricMetadataEntity extends TaggedLogAPIEntity { - - @Column("a") - private String storeType; - @Column("b") - private String displayName; - @Column("c") - private String defaultDownSamplingFunction; - @Column("d") - private String defaultAggregateFunction; - @Column("e") - private String aggFunctions; - @Column("f") - private String downSamplingFunctions; - @Column("g") - private String resolutions; - @Column("h") - private String drillDownPaths; - - public String getStoreType() { - return storeType; - } - public void setStoreType(String storeType) { - this.storeType = storeType; - pcs.firePropertyChange("storeType", null, null); - } - public String getDisplayName() { - return displayName; - } - public void setDisplayName(String displayName) { - this.displayName = displayName; - pcs.firePropertyChange("displayName", null, null); - } - public String getDefaultDownSamplingFunction() { - return defaultDownSamplingFunction; - } - public void setDefaultDownSamplingFunction(String defaultDownSamplingFunction) { - this.defaultDownSamplingFunction = defaultDownSamplingFunction; - pcs.firePropertyChange("defaultDownSamplingFunction", null, null); - } - public 
String getDefaultAggregateFunction() { - return defaultAggregateFunction; - } - public void setDefaultAggregateFunction(String defaultAggregateFunction) { - this.defaultAggregateFunction = defaultAggregateFunction; - pcs.firePropertyChange("defaultAggregateFunction", null, null); - } - public String getAggFunctions() { - return aggFunctions; - } - public void setAggFunctions(String aggFunctions) { - this.aggFunctions = aggFunctions; - pcs.firePropertyChange("aggFunctions", null, null); - } - public String getDownSamplingFunctions() { - return downSamplingFunctions; - } - public void setDownSamplingFunctions(String downSamplingFunctions) { - this.downSamplingFunctions = downSamplingFunctions; - pcs.firePropertyChange("downSamplingFunctions", null, null); - } - public String getResolutions() { - return resolutions; - } - public void setResolutions(String resolutions) { - this.resolutions = resolutions; - pcs.firePropertyChange("resolutions", null, null); - } - public String getDrillDownPaths() { - return drillDownPaths; - } - public void setDrillDownPaths(String drillDownPaths) { - this.drillDownPaths = drillDownPaths; - pcs.firePropertyChange("drillDownPaths", null, null); - } - + + @Column("a") + private String storeType; + @Column("b") + private String displayName; + @Column("c") + private String defaultDownSamplingFunction; + @Column("d") + private String defaultAggregateFunction; + @Column("e") + private String aggFunctions; + @Column("f") + private String downSamplingFunctions; + @Column("g") + private String resolutions; + @Column("h") + private String drillDownPaths; + + public String getStoreType() { + return storeType; + } + + public void setStoreType(String storeType) { + this.storeType = storeType; + pcs.firePropertyChange("storeType", null, null); + } + + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + pcs.firePropertyChange("displayName", null, null); + } 
+ + public String getDefaultDownSamplingFunction() { + return defaultDownSamplingFunction; + } + + public void setDefaultDownSamplingFunction(String defaultDownSamplingFunction) { + this.defaultDownSamplingFunction = defaultDownSamplingFunction; + pcs.firePropertyChange("defaultDownSamplingFunction", null, null); + } + + public String getDefaultAggregateFunction() { + return defaultAggregateFunction; + } + + public void setDefaultAggregateFunction(String defaultAggregateFunction) { + this.defaultAggregateFunction = defaultAggregateFunction; + pcs.firePropertyChange("defaultAggregateFunction", null, null); + } + + public String getAggFunctions() { + return aggFunctions; + } + + public void setAggFunctions(String aggFunctions) { + this.aggFunctions = aggFunctions; + pcs.firePropertyChange("aggFunctions", null, null); + } + + public String getDownSamplingFunctions() { + return downSamplingFunctions; + } + + public void setDownSamplingFunctions(String downSamplingFunctions) { + this.downSamplingFunctions = downSamplingFunctions; + pcs.firePropertyChange("downSamplingFunctions", null, null); + } + + public String getResolutions() { + return resolutions; + } + + public void setResolutions(String resolutions) { + this.resolutions = resolutions; + pcs.firePropertyChange("resolutions", null, null); + } + + public String getDrillDownPaths() { + return drillDownPaths; + } + + public void setDrillDownPaths(String drillDownPaths) { + this.drillDownPaths = drillDownPaths; + pcs.firePropertyChange("drillDownPaths", null, null); + } + } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierCreationListener.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierCreationListener.java index b0eeaede20..cc1783aa2e 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierCreationListener.java +++ 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierCreationListener.java @@ -19,10 +19,10 @@ import java.util.Map; public interface QualifierCreationListener { - /** - * Qualifier display name mapped to qualifier value in bytes[] - * - * @param qualifiers - */ - public void qualifierCreated(Map qualifiers); + /** + * Qualifier display name mapped to qualifier value in bytes[]. + * + * @param qualifiers + */ + public void qualifierCreated(Map qualifiers); } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierNotDefinedException.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierNotDefinedException.java index 88135bbad4..9a6c131739 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierNotDefinedException.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierNotDefinedException.java @@ -16,13 +16,11 @@ */ package org.apache.eagle.log.entity; -public class QualifierNotDefinedException extends Exception{ - /** - * - */ - private static final long serialVersionUID = 1L; +public class QualifierNotDefinedException extends Exception { - public QualifierNotDefinedException(String message){ - super(message); - } + private static final long serialVersionUID = 1L; + + public QualifierNotDefinedException(String message) { + super(message); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyBuilder.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyBuilder.java index 1978d43888..252dee9f45 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyBuilder.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyBuilder.java @@ -16,125 +16,126 @@ */ package 
org.apache.eagle.log.entity; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.SortedMap; -import java.util.TreeMap; - +import org.apache.eagle.common.ByteUtil; import org.apache.eagle.log.entity.meta.EntityConstants; import org.apache.eagle.log.entity.meta.EntityDefinition; -import org.apache.eagle.common.ByteUtil; + +import java.util.*; public class RowkeyBuilder { - - public static final int EMPTY_PARTITION_DEFAULT_HASH_CODE = 0; - - /** - * Generate the internal sorted hashmap for tags. Please note the partition tags should not be included in the result map. - * @param partitions array of partition tags in order - * @param tags tags of the entity - * @return the sorted hash map of the tags - */ - public static SortedMap generateSortedTagMap(String[] partitions, Map tags) { - final SortedMap tagHashMap = new TreeMap(); - for (Map.Entry entry: tags.entrySet()) { - final String tagName = entry.getKey(); - final String tagValue = entry.getValue(); - // If it's a partition tag, we need to remove it from tag hash list. It need to - // put to the fix partition hash slot in rowkey. 
- if (tagValue == null || isPartitionTag(partitions, tagName)) - continue; - tagHashMap.put(tagName.hashCode(), tagValue.hashCode()); - } - return tagHashMap; - } - - /** - * build rowkey from InternalLog object - * @param log internal log entity to write - * @return the rowkey of the entity - */ - public static byte[] buildRowkey(InternalLog log) { - final String[] partitions = log.getPartitions(); - final Map tags = log.getTags(); - final SortedMap tagHashMap = generateSortedTagMap(partitions, tags); - - // reverse timestamp - long ts = Long.MAX_VALUE - log.getTimestamp(); - - List partitionHashValues = new ArrayList(); - if (partitions != null) { - for (String partition : partitions) { - final String tagValue = tags.get(partition); - if (tagValue != null) { - partitionHashValues.add(tagValue.hashCode()); - } else { - partitionHashValues.add(EMPTY_PARTITION_DEFAULT_HASH_CODE); - } - } - } - return buildRowkey(log.getPrefix().hashCode(), partitionHashValues, ts, tagHashMap); - } - - public static long getTimestamp(byte[] rowkey, EntityDefinition ed) { - if (!ed.isTimeSeries()) { - return EntityConstants.FIXED_WRITE_TIMESTAMP; - } - final int offset = (ed.getPartitions() == null) ? 4 : (4 + ed.getPartitions().length * 4); - return Long.MAX_VALUE - ByteUtil.bytesToLong(rowkey, offset); - } - - /** - * Check if the tagName is one of the partition tags - * @param partitions paritition tags of the entity - * @param tagName the tag name that needs to check - * @return - */ - private static boolean isPartitionTag(String[] partitions, String tagName) { - if (partitions != null) { - for (String partition : partitions) { - if (partition.equals(tagName)) { - return true; - } - } - } - return false; - } - - /** - * rowkey is: prefixHash:4 + (partitionValueHash:4)* + timestamp:8 + (tagnameHash:4 + tagvalueHash:4)* - * partition fields are sorted by partition definition order, while tag fields are sorted by tag name's - * hash code values. 
- */ - private static byte[] buildRowkey(int prefixHash, List partitionHashValues, long timestamp, SortedMap tags){ - // alloacate byte array for rowkey - final int len = 4 + 8 + tags.size() * (4 + 4) + (partitionHashValues.size() * 4); - final byte[] rowkey = new byte[len]; - int offset = 0; - - // 1. set prefix - ByteUtil.intToBytes(prefixHash, rowkey, offset); - offset += 4; - - // 2. set partition - for (Integer partHash : partitionHashValues) { - ByteUtil.intToBytes(partHash, rowkey, offset); - offset += 4; - } - - // 3. set timestamp - ByteUtil.longToBytes(timestamp, rowkey, offset); - offset += 8; - - // 4. set tag key/value hashes - for (Map.Entry entry : tags.entrySet()) { - ByteUtil.intToBytes(entry.getKey(), rowkey, offset); - offset += 4; - ByteUtil.intToBytes(entry.getValue(), rowkey, offset); - offset += 4; - } - - return rowkey; - } + + public static final int EMPTY_PARTITION_DEFAULT_HASH_CODE = 0; + + /** + * Generate the internal sorted hashmap for tags. Please note the partition tags should not be included in the result map. + * + * @param partitions array of partition tags in order + * @param tags tags of the entity + * @return the sorted hash map of the tags + */ + public static SortedMap generateSortedTagMap(String[] partitions, Map tags) { + final SortedMap tagHashMap = new TreeMap(); + for (Map.Entry entry : tags.entrySet()) { + final String tagName = entry.getKey(); + final String tagValue = entry.getValue(); + // If it's a partition tag, we need to remove it from tag hash list. It need to + // put to the fix partition hash slot in rowkey. + if (tagValue == null || isPartitionTag(partitions, tagName)) { + continue; + } + tagHashMap.put(tagName.hashCode(), tagValue.hashCode()); + } + return tagHashMap; + } + + /** + * build rowkey from InternalLog object. 
+ * + * @param log internal log entity to write + * @return the rowkey of the entity + */ + public static byte[] buildRowkey(InternalLog log) { + final String[] partitions = log.getPartitions(); + final Map tags = log.getTags(); + final SortedMap tagHashMap = generateSortedTagMap(partitions, tags); + + // reverse timestamp + long ts = Long.MAX_VALUE - log.getTimestamp(); + + List partitionHashValues = new ArrayList(); + if (partitions != null) { + for (String partition : partitions) { + final String tagValue = tags.get(partition); + if (tagValue != null) { + partitionHashValues.add(tagValue.hashCode()); + } else { + partitionHashValues.add(EMPTY_PARTITION_DEFAULT_HASH_CODE); + } + } + } + return buildRowkey(log.getPrefix().hashCode(), partitionHashValues, ts, tagHashMap); + } + + /** + * rowkey is: prefixHash:4 + (partitionValueHash:4)* + timestamp:8 + (tagnameHash:4 + tagvalueHash:4)* + * partition fields are sorted by partition definition order, while tag fields are sorted by tag name's + * hash code values. + */ + private static byte[] buildRowkey(int prefixHash, List partitionHashValues, long timestamp, SortedMap tags) { + // alloacate byte array for rowkey + final int len = 4 + 8 + tags.size() * (4 + 4) + (partitionHashValues.size() * 4); + final byte[] rowkey = new byte[len]; + int offset = 0; + + // 1. set prefix + ByteUtil.intToBytes(prefixHash, rowkey, offset); + offset += 4; + + // 2. set partition + for (Integer partHash : partitionHashValues) { + ByteUtil.intToBytes(partHash, rowkey, offset); + offset += 4; + } + + // 3. set timestamp + ByteUtil.longToBytes(timestamp, rowkey, offset); + offset += 8; + + // 4. 
set tag key/value hashes + for (Map.Entry entry : tags.entrySet()) { + ByteUtil.intToBytes(entry.getKey(), rowkey, offset); + offset += 4; + ByteUtil.intToBytes(entry.getValue(), rowkey, offset); + offset += 4; + } + + return rowkey; + } + + public static long getTimestamp(byte[] rowkey, EntityDefinition ed) { + if (!ed.isTimeSeries()) { + return EntityConstants.FIXED_WRITE_TIMESTAMP; + } + final int offset = (ed.getPartitions() == null) ? 4 : (4 + ed.getPartitions().length * 4); + return Long.MAX_VALUE - ByteUtil.bytesToLong(rowkey, offset); + } + + /** + * Check if the tagName is one of the partition tags. + * + * @param partitions paritition tags of the entity + * @param tagName the tag name that needs to check + * @return + */ + private static boolean isPartitionTag(String[] partitions, String tagName) { + if (partitions != null) { + for (String partition : partitions) { + if (partition.equals(tagName)) { + return true; + } + } + } + return false; + } + } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyQueryAPIResponseEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyQueryAPIResponseEntity.java index 1745f74b52..47952c7d2b 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyQueryAPIResponseEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyQueryAPIResponseEntity.java @@ -18,28 +18,33 @@ import org.codehaus.jackson.map.annotate.JsonSerialize; -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) public class RowkeyQueryAPIResponseEntity { - private boolean success; - private String exception; - private Object obj; - - public boolean isSuccess() { - return success; - } - public void setSuccess(boolean success) { - this.success = success; - } - public String getException() { - return 
exception; - } - public void setException(String exception) { - this.exception = exception; - } - public Object getObj() { - return obj; - } - public void setObj(Object obj) { - this.obj = obj; - } + private boolean success; + private String exception; + private Object obj; + + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getException() { + return exception; + } + + public void setException(String exception) { + this.exception = exception; + } + + public Object getObj() { + return obj; + } + + public void setObj(Object obj) { + this.obj = obj; + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/SearchCondition.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/SearchCondition.java index fada0e231d..30ae1c55aa 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/SearchCondition.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/SearchCondition.java @@ -26,123 +26,141 @@ * search condition includes the following: * 1. prefix - part of rowkey * 2. startTime,endTime - timestamp, part of rowkey - * 3. hbase filter converted from query + * 3. hbase filter converted from query * 4. aggregate parameters * 4. sort options * 5. output fields and tags * 6. entityName * 7. 
pagination: pageSize and startRowkey */ -public class SearchCondition{ - private String startTime; - private String endTime; - private Filter filter; - private List outputFields; - private boolean outputAll; - private long pageSize; - private String startRowkey; - private String entityName; - private List partitionValues; - private ORExpression queryExpression; - - public boolean isOutputVerbose() { - return outputVerbose; - } - - public void setOutputVerbose(boolean outputVerbose) { - this.outputVerbose = outputVerbose; - } - - public Map getOutputAlias() { - return outputAlias; - } - - public void setOutputAlias(Map outputAlias) { - this.outputAlias = outputAlias; - } - - private boolean outputVerbose; - private Map outputAlias; - - /** - * copy constructor - * @param sc - */ - public SearchCondition(SearchCondition sc){ - this.startTime = sc.startTime; - this.endTime = sc.endTime; - this.filter = sc.filter; - this.outputFields = sc.outputFields; - this.pageSize = sc.pageSize; - this.startRowkey = sc.startRowkey; - this.entityName = sc.entityName; - this.partitionValues = sc.partitionValues; - this.queryExpression = sc.queryExpression; - } - - public SearchCondition(){ - } - - public Filter getFilter() { - return filter; - } - public void setFilter(Filter filter) { - this.filter = filter; - } - public long getPageSize() { - return pageSize; - } - public void setPageSize(long pageSize) { - this.pageSize = pageSize; - } - public String getStartRowkey() { - return startRowkey; - } - public void setStartRowkey(String startRowkey) { - this.startRowkey = startRowkey; - } - public String getEntityName() { - return entityName; - } - public void setEntityName(String entityName) { - this.entityName = entityName; - } - public List getOutputFields() { - return outputFields; - } - public void setOutputFields(List outputFields) { - this.outputFields = outputFields; - } - public String getStartTime() { - return startTime; - } - public void setStartTime(String startTime) { - 
this.startTime = startTime; - } - public String getEndTime() { - return endTime; - } - public void setEndTime(String endTime) { - this.endTime = endTime; - } - public List getPartitionValues() { - return partitionValues; - } - public void setPartitionValues(List partitionValues) { - this.partitionValues = partitionValues; - } - public ORExpression getQueryExpression() { - return queryExpression; - } - public void setQueryExpression(ORExpression queryExpression) { - this.queryExpression = queryExpression; - } - - public boolean isOutputAll() { - return outputAll; - } - - public void setOutputAll(boolean outputAll) { - this.outputAll = outputAll; - } +public class SearchCondition { + private String startTime; + private String endTime; + private Filter filter; + private List outputFields; + private boolean outputAll; + private long pageSize; + private String startRowkey; + private String entityName; + private List partitionValues; + private ORExpression queryExpression; + + public boolean isOutputVerbose() { + return outputVerbose; + } + + public void setOutputVerbose(boolean outputVerbose) { + this.outputVerbose = outputVerbose; + } + + public Map getOutputAlias() { + return outputAlias; + } + + public void setOutputAlias(Map outputAlias) { + this.outputAlias = outputAlias; + } + + private boolean outputVerbose; + private Map outputAlias; + + /** + * copy constructor. 
+ * + * @param sc + */ + public SearchCondition(SearchCondition sc) { + this.startTime = sc.startTime; + this.endTime = sc.endTime; + this.filter = sc.filter; + this.outputFields = sc.outputFields; + this.pageSize = sc.pageSize; + this.startRowkey = sc.startRowkey; + this.entityName = sc.entityName; + this.partitionValues = sc.partitionValues; + this.queryExpression = sc.queryExpression; + } + + public SearchCondition() { + } + + public Filter getFilter() { + return filter; + } + + public void setFilter(Filter filter) { + this.filter = filter; + } + + public long getPageSize() { + return pageSize; + } + + public void setPageSize(long pageSize) { + this.pageSize = pageSize; + } + + public String getStartRowkey() { + return startRowkey; + } + + public void setStartRowkey(String startRowkey) { + this.startRowkey = startRowkey; + } + + public String getEntityName() { + return entityName; + } + + public void setEntityName(String entityName) { + this.entityName = entityName; + } + + public List getOutputFields() { + return outputFields; + } + + public void setOutputFields(List outputFields) { + this.outputFields = outputFields; + } + + public String getStartTime() { + return startTime; + } + + public void setStartTime(String startTime) { + this.startTime = startTime; + } + + public String getEndTime() { + return endTime; + } + + public void setEndTime(String endTime) { + this.endTime = endTime; + } + + public List getPartitionValues() { + return partitionValues; + } + + public void setPartitionValues(List partitionValues) { + this.partitionValues = partitionValues; + } + + public ORExpression getQueryExpression() { + return queryExpression; + } + + public void setQueryExpression(ORExpression queryExpression) { + this.queryExpression = queryExpression; + } + + public boolean isOutputAll() { + return outputAll; + } + + public void setOutputAll(boolean outputAll) { + this.outputAll = outputAll; + } } diff --git 
a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/StreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/StreamReader.java index 005a2e27a5..f79bbfbd73 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/StreamReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/StreamReader.java @@ -20,36 +20,39 @@ import java.util.List; public abstract class StreamReader { - protected List _listeners = new ArrayList(); + protected List listeners = new ArrayList(); - /** - * Listener can be only notified after it is added to listener list - * @param listener - */ - public synchronized void register(EntityCreationListener listener){ - _listeners.add(listener); - } - - /** - * Listener can not get notification once after it is removed from listener list - * @param listener - */ - public synchronized void unregister(EntityCreationListener listener){ - _listeners.remove(listener); - } - - public abstract void readAsStream() throws Exception; - - /** - * Get scanned last entity timestamp - * - * @return - */ - public abstract long getLastTimestamp(); - - /** - * Get scanned first entity timestamp - * @return - */ - public abstract long getFirstTimestamp(); + /** + * Listener can be only notified after it is added to listener list. + * + * @param listener + */ + public synchronized void register(EntityCreationListener listener) { + listeners.add(listener); + } + + /** + * Listener can not get notification once after it is removed from listener list. + * + * @param listener + */ + public synchronized void unregister(EntityCreationListener listener) { + listeners.remove(listener); + } + + public abstract void readAsStream() throws Exception; + + /** + * Get scanned last entity timestamp. + * + * @return + */ + public abstract long getLastTimestamp(); + + /** + * Get scanned first entity timestamp. 
+ * + * @return + */ + public abstract long getFirstTimestamp(); } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/BooleanExpressionComparator.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/BooleanExpressionComparator.java index 0d71e10cdb..68943515d8 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/BooleanExpressionComparator.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/BooleanExpressionComparator.java @@ -16,6 +16,7 @@ */ package org.apache.eagle.log.entity.filter; +import org.apache.commons.lang.StringUtils; import org.apache.eagle.log.entity.EntityQualifierUtils; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntitySerDeser; @@ -24,7 +25,6 @@ import org.apache.eagle.log.expression.ParsiiInvalidException; import org.apache.eagle.query.parser.ComparisonOperator; import org.apache.eagle.query.parser.TokenType; -import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.io.WritableComparable; import org.slf4j.Logger; @@ -39,13 +39,12 @@ /** * BooleanExpressionComparator * - * Currently support double expression only. - * - * TODO: 1) thread-safe? 2) Rewrite filter expression to evaluate once + *

Currently support double expression only. * + *

TODO: 1) thread-safe? 2) Rewrite filter expression to evaluate once */ public class BooleanExpressionComparator implements WritableComparable> { - private final static Logger LOG = LoggerFactory.getLogger(BooleanExpressionComparator.class); + private static final Logger LOG = LoggerFactory.getLogger(BooleanExpressionComparator.class); // Should be Writable private QualifierFilterEntity filterEntity; @@ -62,22 +61,23 @@ public Set getRequiredFields() { private Set requiredFields = new HashSet(); - public BooleanExpressionComparator(){} + public BooleanExpressionComparator() { + } - public BooleanExpressionComparator(QualifierFilterEntity entity,EntityDefinition ed){ - this.filterEntity = entity; + public BooleanExpressionComparator(QualifierFilterEntity entity, EntityDefinition ed) { + this.filterEntity = entity; this.ed = ed; try { this.init(); } catch (Exception ex) { // Client side expression validation to fast fail if having error - LOG.error("Got exception: "+ex.getMessage(),ex); - throw new ExpressionEvaluationException(ex.getMessage(),ex); + LOG.error("Got exception: " + ex.getMessage(), ex); + throw new ExpressionEvaluationException(ex.getMessage(), ex); } } private void init() throws ParsiiInvalidException, ParseException { - LOG.info("Filter expression: "+filterEntity.toString()); + LOG.info("Filter expression: " + filterEntity.toString()); if (filterEntity.getKey() != null) { if (filterEntity.getKeyType() == TokenType.NUMBER) { leftValue = Double.parseDouble(filterEntity.getKey()); @@ -100,11 +100,13 @@ private void init() throws ParsiiInvalidException, ParseException { throw new IllegalStateException("QualifierFilterEntity value is null"); } - if (this.filterEntity.getOp() == null) + if (this.filterEntity.getOp() == null) { throw new IllegalStateException("QualifierFilterEntity op is null"); + } this.func = _opExprFuncMap.get(this.filterEntity.getOp()); - if (this.func == null) + if (this.func == null) { throw new IllegalStateException("No boolean 
evaluation function found for operation: " + this.filterEntity.getOp()); + } } /** @@ -114,24 +116,24 @@ private void init() throws ParsiiInvalidException, ParseException { * @return evaluation result as true (1) or false (0) * @throws Exception */ - private boolean eval(Map context) throws Exception { - if(filterEntity.getKeyType() != TokenType.NUMBER){ - leftValue = eval(filterEntity.getKey(),context); + private boolean eval(Map context) throws Exception { + if (filterEntity.getKeyType() != TokenType.NUMBER) { + leftValue = eval(filterEntity.getKey(), context); } - if(filterEntity.getValueType() != TokenType.NUMBER){ - rightValue = eval(filterEntity.getValue(),context); + if (filterEntity.getValueType() != TokenType.NUMBER) { + rightValue = eval(filterEntity.getValue(), context); } - if(Double.isInfinite(leftValue) || Double.isInfinite(rightValue)){ -// if(LOG.isDebugEnabled()) { + if (Double.isInfinite(leftValue) || Double.isInfinite(rightValue)) { + // if(LOG.isDebugEnabled()) { if (Double.isInfinite(leftValue)) { LOG.warn("Evaluation result of key: " + this.filterEntity.getKey() + " is " + leftValue + " (Infinite), ignore"); } else { - LOG.warn("Evaluation result of value: "+this.filterEntity.getValue()+" is "+rightValue+" (Infinite), ignore"); + LOG.warn("Evaluation result of value: " + this.filterEntity.getValue() + " is " + rightValue + " (Infinite), ignore"); } -// } + // } return false; } - return func.eval(leftValue,rightValue); + return func.eval(leftValue, rightValue); } /** @@ -142,38 +144,40 @@ private boolean eval(Map context) throws Exception { * @return * @throws Exception */ - private double eval(String expr,Map context) throws Exception { + private double eval(String expr, Map context) throws Exception { return ExpressionParser.parse(expr).eval(context); } /** + * compareTo. 
* * @param row List[KeyValue] All key values in a row - * * @return 0 to filter out row [false], otherwise to include row into scanner [true] */ @Override public int compareTo(List row) { - Map context = new HashMap(); - for(KeyValue kv:row){ + Map context = new HashMap(); + for (KeyValue kv : row) { String qualifierName = new String(kv.getQualifier()); // Because assume just handle about double value // so ignore tag whose value is String - if(!this.ed.isTag(qualifierName)){ + if (!this.ed.isTag(qualifierName)) { Qualifier qualifier = this.ed.getQualifierNameMap().get(qualifierName); String displayName = qualifier.getDisplayName(); - if(displayName == null) displayName = qualifierName; + if (displayName == null) { + displayName = qualifierName; + } try { - if(this.requiredFields.contains(displayName)) { + if (this.requiredFields.contains(displayName)) { EntitySerDeser serDeser = qualifier.getSerDeser(); double value = EntityQualifierUtils.convertObjToDouble(serDeser.deserialize(kv.getValue())); if (Double.isNaN(value)) { context.put(displayName, value); } } - }catch (Exception ex){ - LOG.warn("Failed to parse value of field "+displayName+" as double, ignore: "+ex.getMessage(),ex); + } catch (Exception ex) { + LOG.warn("Failed to parse value of field " + displayName + " as double, ignore: " + ex.getMessage(), ex); } } } @@ -182,22 +186,21 @@ public int compareTo(List row) { /** * @param context Map[String,Double] - * * @return context.keySet().containsAll(this.requiredFields) && eval(context) ? 1:0; */ - int compareTo(Map context){ + int compareTo(Map context) { try { - if(context.keySet().containsAll(this.requiredFields)){ - return eval(context)? 1:0; - }else{ - if(LOG.isDebugEnabled()) { + if (context.keySet().containsAll(this.requiredFields)) { + return eval(context) ? 
1 : 0; + } else { + if (LOG.isDebugEnabled()) { LOG.debug("Require variables: [" + StringUtils.join(this.requiredFields, ",") + "], but just given: [" + StringUtils.join(context.keySet(), ",") + "]"); } return 0; } } catch (Exception e) { - LOG.error(e.getMessage(),e); - throw new ExpressionEvaluationException(e.getMessage(),e); + LOG.error(e.getMessage(), e); + throw new ExpressionEvaluationException(e.getMessage(), e); } } @@ -216,31 +219,31 @@ public void readFields(DataInput in) throws IOException { try { this.init(); - } catch (Exception ex){ - LOG.error("Got exception: "+ex.getMessage(),ex); - throw new IOException(ex.getMessage(),ex); + } catch (Exception ex) { + LOG.error("Got exception: " + ex.getMessage(), ex); + throw new IOException(ex.getMessage(), ex); } } - private static Map _opExprFuncMap = new HashMap(); + private static Map _opExprFuncMap = new HashMap(); static { - _opExprFuncMap.put(ComparisonOperator.EQUAL,new EqualExprFunc()); - _opExprFuncMap.put(ComparisonOperator.IS,new EqualExprFunc()); + _opExprFuncMap.put(ComparisonOperator.EQUAL, new EqualExprFunc()); + _opExprFuncMap.put(ComparisonOperator.IS, new EqualExprFunc()); - _opExprFuncMap.put(ComparisonOperator.NOT_EQUAL,new NotEqualExprFunc()); - _opExprFuncMap.put(ComparisonOperator.IS_NOT,new NotEqualExprFunc()); + _opExprFuncMap.put(ComparisonOperator.NOT_EQUAL, new NotEqualExprFunc()); + _opExprFuncMap.put(ComparisonOperator.IS_NOT, new NotEqualExprFunc()); - _opExprFuncMap.put(ComparisonOperator.LESS,new LessExprFunc()); - _opExprFuncMap.put(ComparisonOperator.LESS_OR_EQUAL,new LessOrEqualExprFunc()); - _opExprFuncMap.put(ComparisonOperator.GREATER,new GreaterExprFunc()); - _opExprFuncMap.put(ComparisonOperator.GREATER_OR_EQUAL,new GreaterOrEqualExprFunc()); + _opExprFuncMap.put(ComparisonOperator.LESS, new LessExprFunc()); + _opExprFuncMap.put(ComparisonOperator.LESS_OR_EQUAL, new LessOrEqualExprFunc()); + _opExprFuncMap.put(ComparisonOperator.GREATER, new GreaterExprFunc()); + 
_opExprFuncMap.put(ComparisonOperator.GREATER_OR_EQUAL, new GreaterOrEqualExprFunc()); // "Life should be much better with functional programming language" - Hao Chen Nov 18th, 2014 } private static interface BooleanExprFunc { - boolean eval(double val1,double val2); + boolean eval(double val1, double val2); } private static class EqualExprFunc implements BooleanExprFunc { @@ -249,6 +252,7 @@ public boolean eval(double val1, double val2) { return val1 == val2; } } + private static class NotEqualExprFunc implements BooleanExprFunc { @Override public boolean eval(double val1, double val2) { @@ -262,18 +266,21 @@ public boolean eval(double val1, double val2) { return val1 < val2; } } + private static class LessOrEqualExprFunc implements BooleanExprFunc { @Override public boolean eval(double val1, double val2) { return val1 <= val2; } } + private static class GreaterExprFunc implements BooleanExprFunc { @Override public boolean eval(double val1, double val2) { return val1 > val2; } } + private static class GreaterOrEqualExprFunc implements BooleanExprFunc { @Override public boolean eval(double val1, double val2) { @@ -281,13 +288,15 @@ public boolean eval(double val1, double val2) { } } - public static class ExpressionEvaluationException extends RuntimeException{ + public static class ExpressionEvaluationException extends RuntimeException { public ExpressionEvaluationException(String message, Throwable cause) { super(message, cause); } + public ExpressionEvaluationException(String message) { super(message); } + public ExpressionEvaluationException(Throwable cause) { super(cause); } @@ -295,6 +304,6 @@ public ExpressionEvaluationException(Throwable cause) { @Override public String toString() { - return this.getClass().getSimpleName()+" ("+this.filterEntity.toString()+")"; + return this.getClass().getSimpleName() + " (" + this.filterEntity.toString() + ")"; } } \ No newline at end of file diff --git 
a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/HBaseFilterBuilder.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/HBaseFilterBuilder.java index 82094452d8..e6cc87753a 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/HBaseFilterBuilder.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/HBaseFilterBuilder.java @@ -16,11 +16,11 @@ */ package org.apache.eagle.log.entity.filter; +import org.apache.eagle.common.ByteUtil; import org.apache.eagle.common.config.EagleConfigFactory; import org.apache.eagle.log.entity.EntityQualifierUtils; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.Qualifier; -import org.apache.eagle.common.ByteUtil; import org.apache.eagle.query.parser.*; import org.apache.hadoop.hbase.filter.*; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; @@ -37,549 +37,544 @@ * the steps of building hbase filters * 1. receive ORExpression from eagle-antlr * 2. 
iterate all ANDExpression in ORExpression - * 2.1 put each ANDExpression to a new filter list with MUST_PASS_ONE option - * 2.2 iterate all AtomicExpression in ANDExpression - * 2.2.1 group AtomicExpression into 2 groups by looking up metadata, one is for tag filters, the other is for column filters - * 2.2.2 put the above 2 filters to a filter list with MUST_PASS_ALL option + * 2.1 put each ANDExpression to a new filter list with MUST_PASS_ONE option + * 2.2 iterate all AtomicExpression in ANDExpression + * 2.2.1 group AtomicExpression into 2 groups by looking up metadata, one is for tag filters, the other is for column filters + * 2.2.2 put the above 2 filters to a filter list with MUST_PASS_ALL option */ public class HBaseFilterBuilder { - private static final Logger LOG = LoggerFactory.getLogger(HBaseFilterBuilder.class); - - /** - * syntax is @ - */ -// private static final String fnRegex = "^@(.*)$"; - private static final Pattern _fnPattern = TokenConstant.ID_PATTERN;// Pattern.compile(fnRegex); - private static final Charset _defaultCharset = Charset.forName("ISO-8859-1"); - - private ORExpression _orExpr; - private EntityDefinition _ed; - private boolean _filterIfMissing; - private Charset _charset = _defaultCharset; - - /** - * TODO: Verify performance impact - * - * @return - */ - public Set getFilterFields() { - return _filterFields; - } - - /** - * Just add filter fields for expression filter - */ - private Set _filterFields; - - public HBaseFilterBuilder(EntityDefinition ed, ORExpression orExpr) { - this(ed, orExpr, false); - } - - public HBaseFilterBuilder(EntityDefinition ed, ORExpression orExpr, boolean filterIfMissing) { - this._ed = ed; - this._orExpr = orExpr; - this._filterIfMissing = filterIfMissing; - } - - public void setCharset(String charsetName){ - _charset = Charset.forName(charsetName); - } - - public Charset getCharset(){ - return _charset; - } - - /** - * Because we don't have metadata for tag, we regard non-qualifer field as tag. 
So one field possibly is not a real tag when this function return true. This happens - * when a user input an wrong field name which is neither tag or qualifier - * - * @param field - */ - private boolean isTag(String field){ - return _ed.isTag(field); - } - - /** - * check whether this field is one entity attribute or not - * @param fieldName - * @return - */ - private String parseEntityAttribute(String fieldName){ - Matcher m = _fnPattern.matcher(fieldName); - if(m.find()){ - return m.group(1); - } - return null; - } - - /** - * Return the partition values for each or expression. The size of the returned list should be equal to - * the size of FilterList that {@link #buildFilters()} returns. - * - * TODO: For now we don't support one query to query multiple partitions. In future if partition is defined, - * for the entity, internally We need to spawn multiple queries and send one query for each partition. - * - * @return Return the partition values for each or expression. Return null if the entity doesn't support - * partition - */ - public List getPartitionValues() { - final String[] partitions = _ed.getPartitions(); - if (partitions == null || partitions.length == 0) { - return null; - } - final List result = new ArrayList(); - final Map partitionKeyValueMap = new HashMap(); - for(ANDExpression andExpr : _orExpr.getANDExprList()) { - partitionKeyValueMap.clear(); - for(AtomicExpression ae : andExpr.getAtomicExprList()) { - // TODO temporarily ignore those fields which are not for attributes - if(ae.getKeyType() == TokenType.ID) { - final String fieldName = parseEntityAttribute(ae.getKey()); - if (fieldName == null) { - LOG.warn(fieldName + " field does not have format @, ignored"); - continue; - } - if (_ed.isPartitionTag(fieldName) && ComparisonOperator.EQUAL.equals(ae.getOp())) { - final String value = ae.getValue(); - partitionKeyValueMap.put(fieldName, value); - } - } - } - final String[] values = new String[partitions.length]; - result.add(values); - for 
(int i = 0; i < partitions.length; ++i) { - final String partition = partitions[i]; - final String value = partitionKeyValueMap.get(partition); - values[i] = value; - } - } - return result; - } - - /** - * @see org.apache.eagle.query.parser.TokenType - * - * @return - */ - public FilterList buildFilters(){ - // TODO: Optimize to select between row filter or column filter for better performance - // Use row key filter priority by default - boolean rowFilterPriority = true; - - FilterList fltList = new FilterList(Operator.MUST_PASS_ONE); - for(ANDExpression andExpr : _orExpr.getANDExprList()){ - - FilterList list = new FilterList(Operator.MUST_PASS_ALL); - Map> tagFilters = new HashMap>(); - List qualifierFilters = new ArrayList(); -// List tagLikeQualifierFilters = new ArrayList(); - - // TODO refactor not to use too much if/else - for(AtomicExpression ae : andExpr.getAtomicExprList()){ - // TODO temporarily ignore those fields which are not for attributes - - String fieldName = ae.getKey(); - if(ae.getKeyType() == TokenType.ID){ - fieldName = parseEntityAttribute(fieldName); - if(fieldName == null){ - LOG.warn(fieldName + " field does not have format @, ignored"); - continue; - } - } - - String value = ae.getValue(); - ComparisonOperator op = ae.getOp(); - TokenType keyType = ae.getKeyType(); - TokenType valueType = ae.getValueType(); - QualifierFilterEntity entry = new QualifierFilterEntity(fieldName,value,op,keyType,valueType); - - // TODO Exact match, need to add escape for those special characters here, including: - // "-", "[", "]", "/", "{", "}", "(", ")", "*", "+", "?", ".", "\\", "^", "$", "|" - - if(keyType == TokenType.ID && isTag(fieldName)){ - if ((ComparisonOperator.EQUAL.equals(op) || ComparisonOperator.IS.equals(op)) - && !TokenType.NULL.equals(valueType)) - { - // Use RowFilter for equal TAG - if(tagFilters.get(fieldName) == null) tagFilters.put(fieldName, new ArrayList()); - tagFilters.get(fieldName).add(value); - } else if (rowFilterPriority && 
ComparisonOperator.IN.equals(op)) - { - // Use RowFilter here by default - if(tagFilters.get(fieldName) == null) tagFilters.put(fieldName, new ArrayList()); - tagFilters.get(fieldName).addAll(EntityQualifierUtils.parseList(value)); - } else if (ComparisonOperator.LIKE.equals(op) - || ComparisonOperator.NOT_LIKE.equals(op) - || ComparisonOperator.CONTAINS.equals(op) - || ComparisonOperator.NOT_CONTAINS.equals(op) - || ComparisonOperator.IN.equals(op) - || ComparisonOperator.IS.equals(op) - || ComparisonOperator.IS_NOT.equals(op) - || ComparisonOperator.NOT_EQUAL.equals(op) - || ComparisonOperator.EQUAL.equals(op) - || ComparisonOperator.NOT_IN.equals(op)) - { - qualifierFilters.add(entry); - } else - { - LOG.warn("Don't support operation: \"" + op + "\" on tag field: " + fieldName + " yet, going to ignore"); - throw new IllegalArgumentException("Don't support operation: "+op+" on tag field: "+fieldName+", avaliable options: =, =!, =~, !=~, in, not in, contains, not contains"); - } - }else{ - qualifierFilters.add(entry); - } - } - - // Build RowFilter for equal tags - list.addFilter(buildTagFilter(tagFilters)); - - // Build SingleColumnValueFilter - FilterList qualifierFilterList = buildQualifierFilter(qualifierFilters); - if(qualifierFilterList != null && qualifierFilterList.getFilters().size()>0){ - list.addFilter(qualifierFilterList); - }else { - if(LOG.isDebugEnabled()) LOG.debug("Ignore empty qualifier filter from "+qualifierFilters.toString()); - } - fltList.addFilter(list); - } - LOG.info("Query: " + _orExpr.toString() + " => Filter: " + fltList.toString()); - return fltList; - } - - /** - * _charset is used to decode the byte array, in hbase server, RegexStringComparator uses the same - * charset to decode the byte array stored in qualifier - * for tag filter regex, it's always ISO-8859-1 as it only comes from String's hashcode (Integer) - * Note: regex comparasion is to compare String - */ - protected Filter buildTagFilter(Map> tagFilters){ - 
RegexStringComparator regexStringComparator = new RegexStringComparator(buildTagFilterRegex(tagFilters)); - regexStringComparator.setCharset(_charset); - RowFilter filter = new RowFilter(CompareOp.EQUAL, regexStringComparator); - return filter; - } - - /** - * all qualifiers' condition must be satisfied. - * - *

Use RegexStringComparator for:

- * IN - * LIKE - * NOT_LIKE - * - *

Use SubstringComparator for:

- * CONTAINS - * - *

Use EntityQualifierHelper for:

- * EQUALS - * NOT_EUQALS - * LESS - * LESS_OR_EQUAL - * GREATER - * GREATER_OR_EQUAL - * - *

- * TODO: Compare performance of RegexStringComparator ,SubstringComparator ,EntityQualifierHelper - *

- * - * @param qualifierFilters - * @return - */ - protected FilterList buildQualifierFilter(List qualifierFilters){ - FilterList list = new FilterList(Operator.MUST_PASS_ALL); - // iterate all the qualifiers - for(QualifierFilterEntity entry : qualifierFilters){ - // if contains expression based filter - if(entry.getKeyType() == TokenType.EXP - || entry.getValueType() == TokenType.EXP - || entry.getKeyType() != TokenType.ID){ - if(!EagleConfigFactory.load().isCoprocessorEnabled()) { - LOG.warn("Expression in filter may not support, because custom filter and coprocessor is disabled: " + entry.toString()); - } - list.addFilter(buildExpressionBasedFilter(entry)); - continue; - } - - // else using SingleColumnValueFilter - String qualifierName = entry.getKey(); - if(!isTag(entry.getKey())){ - Qualifier qualifier = _ed.getDisplayNameMap().get(entry.getKey()); - qualifierName = qualifier.getQualifierName(); - } - - // Comparator to be used for building HBase Filter - // WritableByteArrayComparable comparator; + private static final Logger LOG = LoggerFactory.getLogger(HBaseFilterBuilder.class); + + // private static final String fnRegex = "^@(.*)$"; + private static final Pattern _fnPattern = TokenConstant.ID_PATTERN;// Pattern.compile(fnRegex); + private static final Charset _defaultCharset = Charset.forName("ISO-8859-1"); + + private ORExpression orExpr; + private EntityDefinition ed; + private boolean filterIfMissing; + private Charset charset = _defaultCharset; + + /** + * get filter fields. + * TODO: Verify performance impact + * + * @return + */ + public Set getFilterFields() { + return filterFields; + } + + /** + * Just add filter fields for expression filter. 
+ */ + private Set filterFields; + + public HBaseFilterBuilder(EntityDefinition ed, ORExpression orExpr) { + this(ed, orExpr, false); + } + + public HBaseFilterBuilder(EntityDefinition ed, ORExpression orExpr, boolean filterIfMissing) { + this.ed = ed; + this.orExpr = orExpr; + this.filterIfMissing = filterIfMissing; + } + + public void setCharset(String charsetName) { + charset = Charset.forName(charsetName); + } + + public Charset getCharset() { + return charset; + } + + /** + * Because we don't have metadata for tag, we regard non-qualifer field as tag. So one field possibly is not a real tag when this function return true. This happens + * when a user input an wrong field name which is neither tag or qualifier. + * + * @param field + */ + private boolean isTag(String field) { + return ed.isTag(field); + } + + /** + * check whether this field is one entity attribute or not. + * + * @param fieldName + * @return + */ + private String parseEntityAttribute(String fieldName) { + Matcher m = _fnPattern.matcher(fieldName); + if (m.find()) { + return m.group(1); + } + return null; + } + + /** + * Return the partition values for each or expression. The size of the returned list should be equal to + * the size of FilterList that {@link #buildFilters()} returns. + * + *

TODO: For now we don't support one query to query multiple partitions. In future if partition is defined, + * for the entity, internally We need to spawn multiple queries and send one query for each partition. + * + * @return Return the partition values for each or expression. Return null if the entity doesn't support partition + */ + public List getPartitionValues() { + final String[] partitions = ed.getPartitions(); + if (partitions == null || partitions.length == 0) { + return null; + } + final List result = new ArrayList(); + final Map partitionKeyValueMap = new HashMap(); + for (ANDExpression andExpr : orExpr.getANDExprList()) { + partitionKeyValueMap.clear(); + for (AtomicExpression ae : andExpr.getAtomicExprList()) { + // TODO temporarily ignore those fields which are not for attributes + if (ae.getKeyType() == TokenType.ID) { + final String fieldName = parseEntityAttribute(ae.getKey()); + if (fieldName == null) { + LOG.warn(fieldName + " field does not have format @, ignored"); + continue; + } + if (ed.isPartitionTag(fieldName) && ComparisonOperator.EQUAL.equals(ae.getOp())) { + final String value = ae.getValue(); + partitionKeyValueMap.put(fieldName, value); + } + } + } + final String[] values = new String[partitions.length]; + result.add(values); + for (int i = 0; i < partitions.length; ++i) { + final String partition = partitions[i]; + final String value = partitionKeyValueMap.get(partition); + values[i] = value; + } + } + return result; + } + + /** + * @return + * @see org.apache.eagle.query.parser.TokenType + */ + public FilterList buildFilters() { + // TODO: Optimize to select between row filter or column filter for better performance + // Use row key filter priority by default + boolean rowFilterPriority = true; + + FilterList fltList = new FilterList(Operator.MUST_PASS_ONE); + for (ANDExpression andExpr : orExpr.getANDExprList()) { + + FilterList list = new FilterList(Operator.MUST_PASS_ALL); + Map> tagFilters = new HashMap>(); + List 
qualifierFilters = new ArrayList(); + // List tagLikeQualifierFilters = new ArrayList(); + + // TODO refactor not to use too much if/else + for (AtomicExpression ae : andExpr.getAtomicExprList()) { + // TODO temporarily ignore those fields which are not for attributes + + String fieldName = ae.getKey(); + if (ae.getKeyType() == TokenType.ID) { + fieldName = parseEntityAttribute(fieldName); + if (fieldName == null) { + LOG.warn(fieldName + " field does not have format @, ignored"); + continue; + } + } + + String value = ae.getValue(); + ComparisonOperator op = ae.getOp(); + TokenType keyType = ae.getKeyType(); + TokenType valueType = ae.getValueType(); + QualifierFilterEntity entry = new QualifierFilterEntity(fieldName, value, op, keyType, valueType); + + // TODO Exact match, need to add escape for those special characters here, including: + // "-", "[", "]", "/", "{", "}", "(", ")", "*", "+", "?", ".", "\\", "^", "$", "|" + + if (keyType == TokenType.ID && isTag(fieldName)) { + if ((ComparisonOperator.EQUAL.equals(op) || ComparisonOperator.IS.equals(op)) + && !TokenType.NULL.equals(valueType)) { + // Use RowFilter for equal TAG + if (tagFilters.get(fieldName) == null) { + tagFilters.put(fieldName, new ArrayList()); + } + tagFilters.get(fieldName).add(value); + } else if (rowFilterPriority && ComparisonOperator.IN.equals(op)) { + // Use RowFilter here by default + if (tagFilters.get(fieldName) == null) { + tagFilters.put(fieldName, new ArrayList()); + } + tagFilters.get(fieldName).addAll(EntityQualifierUtils.parseList(value)); + } else if (ComparisonOperator.LIKE.equals(op) + || ComparisonOperator.NOT_LIKE.equals(op) + || ComparisonOperator.CONTAINS.equals(op) + || ComparisonOperator.NOT_CONTAINS.equals(op) + || ComparisonOperator.IN.equals(op) + || ComparisonOperator.IS.equals(op) + || ComparisonOperator.IS_NOT.equals(op) + || ComparisonOperator.NOT_EQUAL.equals(op) + || ComparisonOperator.EQUAL.equals(op) + || ComparisonOperator.NOT_IN.equals(op)) { + 
qualifierFilters.add(entry); + } else { + LOG.warn("Don't support operation: \"" + op + "\" on tag field: " + fieldName + " yet, going to ignore"); + throw new IllegalArgumentException("Don't support operation: " + op + " on tag field: " + fieldName + ", avaliable options: =, =!, =~, !=~, in, not in, contains, not " + + "contains"); + } + } else { + qualifierFilters.add(entry); + } + } + + // Build RowFilter for equal tags + list.addFilter(buildTagFilter(tagFilters)); + + // Build SingleColumnValueFilter + FilterList qualifierFilterList = buildQualifierFilter(qualifierFilters); + if (qualifierFilterList != null && qualifierFilterList.getFilters().size() > 0) { + list.addFilter(qualifierFilterList); + } else { + if (LOG.isDebugEnabled()) { + LOG.debug("Ignore empty qualifier filter from " + qualifierFilters.toString()); + } + } + fltList.addFilter(list); + } + LOG.info("Query: " + orExpr.toString() + " => Filter: " + fltList.toString()); + return fltList; + } + + /** + * _charset is used to decode the byte array, in hbase server, RegexStringComparator uses the same + * charset to decode the byte array stored in qualifier + * for tag filter regex, it's always ISO-8859-1 as it only comes from String's hashcode (Integer) + * Note: regex comparasion is to compare String. + */ + protected Filter buildTagFilter(Map> tagFilters) { + RegexStringComparator regexStringComparator = new RegexStringComparator(buildTagFilterRegex(tagFilters)); + regexStringComparator.setCharset(charset); + RowFilter filter = new RowFilter(CompareOp.EQUAL, regexStringComparator); + return filter; + } + + /** + * all qualifiers' condition must be satisfied. + *

Use RegexStringComparator for:

+ * IN + * LIKE + * NOT_LIKE + *

Use SubstringComparator for:

+ * CONTAINS + *

Use EntityQualifierHelper for:

+ * EQUALS + * NOT_EUQALS + * LESS + * LESS_OR_EQUAL + * GREATER + * GREATER_OR_EQUAL + *

+ * TODO: Compare performance of RegexStringComparator ,SubstringComparator ,EntityQualifierHelper + *

+ * + * @param qualifierFilters + * @return + */ + protected FilterList buildQualifierFilter(List qualifierFilters) { + FilterList list = new FilterList(Operator.MUST_PASS_ALL); + // iterate all the qualifiers + for (QualifierFilterEntity entry : qualifierFilters) { + // if contains expression based filter + if (entry.getKeyType() == TokenType.EXP + || entry.getValueType() == TokenType.EXP + || entry.getKeyType() != TokenType.ID) { + if (!EagleConfigFactory.load().isCoprocessorEnabled()) { + LOG.warn("Expression in filter may not support, because custom filter and coprocessor is disabled: " + entry.toString()); + } + list.addFilter(buildExpressionBasedFilter(entry)); + continue; + } + + // else using SingleColumnValueFilter + String qualifierName = entry.getKey(); + if (!isTag(entry.getKey())) { + Qualifier qualifier = ed.getDisplayNameMap().get(entry.getKey()); + qualifierName = qualifier.getQualifierName(); + } + + // Comparator to be used for building HBase Filter + // WritableByteArrayComparable comparator; ByteArrayComparable comparable; - if(ComparisonOperator.IN.equals(entry.getOp()) - || ComparisonOperator.NOT_IN.equals(entry.getOp())){ - Filter setFilter = buildListQualifierFilter(entry); - if(setFilter!=null){ - list.addFilter(setFilter); - } - }else{ - // If [=,!=,is,is not] NULL, use NullComparator else throw exception - if(TokenType.NULL.equals(entry.getValueType())){ - if(ComparisonOperator.EQUAL.equals(entry.getOp()) - ||ComparisonOperator.NOT_EQUAL.equals(entry.getOp()) - ||ComparisonOperator.IS.equals(entry.getOp()) - ||ComparisonOperator.IS_NOT.equals(entry.getOp())) + if (ComparisonOperator.IN.equals(entry.getOp()) + || ComparisonOperator.NOT_IN.equals(entry.getOp())) { + Filter setFilter = buildListQualifierFilter(entry); + if (setFilter != null) { + list.addFilter(setFilter); + } + } else { + // If [=,!=,is,is not] NULL, use NullComparator else throw exception + if (TokenType.NULL.equals(entry.getValueType())) { + if 
(ComparisonOperator.EQUAL.equals(entry.getOp()) + || ComparisonOperator.NOT_EQUAL.equals(entry.getOp()) + || ComparisonOperator.IS.equals(entry.getOp()) + || ComparisonOperator.IS_NOT.equals(entry.getOp())) { comparable = new NullComparator(); - else - throw new IllegalArgumentException("Operation: "+entry.getOp()+" with NULL is not supported yet: "+entry.toString()+", avaliable options: [=, !=, is, is not] null|NULL"); - } - // If [contains, not contains],use SubstringComparator - else if (ComparisonOperator.CONTAINS.equals(entry.getOp()) - || ComparisonOperator.NOT_CONTAINS.equals(entry.getOp())) { + } else { + throw new IllegalArgumentException("Operation: " + entry.getOp() + " with NULL is not supported yet: " + entry.toString() + ", avaliable options: [=, !=, is, is not] " + + "null|NULL"); + } + } else if (ComparisonOperator.CONTAINS.equals(entry.getOp()) // If [contains, not contains],use SubstringComparator + || ComparisonOperator.NOT_CONTAINS.equals(entry.getOp())) { comparable = new SubstringComparator(entry.getValue()); - } - // If [like, not like], use RegexStringComparator - else if (ComparisonOperator.LIKE.equals(entry.getOp()) - || ComparisonOperator.NOT_LIKE.equals(entry.getOp())){ - // Use RegexStringComparator for LIKE / NOT_LIKE - RegexStringComparator _comparator = new RegexStringComparator(buildQualifierRegex(entry.getValue())); - _comparator.setCharset(_charset); + } else if (ComparisonOperator.LIKE.equals(entry.getOp()) // If [like, not like], use RegexStringComparator + || ComparisonOperator.NOT_LIKE.equals(entry.getOp())) { + // Use RegexStringComparator for LIKE / NOT_LIKE + RegexStringComparator _comparator = new RegexStringComparator(buildQualifierRegex(entry.getValue())); + _comparator.setCharset(charset); comparable = _comparator; - } else{ - Class type = EntityQualifierUtils.getType(_ed, entry.getKey()); - // if type is null (is Tag or not found) or not defined for TypedByteArrayComparator - 
if(!EagleConfigFactory.load().isCoprocessorEnabled() || type == null || TypedByteArrayComparator.get(type) == null){ - comparable = new BinaryComparator(EntityQualifierUtils.toBytes(_ed, entry.getKey(), entry.getValue())); - }else { - comparable = new TypedByteArrayComparator(EntityQualifierUtils.toBytes(_ed, entry.getKey(), entry.getValue()),type); - } - } - - SingleColumnValueFilter filter = - new SingleColumnValueFilter(_ed.getColumnFamily().getBytes(), qualifierName.getBytes(), convertToHBaseCompareOp(entry.getOp()), comparable); - filter.setFilterIfMissing(_filterIfMissing); - list.addFilter(filter); - } - } - - return list; - } - - private Filter buildExpressionBasedFilter(QualifierFilterEntity entry) { - BooleanExpressionComparator expressionComparator = new BooleanExpressionComparator(entry,_ed); - _filterFields = expressionComparator.getRequiredFields(); - RowValueFilter filter = new RowValueFilter(expressionComparator); - return filter; - } - - /** - * Currently use BinaryComparator only - *

TODO:

- * Possibility to tune performance by using: OR[BinaryComparator,...] instead of RegexStringComparator? - * - *

- * - * ! Check op must be IN or NOTIN in caller - * - * @param entry - * @return - */ - private Filter buildListQualifierFilter(QualifierFilterEntity entry){ - List valueSet = EntityQualifierUtils.parseList(entry.getValue()); - Iterator it = valueSet.iterator(); - String fieldName = entry.getKey(); - String qualifierName = fieldName; - if(!_ed.isTag(entry.getKey())){ - qualifierName = _ed.getDisplayNameMap().get(entry.getKey()).getQualifierName(); - } - -// TODO: Try to use RegExp just work if possible -// Because single SingleColumnValueFilter is much faster than multi SingleColumnValueFilters in OR list. -// Class qualifierType = EntityQualifierHelper.getType(_ed,fieldName); -// if( qualifierType == null || qualifierType == String.class){ -// boolean first = true; -// StringBuilder filterRegex = new StringBuilder(); -// filterRegex.append("^("); -// while(it.hasNext()) { -// String value = it.next(); -// if(value == null) { -// logger.warn("ignore empty value in set qualifier filter: "+entry.toString()); -// continue; -// } -// if(!first) filterRegex.append("|"); -// filterRegex.append(value); -// first = false; -// } -// filterRegex.append(")$"); -// RegexStringComparator regexStringComparator = new RegexStringComparator(filterRegex.toString()); -// return new SingleColumnValueFilter(_ed.getColumnFamily().getBytes(), qualifierName.getBytes(), -// convertToHBaseCompareOp(entry.getOp()), regexStringComparator); -// }else{ - FilterList setFilterList; - if(ComparisonOperator.IN.equals(entry.getOp())){ - setFilterList = new FilterList(Operator.MUST_PASS_ONE); - }else if(ComparisonOperator.NOT_IN.equals(entry.getOp())) { - setFilterList = new FilterList(Operator.MUST_PASS_ALL); - }else{ - throw new IllegalArgumentException(String.format("Don't support operation: %s on LIST type of value yet: %s, valid options: IN/NOT IN [LIST]",entry.getOp(),entry.toString())); - } - - while(it.hasNext()) { - String value = it.next(); - BinaryComparator comparator = new 
BinaryComparator(EntityQualifierUtils.toBytes(_ed, fieldName, value)); - SingleColumnValueFilter filter = - new SingleColumnValueFilter(_ed.getColumnFamily().getBytes(), qualifierName.getBytes(), convertToHBaseCompareOp(entry.getOp()), comparator); - filter.setFilterIfMissing(_filterIfMissing); - setFilterList.addFilter(filter); - } - - return setFilterList; -// } - } - - /** - * Just used for LIKE and NOT_LIKE - * - * @param qualifierValue - * @return - */ - protected String buildQualifierRegex(String qualifierValue){ - StringBuilder sb = new StringBuilder(); -// sb.append("(?s)"); - sb.append("^"); - sb.append(qualifierValue); - sb.append("$"); - return sb.toString(); - } - - /** - * Appends the given ID to the given buffer, followed by "\\E". - * [steal it from opentsdb, thanks opentsdb :) https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java] - */ - private static void addId(final StringBuilder buf, final byte[] id) { - buf.append("\\Q"); - boolean backslash = false; - for (final byte b : id) { - buf.append((char) (b & 0xFF)); - if (b == 'E' && backslash) { // If we saw a `\' and now we have a `E'. - // So we just terminated the quoted section because we just added \E - // to `buf'. So let's put a litteral \E now and start quoting again. - buf.append("\\\\E\\Q"); - } else { - backslash = b == '\\'; - } - } - buf.append("\\E"); - } - - @SuppressWarnings("unused") - private static void addId(final StringBuilder buf, final String id) { - buf.append("\\Q"); - int len = id.length()-1; - boolean backslash = false; - for (int i =0; i < len; i++) { - char c = id.charAt(i); - buf.append(c); - if (c == 'E' && backslash) { // If we saw a `\' and now we have a `E'. - // So we just terminated the quoted section because we just added \E - // to `buf'. So let's put a litteral \E now and start quoting again. 
- buf.append("\\\\E\\Q"); - } else { - backslash = c == '\\'; - } - } - buf.append("\\E"); - } - - /** - * one search tag may have multiple values which have OR relationship, and relationship between - * different search tags is AND - * the query is like "(TAG1=value11 OR TAG1=value12) AND TAG2=value2" - * @param tags - * @return - */ - protected String buildTagFilterRegex(Map> tags){ - // TODO need consider that \E could be part of tag, refer to https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java - final SortedMap> tagHash = new TreeMap>(); - final int numOfPartitionFields = (_ed.getPartitions() == null) ? 0 : _ed.getPartitions().length; - for(Map.Entry> entry : tags.entrySet()){ - String tagName = entry.getKey(); - // Ignore tag if the tag is one of partition fields - if (_ed.isPartitionTag(tagName)) { - continue; - } - List stringValues = entry.getValue(); - List hashValues = new ArrayList(stringValues.size()); - for(String value : stringValues){ - hashValues.add(value.hashCode()); - } - tagHash.put(tagName.hashCode(), hashValues); - } - - // header = prefix(4 bytes) + partition_hashes(4*N bytes) + timestamp (8 bytes) - final int headerLength = 4 + numOfPartitionFields * 4 + 8; - - // ... 
- StringBuilder sb = new StringBuilder(); - sb.append("(?s)"); - sb.append("^(?:.{").append(headerLength).append("})"); - sb.append("(?:.{").append(8).append("})*"); // for any number of tags - for (Map.Entry> entry : tagHash.entrySet()) { - try { - addId(sb, ByteUtil.intToBytes(entry.getKey())); - List hashValues = entry.getValue(); - sb.append("(?:"); - boolean first = true; - for(Integer value : hashValues){ - if(!first){ - sb.append('|'); - } - addId(sb, ByteUtil.intToBytes(value)); - first = false; - } - sb.append(")"); - sb.append("(?:.{").append(8).append("})*"); // for any number of tags - } catch (Exception ex) { - LOG.error("constructing regex error", ex); - } - } - sb.append("$"); - if(LOG.isDebugEnabled()) LOG.debug("Tag filter pattern is " + sb.toString()); - return sb.toString(); - } - - /** - * Convert ComparisonOperator to native HBase CompareOp - * - * Support: - * =, =~,CONTAINS,<,<=,>,>=,!=,!=~ - * - * @param comp - * @return - */ - protected static CompareOp convertToHBaseCompareOp(ComparisonOperator comp) { - if(comp == ComparisonOperator.EQUAL || comp == ComparisonOperator.LIKE - || comp == ComparisonOperator.CONTAINS - || comp == ComparisonOperator.IN - || comp == ComparisonOperator.IS - ) { - return CompareOp.EQUAL; - }else if(comp == ComparisonOperator.LESS) { - return CompareOp.LESS; - } else if(comp == ComparisonOperator.LESS_OR_EQUAL){ - return CompareOp.LESS_OR_EQUAL; - }else if(comp == ComparisonOperator.GREATER) { - return CompareOp.GREATER; - } else if(comp == ComparisonOperator.GREATER_OR_EQUAL){ - return CompareOp.GREATER_OR_EQUAL; - } else if(comp == ComparisonOperator.NOT_EQUAL - || comp == ComparisonOperator.NOT_LIKE - || comp == ComparisonOperator.NOT_CONTAINS - || comp == ComparisonOperator.IS_NOT - || comp == ComparisonOperator.NOT_IN) - { - return CompareOp.NOT_EQUAL; - } else { - LOG.error("{} operation is not supported now\n", comp); - throw new IllegalArgumentException("Illegal operation: "+comp+ ", avaliable options: "+ 
Arrays.toString(ComparisonOperator.values())); - } - } - - protected static CompareOp getHBaseCompareOp(String comp) { - return convertToHBaseCompareOp(ComparisonOperator.locateOperator(comp)); - } + } else { + Class type = EntityQualifierUtils.getType(ed, entry.getKey()); + // if type is null (is Tag or not found) or not defined for TypedByteArrayComparator + if (!EagleConfigFactory.load().isCoprocessorEnabled() || type == null || TypedByteArrayComparator.get(type) == null) { + comparable = new BinaryComparator(EntityQualifierUtils.toBytes(ed, entry.getKey(), entry.getValue())); + } else { + comparable = new TypedByteArrayComparator(EntityQualifierUtils.toBytes(ed, entry.getKey(), entry.getValue()), type); + } + } + + SingleColumnValueFilter filter = + new SingleColumnValueFilter(ed.getColumnFamily().getBytes(), qualifierName.getBytes(), convertToHBaseCompareOp(entry.getOp()), comparable); + filter.setFilterIfMissing(filterIfMissing); + list.addFilter(filter); + } + } + + return list; + } + + private Filter buildExpressionBasedFilter(QualifierFilterEntity entry) { + BooleanExpressionComparator expressionComparator = new BooleanExpressionComparator(entry, ed); + filterFields = expressionComparator.getRequiredFields(); + RowValueFilter filter = new RowValueFilter(expressionComparator); + return filter; + } + + /** + * Currently use BinaryComparator only + *

TODO:

+ * Possibility to tune performance by using: OR[BinaryComparator,...] instead of RegexStringComparator? + *
+ * + *

! Check op must be IN or NOTIN in caller + * + * @param entry + * @return + */ + private Filter buildListQualifierFilter(QualifierFilterEntity entry) { + List valueSet = EntityQualifierUtils.parseList(entry.getValue()); + Iterator it = valueSet.iterator(); + String fieldName = entry.getKey(); + String qualifierName = fieldName; + if (!ed.isTag(entry.getKey())) { + qualifierName = ed.getDisplayNameMap().get(entry.getKey()).getQualifierName(); + } + + // TODO: Try to use RegExp just work if possible + // Because single SingleColumnValueFilter is much faster than multi SingleColumnValueFilters in OR list. + /*Class qualifierType = EntityQualifierHelper.getType(_ed,fieldName); + if( qualifierType == null || qualifierType == String.class){ + boolean first = true; + StringBuilder filterRegex = new StringBuilder(); + filterRegex.append("^("); + while(it.hasNext()) { + String value = it.next(); + if(value == null) { + logger.warn("ignore empty value in set qualifier filter: "+entry.toString()); + continue; + } + if(!first) filterRegex.append("|"); + filterRegex.append(value); + first = false; + } + filterRegex.append(")$"); + RegexStringComparator regexStringComparator = new RegexStringComparator(filterRegex.toString()); + return new SingleColumnValueFilter(_ed.getColumnFamily().getBytes(), qualifierName.getBytes(), + convertToHBaseCompareOp(entry.getOp()), regexStringComparator); + }else{*/ + FilterList setFilterList; + if (ComparisonOperator.IN.equals(entry.getOp())) { + setFilterList = new FilterList(Operator.MUST_PASS_ONE); + } else if (ComparisonOperator.NOT_IN.equals(entry.getOp())) { + setFilterList = new FilterList(Operator.MUST_PASS_ALL); + } else { + throw new IllegalArgumentException(String.format("Don't support operation: %s on LIST type of value yet: %s, valid options: IN/NOT IN [LIST]", entry.getOp(), entry.toString())); + } + + while (it.hasNext()) { + String value = it.next(); + BinaryComparator comparator = new 
BinaryComparator(EntityQualifierUtils.toBytes(ed, fieldName, value)); + SingleColumnValueFilter filter = + new SingleColumnValueFilter(ed.getColumnFamily().getBytes(), qualifierName.getBytes(), convertToHBaseCompareOp(entry.getOp()), comparator); + filter.setFilterIfMissing(filterIfMissing); + setFilterList.addFilter(filter); + } + + return setFilterList; + // } + } + + /** + * Just used for LIKE and NOT_LIKE. + * + * @param qualifierValue + * @return + */ + protected String buildQualifierRegex(String qualifierValue) { + StringBuilder sb = new StringBuilder(); + // sb.append("(?s)"); + sb.append("^"); + sb.append(qualifierValue); + sb.append("$"); + return sb.toString(); + } + + /** + * Appends the given ID to the given buffer, followed by "\\E". + * [steal it from opentsdb, thanks opentsdb :) https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java]. + */ + private static void addId(final StringBuilder buf, final byte[] id) { + buf.append("\\Q"); + boolean backslash = false; + for (final byte b : id) { + buf.append((char) (b & 0xFF)); + if (b == 'E' && backslash) { // If we saw a `\' and now we have a `E'. + // So we just terminated the quoted section because we just added \E + // to `buf'. So let's put a litteral \E now and start quoting again. + buf.append("\\\\E\\Q"); + } else { + backslash = b == '\\'; + } + } + buf.append("\\E"); + } + + @SuppressWarnings("unused") + private static void addId(final StringBuilder buf, final String id) { + buf.append("\\Q"); + int len = id.length() - 1; + boolean backslash = false; + for (int i = 0; i < len; i++) { + char c = id.charAt(i); + buf.append(c); + if (c == 'E' && backslash) { // If we saw a `\' and now we have a `E'. + // So we just terminated the quoted section because we just added \E + // to `buf'. So let's put a litteral \E now and start quoting again. 
+ buf.append("\\\\E\\Q"); + } else { + backslash = c == '\\'; + } + } + buf.append("\\E"); + } + + /** + * one search tag may have multiple values which have OR relationship, and relationship between + * different search tags is AND + * the query is like "(TAG1=value11 OR TAG1=value12) AND TAG2=value2". + * + * @param tags + * @return + */ + protected String buildTagFilterRegex(Map> tags) { + // TODO need consider that \E could be part of tag, refer to https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java + final SortedMap> tagHash = new TreeMap>(); + final int numOfPartitionFields = (ed.getPartitions() == null) ? 0 : ed.getPartitions().length; + for (Map.Entry> entry : tags.entrySet()) { + String tagName = entry.getKey(); + // Ignore tag if the tag is one of partition fields + if (ed.isPartitionTag(tagName)) { + continue; + } + List stringValues = entry.getValue(); + List hashValues = new ArrayList(stringValues.size()); + for (String value : stringValues) { + hashValues.add(value.hashCode()); + } + tagHash.put(tagName.hashCode(), hashValues); + } + + // header = prefix(4 bytes) + partition_hashes(4*N bytes) + timestamp (8 bytes) + final int headerLength = 4 + numOfPartitionFields * 4 + 8; + + // ... 
+ StringBuilder sb = new StringBuilder(); + sb.append("(?s)"); + sb.append("^(?:.{").append(headerLength).append("})"); + sb.append("(?:.{").append(8).append("})*"); // for any number of tags + for (Map.Entry> entry : tagHash.entrySet()) { + try { + addId(sb, ByteUtil.intToBytes(entry.getKey())); + List hashValues = entry.getValue(); + sb.append("(?:"); + boolean first = true; + for (Integer value : hashValues) { + if (!first) { + sb.append('|'); + } + addId(sb, ByteUtil.intToBytes(value)); + first = false; + } + sb.append(")"); + sb.append("(?:.{").append(8).append("})*"); // for any number of tags + } catch (Exception ex) { + LOG.error("constructing regex error", ex); + } + } + sb.append("$"); + if (LOG.isDebugEnabled()) { + LOG.debug("Tag filter pattern is " + sb.toString()); + } + return sb.toString(); + } + + /** + * Convert ComparisonOperator to native HBase CompareOp. + * + *

Support: + * =, =~,CONTAINS,<,<=,>,>=,!=,!=~ + * + * @param comp + * @return + */ + protected static CompareOp convertToHBaseCompareOp(ComparisonOperator comp) { + if (comp == ComparisonOperator.EQUAL || comp == ComparisonOperator.LIKE + || comp == ComparisonOperator.CONTAINS + || comp == ComparisonOperator.IN + || comp == ComparisonOperator.IS + ) { + return CompareOp.EQUAL; + } else if (comp == ComparisonOperator.LESS) { + return CompareOp.LESS; + } else if (comp == ComparisonOperator.LESS_OR_EQUAL) { + return CompareOp.LESS_OR_EQUAL; + } else if (comp == ComparisonOperator.GREATER) { + return CompareOp.GREATER; + } else if (comp == ComparisonOperator.GREATER_OR_EQUAL) { + return CompareOp.GREATER_OR_EQUAL; + } else if (comp == ComparisonOperator.NOT_EQUAL + || comp == ComparisonOperator.NOT_LIKE + || comp == ComparisonOperator.NOT_CONTAINS + || comp == ComparisonOperator.IS_NOT + || comp == ComparisonOperator.NOT_IN) { + return CompareOp.NOT_EQUAL; + } else { + LOG.error("{} operation is not supported now\n", comp); + throw new IllegalArgumentException("Illegal operation: " + comp + ", avaliable options: " + Arrays.toString(ComparisonOperator.values())); + } + } + + protected static CompareOp getHBaseCompareOp(String comp) { + return convertToHBaseCompareOp(ComparisonOperator.locateOperator(comp)); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/QualifierFilterEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/QualifierFilterEntity.java index 6cdc77b0d2..e610aa4207 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/QualifierFilterEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/QualifierFilterEntity.java @@ -24,82 +24,85 @@ import java.io.DataOutput; import java.io.IOException; -public class QualifierFilterEntity implements Writable{ - 
public String key; - public String value; - public ComparisonOperator op; - public TokenType valueType; - public TokenType keyType; - - public QualifierFilterEntity(){} - public QualifierFilterEntity(String key, String value, ComparisonOperator comp, TokenType keyType, TokenType valueType) { - super(); - this.key = key; - this.value = value; - this.op = comp; - this.keyType = keyType; - this.valueType = valueType; - } - - public String getKey() { - return key; - } - - public void setKey(String key) { - this.key = key; - } - - public String getValue() { - return value; - } - - public void setValue(String value) { - this.value = value; - } - - public ComparisonOperator getOp() { - return op; - } - - public void setOp(ComparisonOperator op) { - this.op = op; - } - - public TokenType getValueType() { - return valueType; - } - - public void setValueType(TokenType valueType) { - this.valueType = valueType; - } - - public void setKeyType(TokenType keyType){ - this.keyType = keyType; - } - public TokenType getKeyType(){ - return this.keyType; - } - - @Override - public String toString() { - return String.format("%s %s %s",this.key,this.op,this.value); - } - - @Override - public void write(DataOutput out) throws IOException { - out.writeUTF(this.key); - out.writeUTF(this.getValue()); - out.writeUTF(this.op.name()); - out.writeUTF(this.keyType.name()); - out.writeUTF(this.valueType.name()); - } - - @Override - public void readFields(DataInput in) throws IOException { - this.key = in.readUTF(); - this.value = in.readUTF(); - this.op = ComparisonOperator.valueOf(in.readUTF()); - this.keyType = TokenType.valueOf(in.readUTF()); - this.valueType = TokenType.valueOf(in.readUTF()); - } +public class QualifierFilterEntity implements Writable { + public String key; + public String value; + public ComparisonOperator op; + public TokenType valueType; + public TokenType keyType; + + public QualifierFilterEntity() { + } + + public QualifierFilterEntity(String key, String value, 
ComparisonOperator comp, TokenType keyType, TokenType valueType) { + super(); + this.key = key; + this.value = value; + this.op = comp; + this.keyType = keyType; + this.valueType = valueType; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public ComparisonOperator getOp() { + return op; + } + + public void setOp(ComparisonOperator op) { + this.op = op; + } + + public TokenType getValueType() { + return valueType; + } + + public void setValueType(TokenType valueType) { + this.valueType = valueType; + } + + public void setKeyType(TokenType keyType) { + this.keyType = keyType; + } + + public TokenType getKeyType() { + return this.keyType; + } + + @Override + public String toString() { + return String.format("%s %s %s", this.key, this.op, this.value); + } + + @Override + public void write(DataOutput out) throws IOException { + out.writeUTF(this.key); + out.writeUTF(this.getValue()); + out.writeUTF(this.op.name()); + out.writeUTF(this.keyType.name()); + out.writeUTF(this.valueType.name()); + } + + @Override + public void readFields(DataInput in) throws IOException { + this.key = in.readUTF(); + this.value = in.readUTF(); + this.op = ComparisonOperator.valueOf(in.readUTF()); + this.keyType = TokenType.valueOf(in.readUTF()); + this.valueType = TokenType.valueOf(in.readUTF()); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/RowValueFilter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/RowValueFilter.java index a4b97ea1d9..05f37a4290 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/RowValueFilter.java +++ 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/RowValueFilter.java @@ -34,34 +34,36 @@ /** * TODO: Critical performance problem!!! - * TODO: Refactor to specified multi-column filter so that avoid return all qualifier columns from region server to client side + * TODO: Refactor to specified multi-column filter so that avoid return all qualifier columns from region server to client side. * * @since 2014/11/17 */ public class RowValueFilter extends FilterBase { - private final static Logger LOG = LoggerFactory.getLogger(RowValueFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(RowValueFilter.class); private boolean filterOutRow = false; private WritableComparable> comparator; // TODO: Use qualifiers to reduce network tranfer -// private List qualifiers; - public RowValueFilter(){} + // private List qualifiers; + public RowValueFilter() { + } /** * Filter out row if WritableComparable.compareTo return 0 + * * @param comparator WritableComparable[List[KeyValue]] */ - public RowValueFilter(WritableComparable> comparator){ + public RowValueFilter(WritableComparable> comparator) { this.comparator = comparator; } -// public RowValueFilter(List qualifiers,WritableComparable> comparator){ -// this.qualifiers = qualifiers; -// this.comparator = comparator; -// } + // public RowValueFilter(List qualifiers,WritableComparable> comparator){ + // this.qualifiers = qualifiers; + // this.comparator = comparator; + // } /** - * Old interface in hbase-0.94 + * Old interface in hbase-0.94. 
* * @param out * @throws IOException @@ -77,7 +79,6 @@ public void write(DataOutput out) throws IOException { * @param in * @throws IOException */ -// @Override @Deprecated public void readFields(DataInput in) throws IOException { this.comparator = new BooleanExpressionComparator(); @@ -101,20 +102,20 @@ public byte[] toByteArray() throws IOException { * TODO: Currently still use older serialization method from hbase-0.94, need to migrate into ProtoBuff based */ // Override static method - public static Filter parseFrom(final byte [] pbBytes) throws DeserializationException { + public static Filter parseFrom(final byte[] pbBytes) throws DeserializationException { ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(pbBytes); RowValueFilter filter = new RowValueFilter(); try { filter.readFields(byteArrayDataInput); } catch (IOException e) { - LOG.error("Got error to deserialize RowValueFilter from PB bytes",e); + LOG.error("Got error to deserialize RowValueFilter from PB bytes", e); throw new DeserializationException(e); } return filter; } @Override - public boolean hasFilterRow(){ + public boolean hasFilterRow() { return true; } @@ -124,21 +125,21 @@ public void filterRow(List row) { } @Override - public void reset() { - this.filterOutRow = false; + public boolean filterRow() { + return filterOutRow; } @Override - public boolean filterRow(){ - return filterOutRow; + public void reset() { + this.filterOutRow = false; } @Override public String toString() { - return super.toString()+" ( "+this.comparator.toString()+" )"; + return super.toString() + " ( " + this.comparator.toString() + " )"; } -// public List getQualifiers() { -// return qualifiers; -// } + // public List getQualifiers() { + // return qualifiers; + // } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/TypedByteArrayComparator.java 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/TypedByteArrayComparator.java index ecaf8ccf92..e34e68bceb 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/TypedByteArrayComparator.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/TypedByteArrayComparator.java @@ -33,14 +33,10 @@ /** *

TypedByteArrayComparator

- * * Compare byte array: byte[] value with class type: Class type - * - *
*
* Built-in support: - * - *
+ * 
  *    Double
  *    double
  *    Integer
@@ -52,13 +48,11 @@
  *    Boolean
  *    boolean
  *  
- * - * And can be extend by defining new {@link RawComparator} and register with {@link #define(Class type, RawComparator comparator)} - *
+ * And can be extend by defining new {@link RawComparator} and register with {@link #define(Class type, RawComparator comparator)}. *
*/ public class TypedByteArrayComparator extends ByteArrayComparable { - private final static Logger LOG = LoggerFactory.getLogger(TypedByteArrayComparator.class); + private static final Logger LOG = LoggerFactory.getLogger(TypedByteArrayComparator.class); private Class type; @@ -66,37 +60,40 @@ public class TypedByteArrayComparator extends ByteArrayComparable { private RawComparator comparator; /** - * Default constructor for writable + * Default constructor for writable. */ @SuppressWarnings("unused") - public TypedByteArrayComparator(){ + public TypedByteArrayComparator() { super(null); } - public TypedByteArrayComparator(byte[] value, Class type){ + public TypedByteArrayComparator(byte[] value, Class type) { super(value); this.type = type; this.comparator = get(this.type); - if(this.comparator == null) throw new IllegalArgumentException("No comparator found for class: "+type); + if (this.comparator == null) { + throw new IllegalArgumentException("No comparator found for class: " + type); + } } /** * @param in hbase-0.94 interface * @throws IOException */ -// @Override public void readFields(DataInput in) throws IOException { -// super.readFields(in); + // super.readFields(in); try { String _type = in.readUTF(); type = _primitiveTypeClassMap.get(_type); - if(type == null) { + if (type == null) { type = Class.forName(_type); } comparator = get(type); - if(comparator == null) throw new IllegalArgumentException("No comparator found for class: "+type); + if (comparator == null) { + throw new IllegalArgumentException("No comparator found for class: " + type); + } } catch (ClassNotFoundException e) { - throw new IOException(e.getMessage(),e); + throw new IOException(e.getMessage(), e); } } @@ -104,15 +101,14 @@ public void readFields(DataInput in) throws IOException { * @param out hbase-0.94 interface * @throws IOException */ -// @Override public void write(DataOutput out) throws IOException { -// super.write(out); + // super.write(out); String typeName = 
type.getName(); out.writeUTF(typeName); } /** - * For hbase 0.98 + * For hbase 0.98. * * @return serialized byte array */ @@ -123,7 +119,7 @@ public byte[] toByteArray() { this.write(byteArrayDataOutput); return byteArrayDataOutput.toByteArray(); } catch (IOException e) { - LOG.error("Failed to serialize due to: "+e.getMessage(),e); + LOG.error("Failed to serialize due to: " + e.getMessage(), e); throw new RuntimeException(e); } } @@ -135,14 +131,14 @@ public byte[] toByteArray() { * @return Comparator instance * @throws DeserializationException */ - public static TypedByteArrayComparator parseFrom(final byte [] bytes) - throws DeserializationException { + public static TypedByteArrayComparator parseFrom(final byte[] bytes) + throws DeserializationException { TypedByteArrayComparator comparator = new TypedByteArrayComparator(); ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(bytes); try { comparator.readFields(byteArrayDataInput); } catch (IOException e) { - LOG.error("Got error to deserialize TypedByteArrayComparator from PB bytes",e); + LOG.error("Got error to deserialize TypedByteArrayComparator from PB bytes", e); throw new DeserializationException(e); } return comparator; @@ -159,32 +155,35 @@ public int compareTo(byte[] value, int offset, int length) { *
  • If not found, try all possible WritableComparator
  • * * - * If not found finally, throw new IllegalArgumentException("unable to get comparator for class: "+type); + *

    If not found finally, throw new IllegalArgumentException("unable to get comparator for class: "+type). * * @param type value type class * @return RawComparator */ - public static RawComparator get(Class type){ + public static RawComparator get(Class type) { RawComparator comparator = null; try { comparator = _typedClassComparator.get(type); - }catch (ClassCastException ex){ + } catch (ClassCastException ex) { // ignore } try { - if (comparator == null) comparator = WritableComparator.get(type); - }catch (ClassCastException ex){ + if (comparator == null) { + comparator = WritableComparator.get(type); + } + } catch (ClassCastException ex) { // ignore } return comparator; } - private final static Map _typedClassComparator = new HashMap(); - public static void define(Class type, RawComparator comparator){ - _typedClassComparator.put(type,comparator); + private static final Map _typedClassComparator = new HashMap(); + + public static void define(Class type, RawComparator comparator) { + _typedClassComparator.put(type, comparator); } - static{ + static { define(Double.class, WritableComparator.get(DoubleWritable.class)); define(double.class, WritableComparator.get(DoubleWritable.class)); define(Integer.class, WritableComparator.get(IntWritable.class)); @@ -198,16 +197,17 @@ public static void define(Class type, RawComparator comparator){ } /** - * Because {@link Class#forName } can't find class for primitive type + * Because {@link Class#forName } can't find class for primitive type. 
*/ - private final static Map _primitiveTypeClassMap = new HashMap(); + private static final Map _primitiveTypeClassMap = new HashMap(); + static { - _primitiveTypeClassMap.put(int.class.getName(),int.class); - _primitiveTypeClassMap.put(double.class.getName(),double.class); - _primitiveTypeClassMap.put(long.class.getName(),long.class); - _primitiveTypeClassMap.put(short.class.getName(),short.class); - _primitiveTypeClassMap.put(boolean.class.getName(),boolean.class); - _primitiveTypeClassMap.put(char.class.getName(),char.class); - _primitiveTypeClassMap.put(byte.class.getName(),byte.class); + _primitiveTypeClassMap.put(int.class.getName(), int.class); + _primitiveTypeClassMap.put(double.class.getName(), double.class); + _primitiveTypeClassMap.put(long.class.getName(), long.class); + _primitiveTypeClassMap.put(short.class.getName(), short.class); + _primitiveTypeClassMap.put(boolean.class.getName(), boolean.class); + _primitiveTypeClassMap.put(char.class.getName(), char.class); + _primitiveTypeClassMap.put(byte.class.getName(), byte.class); } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexLogReader.java index 418ab33115..3e96c81e63 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexLogReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexLogReader.java @@ -24,22 +24,22 @@ public abstract class IndexLogReader implements LogReader { - // TODO: Work around https://issues.apache.org/jira/browse/HBASE-2198. More graceful implementation should use SingleColumnValueExcludeFilter, - // but it's complicated in current implementation. 
- protected static void workaroundHBASE2198(Get get, Filter filter,byte[][] qualifiers) { - if (filter instanceof SingleColumnValueFilter) { - if(qualifiers == null) { - get.addFamily(((SingleColumnValueFilter) filter).getFamily()); - }else{ - get.addColumn(((SingleColumnValueFilter) filter).getFamily(), ((SingleColumnValueFilter) filter).getQualifier()); - } - return; - } - if (filter instanceof FilterList) { - for (Filter f : ((FilterList)filter).getFilters()) { - workaroundHBASE2198(get, f,qualifiers); - } - } - } + // TODO: Work around https://issues.apache.org/jira/browse/HBASE-2198. More graceful implementation should use SingleColumnValueExcludeFilter, + // but it's complicated in current implementation. + protected static void workaroundHBASE2198(Get get, Filter filter, byte[][] qualifiers) { + if (filter instanceof SingleColumnValueFilter) { + if (qualifiers == null) { + get.addFamily(((SingleColumnValueFilter) filter).getFamily()); + } else { + get.addColumn(((SingleColumnValueFilter) filter).getFamily(), ((SingleColumnValueFilter) filter).getQualifier()); + } + return; + } + if (filter instanceof FilterList) { + for (Filter f : ((FilterList) filter).getFilters()) { + workaroundHBASE2198(get, f, qualifiers); + } + } + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexStreamReader.java index 9e059f256a..579755fc90 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexStreamReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexStreamReader.java @@ -26,69 +26,69 @@ import java.io.IOException; import java.util.List; -public abstract class IndexStreamReader extends StreamReader { - protected final IndexDefinition indexDef; - protected final SearchCondition condition; - protected 
final List indexRowkeys; - protected LogReader reader; - protected long lastTimestamp = 0; - protected long firstTimestamp = 0; - - protected static final Logger LOG = LoggerFactory.getLogger(IndexStreamReader.class); +public abstract class IndexStreamReader extends StreamReader { + protected final IndexDefinition indexDef; + protected final SearchCondition condition; + protected final List indexRowkeys; + protected LogReader reader; + protected long lastTimestamp = 0; + protected long firstTimestamp = 0; - public IndexStreamReader(IndexDefinition indexDef, SearchCondition condition, List indexRowkeys) { - this.indexDef = indexDef; - this.condition = condition; - this.indexRowkeys = indexRowkeys; - this.reader = null; - } + protected static final Logger LOG = LoggerFactory.getLogger(IndexStreamReader.class); - @Override - public long getLastTimestamp() { - return lastTimestamp; - } + public IndexStreamReader(IndexDefinition indexDef, SearchCondition condition, List indexRowkeys) { + this.indexDef = indexDef; + this.condition = condition; + this.indexRowkeys = indexRowkeys; + this.reader = null; + } - @Override - public long getFirstTimestamp() { - return this.firstTimestamp; - } + @Override + public long getLastTimestamp() { + return lastTimestamp; + } - @Override - public void readAsStream() throws Exception { - if (reader == null) { - reader = createIndexReader(); - } - final EntityDefinition entityDef = indexDef.getEntityDefinition(); - try{ - reader.open(); - InternalLog log; - int count = 0; - while ((log = reader.read()) != null) { - TaggedLogAPIEntity entity = HBaseInternalLogHelper.buildEntity(log, entityDef); - entity.setSerializeAlias(condition.getOutputAlias()); - entity.setSerializeVerbose(condition.isOutputVerbose()); + @Override + public long getFirstTimestamp() { + return this.firstTimestamp; + } - if (lastTimestamp == 0 || lastTimestamp < entity.getTimestamp()) { - lastTimestamp = entity.getTimestamp(); - } - if(firstTimestamp == 0 || firstTimestamp 
> entity.getTimestamp()){ - firstTimestamp = entity.getTimestamp(); - } - for(EntityCreationListener l : _listeners){ - l.entityCreated(entity); - } - if(++count == condition.getPageSize()) { - break; - } - } - }catch(IOException ioe){ - LOG.error("Fail reading log", ioe); - throw ioe; - }finally{ - reader.close(); - } - } + @Override + public void readAsStream() throws Exception { + if (reader == null) { + reader = createIndexReader(); + } + final EntityDefinition entityDef = indexDef.getEntityDefinition(); + try { + reader.open(); + InternalLog log; + int count = 0; + while ((log = reader.read()) != null) { + TaggedLogAPIEntity entity = HBaseInternalLogHelper.buildEntity(log, entityDef); + entity.setSerializeAlias(condition.getOutputAlias()); + entity.setSerializeVerbose(condition.isOutputVerbose()); + + if (lastTimestamp == 0 || lastTimestamp < entity.getTimestamp()) { + lastTimestamp = entity.getTimestamp(); + } + if (firstTimestamp == 0 || firstTimestamp > entity.getTimestamp()) { + firstTimestamp = entity.getTimestamp(); + } + for (EntityCreationListener l : listeners) { + l.entityCreated(entity); + } + if (++count == condition.getPageSize()) { + break; + } + } + } catch (IOException ioe) { + LOG.error("Fail reading log", ioe); + throw ioe; + } finally { + reader.close(); + } + } + + protected abstract LogReader createIndexReader(); - protected abstract LogReader createIndexReader(); - } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexLogReader.java index e6a5c967f4..ef9ad5fd01 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexLogReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexLogReader.java @@ -16,11 +16,11 @@ */ package 
org.apache.eagle.log.entity.index; +import org.apache.eagle.common.ByteUtil; import org.apache.eagle.common.config.EagleConfigFactory; import org.apache.eagle.log.entity.HBaseInternalLogHelper; import org.apache.eagle.log.entity.InternalLog; import org.apache.eagle.log.entity.meta.IndexDefinition; -import org.apache.eagle.common.ByteUtil; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.filter.Filter; @@ -30,168 +30,169 @@ public class NonClusteredIndexLogReader extends IndexLogReader { - private final IndexDefinition indexDef; - private final List indexRowkeys; - private final byte[][] qualifiers; - private final Filter filter; - private HTableInterface tbl; - private boolean isOpen = false; - private Result[] results; - private int index = -1; - private final List scans; - private int currentScanIndex = 0; - private ResultScanner currentResultScanner; - - // Max tag key/value. - private static final byte[] MAX_TAG_VALUE_BYTES = {(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF}; - private static final int BATCH_MULTIGET_SIZE = 1000; - - public NonClusteredIndexLogReader(IndexDefinition indexDef, List indexRowkeys, byte[][] qualifiers, Filter filter) { - this.indexDef = indexDef; - this.indexRowkeys = indexRowkeys; - this.qualifiers = qualifiers; - this.filter = filter; - this.scans = buildScans(); - } - - - private List buildScans() { - final ArrayList result = new ArrayList(indexRowkeys.size()); - for (byte[] rowkey : indexRowkeys) { - Scan s = new Scan(); - s.setStartRow(rowkey); - // In rowkey the tag key/value is sorted by the hash code of the key, so MAX_TAG_VALUE_BYTES is enough as the end key - final byte[] stopRowkey = ByteUtil.concat(rowkey, MAX_TAG_VALUE_BYTES); - s.setStopRow(stopRowkey); - // TODO the # of cached rows should be minimum of (pagesize and 100) - int cs = EagleConfigFactory.load().getHBaseClientScanCacheSize(); - s.setCaching(cs); - // TODO not optimized for 
all applications - s.setCacheBlocks(true); - // scan specified columnfamily for all qualifiers - s.addFamily(indexDef.getEntityDefinition().getColumnFamily().getBytes()); - result.add(s); - } - return result; - } - - @Override - public void open() throws IOException { - if (isOpen) - return; // silently return - try { - tbl = EagleConfigFactory.load().getHTable(indexDef.getEntityDefinition().getTable()); - } catch (RuntimeException ex) { - throw new IOException(ex); - } - currentScanIndex = 0; - openNewScan(); - fillResults(); - } - - private boolean openNewScan() throws IOException { - closeCurrentScanResult(); - if (currentScanIndex >= scans.size()) { - return false; - } - final Scan scan = scans.get(currentScanIndex++); - currentResultScanner = tbl.getScanner(scan); - return true; - } - - private void fillResults() throws IOException { - if (currentResultScanner == null) { - return; - } - index = 0; - int count = 0; - Result r = null; + private final IndexDefinition indexDef; + private final List indexRowkeys; + private final byte[][] qualifiers; + private final Filter filter; + private HTableInterface tbl; + private boolean isOpen = false; + private Result[] results; + private int index = -1; + private final List scans; + private int currentScanIndex = 0; + private ResultScanner currentResultScanner; + + // Max tag key/value. 
+ private static final byte[] MAX_TAG_VALUE_BYTES = {(byte) 0XFF, (byte) 0XFF, (byte) 0XFF, (byte) 0XFF, (byte) 0XFF, (byte) 0XFF, (byte) 0XFF, (byte) 0XFF, (byte) 0XFF}; + private static final int BATCH_MULTIGET_SIZE = 1000; + + public NonClusteredIndexLogReader(IndexDefinition indexDef, List indexRowkeys, byte[][] qualifiers, Filter filter) { + this.indexDef = indexDef; + this.indexRowkeys = indexRowkeys; + this.qualifiers = qualifiers; + this.filter = filter; + this.scans = buildScans(); + } + + + private List buildScans() { + final ArrayList result = new ArrayList(indexRowkeys.size()); + for (byte[] rowkey : indexRowkeys) { + Scan s = new Scan(); + s.setStartRow(rowkey); + // In rowkey the tag key/value is sorted by the hash code of the key, so MAX_TAG_VALUE_BYTES is enough as the end key + final byte[] stopRowkey = ByteUtil.concat(rowkey, MAX_TAG_VALUE_BYTES); + s.setStopRow(stopRowkey); + // TODO the # of cached rows should be minimum of (pagesize and 100) + int cs = EagleConfigFactory.load().getHBaseClientScanCacheSize(); + s.setCaching(cs); + // TODO not optimized for all applications + s.setCacheBlocks(true); + // scan specified columnfamily for all qualifiers + s.addFamily(indexDef.getEntityDefinition().getColumnFamily().getBytes()); + result.add(s); + } + return result; + } + + @Override + public void open() throws IOException { + if (isOpen) { + return; // silently return + } + try { + tbl = EagleConfigFactory.load().getHTable(indexDef.getEntityDefinition().getTable()); + } catch (RuntimeException ex) { + throw new IOException(ex); + } + currentScanIndex = 0; + openNewScan(); + fillResults(); + } + + private boolean openNewScan() throws IOException { + closeCurrentScanResult(); + if (currentScanIndex >= scans.size()) { + return false; + } + final Scan scan = scans.get(currentScanIndex++); + currentResultScanner = tbl.getScanner(scan); + return true; + } + + private void fillResults() throws IOException { + if (currentResultScanner == null) { + return; + 
} + index = 0; + int count = 0; + Result r = null; final List gets = new ArrayList(BATCH_MULTIGET_SIZE); - final byte[] family = indexDef.getEntityDefinition().getColumnFamily().getBytes(); - while (count < BATCH_MULTIGET_SIZE) { - r = currentResultScanner.next(); - if (r == null) { - if (openNewScan()) { - continue; - } else { - break; - } - } - for (byte[] rowkey : r.getFamilyMap(family).keySet()) { - if (rowkey.length == 0) { // invalid rowkey - continue; - } - final Get get = new Get(rowkey); + final byte[] family = indexDef.getEntityDefinition().getColumnFamily().getBytes(); + while (count < BATCH_MULTIGET_SIZE) { + r = currentResultScanner.next(); + if (r == null) { + if (openNewScan()) { + continue; + } else { + break; + } + } + for (byte[] rowkey : r.getFamilyMap(family).keySet()) { + if (rowkey.length == 0) { // invalid rowkey + continue; + } + final Get get = new Get(rowkey); if (filter != null) { - get.setFilter(filter); + get.setFilter(filter); + } + if (qualifiers != null) { + for (int j = 0; j < qualifiers.length; ++j) { + // Return the specified qualifiers + get.addColumn(family, qualifiers[j]); + } + } else { + get.addFamily(family); } - if(qualifiers != null) { - for (int j = 0; j < qualifiers.length; ++j) { - // Return the specified qualifiers - get.addColumn(family, qualifiers[j]); - } - }else { - get.addFamily(family); - } - workaroundHBASE2198(get, filter,qualifiers); - gets.add(get); - ++count; - } - } - if (count == 0) { - results = null; - return; - } - results = tbl.get(gets); - if (results == null || results.length == 0) { - fillResults(); - } - } - - - private void closeCurrentScanResult() { - if (currentResultScanner != null) { - currentResultScanner.close(); - currentResultScanner = null; - } - } - - - @Override - public void close() throws IOException { - if(tbl != null){ - new HTableFactory().releaseHTableInterface(tbl); - } - closeCurrentScanResult(); - } - - @Override - public InternalLog read() throws IOException { - if (tbl == 
null) { - throw new IllegalArgumentException("Haven't open before reading"); - } - - Result r = null; - InternalLog t = null; - while ((r = getNextResult()) != null) { - if (r.getRow() == null) { - continue; - } - t = HBaseInternalLogHelper.parse(indexDef.getEntityDefinition(), r, qualifiers); - break; - } - return t; - } - - - private Result getNextResult() throws IOException { - if (results == null || results.length == 0 || index >= results.length) { - fillResults(); - } - if (results == null || results.length == 0 || index >= results.length) { - return null; - } - return results[index++]; - } - + workaroundHBASE2198(get, filter, qualifiers); + gets.add(get); + ++count; + } + } + if (count == 0) { + results = null; + return; + } + results = tbl.get(gets); + if (results == null || results.length == 0) { + fillResults(); + } + } + + + private void closeCurrentScanResult() { + if (currentResultScanner != null) { + currentResultScanner.close(); + currentResultScanner = null; + } + } + + + @Override + public void close() throws IOException { + if (tbl != null) { + new HTableFactory().releaseHTableInterface(tbl); + } + closeCurrentScanResult(); + } + + @Override + public InternalLog read() throws IOException { + if (tbl == null) { + throw new IllegalArgumentException("Haven't open before reading"); + } + + Result r = null; + InternalLog t = null; + while ((r = getNextResult()) != null) { + if (r.getRow() == null) { + continue; + } + t = HBaseInternalLogHelper.parse(indexDef.getEntityDefinition(), r, qualifiers); + break; + } + return t; + } + + + private Result getNextResult() throws IOException { + if (results == null || results.length == 0 || index >= results.length) { + fillResults(); + } + if (results == null || results.length == 0 || index >= results.length) { + return null; + } + return results[index++]; + } + } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexStreamReader.java 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexStreamReader.java index ec5631a3a2..11df33abec 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexStreamReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexStreamReader.java @@ -27,25 +27,25 @@ import java.util.List; public class NonClusteredIndexStreamReader extends IndexStreamReader { - public NonClusteredIndexStreamReader(IndexDefinition indexDef, SearchCondition condition) { - super(indexDef, condition, new ArrayList()); - final IndexType type = indexDef.canGoThroughIndex(condition.getQueryExpression(), indexRowkeys); - if (!IndexType.NON_CLUSTER_INDEX.equals(type)) { - throw new IllegalArgumentException("This query can't go through index: " + condition.getQueryExpression()); - } - } + public NonClusteredIndexStreamReader(IndexDefinition indexDef, SearchCondition condition) { + super(indexDef, condition, new ArrayList()); + final IndexType type = indexDef.canGoThroughIndex(condition.getQueryExpression(), indexRowkeys); + if (!IndexType.NON_CLUSTER_INDEX.equals(type)) { + throw new IllegalArgumentException("This query can't go through index: " + condition.getQueryExpression()); + } + } - public NonClusteredIndexStreamReader(IndexDefinition indexDef, SearchCondition condition, List indexRowkeys) { - super(indexDef, condition, indexRowkeys); - } + public NonClusteredIndexStreamReader(IndexDefinition indexDef, SearchCondition condition, List indexRowkeys) { + super(indexDef, condition, indexRowkeys); + } - @Override - protected LogReader createIndexReader() { - final EntityDefinition entityDef = indexDef.getEntityDefinition(); - byte[][] outputQualifiers = null; - if(!condition.isOutputAll()) { - outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef, condition.getOutputFields()); - } - return new 
NonClusteredIndexLogReader(indexDef, indexRowkeys, outputQualifiers, condition.getFilter()); - } + @Override + protected LogReader createIndexReader() { + final EntityDefinition entityDef = indexDef.getEntityDefinition(); + byte[][] outputQualifiers = null; + if (!condition.isOutputAll()) { + outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef, condition.getOutputFields()); + } + return new NonClusteredIndexLogReader(indexDef, indexRowkeys, outputQualifiers, condition.getFilter()); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/RowKeyLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/RowKeyLogReader.java index 1c16dc80a4..a2676f6fd5 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/RowKeyLogReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/RowKeyLogReader.java @@ -16,27 +16,26 @@ */ package org.apache.eagle.log.entity.index; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - import org.apache.eagle.common.config.EagleConfigFactory; import org.apache.eagle.log.entity.HBaseInternalLogHelper; import org.apache.eagle.log.entity.InternalLog; +import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTableFactory; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; -import org.apache.eagle.log.entity.meta.EntityDefinition; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; public class RowKeyLogReader extends IndexLogReader { - private final EntityDefinition ed; - private final List rowkeys; + private final EntityDefinition ed; + private final List rowkeys; private final byte[][] qualifiers; private HTableInterface tbl; - private boolean 
isOpen = false; - private Result[] entityResult; + private boolean isOpen = false; + private Result[] entityResult; private int getIndex = -1; public RowKeyLogReader(EntityDefinition ed, byte[] rowkey) { @@ -46,38 +45,39 @@ public RowKeyLogReader(EntityDefinition ed, byte[] rowkey) { this.qualifiers = null; } - public RowKeyLogReader(EntityDefinition ed, byte[] rowkey,byte[][] qualifiers) { - this.ed = ed; - this.rowkeys = new ArrayList<>(); + public RowKeyLogReader(EntityDefinition ed, byte[] rowkey, byte[][] qualifiers) { + this.ed = ed; + this.rowkeys = new ArrayList<>(); this.rowkeys.add(rowkey); this.qualifiers = qualifiers; - } + } - public RowKeyLogReader(EntityDefinition ed, List rowkeys,byte[][] qualifiers) { - this.ed = ed; - this.rowkeys = rowkeys; + public RowKeyLogReader(EntityDefinition ed, List rowkeys, byte[][] qualifiers) { + this.ed = ed; + this.rowkeys = rowkeys; this.qualifiers = qualifiers; - } + } - @Override - public void open() throws IOException { - if (isOpen) - return; // silently return - try { - tbl = EagleConfigFactory.load().getHTable(ed.getTable()); - } catch (RuntimeException ex) { - throw new IOException(ex); - } - final byte[] family = ed.getColumnFamily().getBytes(); + @Override + public void open() throws IOException { + if (isOpen) { + return; // silently return + } + try { + tbl = EagleConfigFactory.load().getHTable(ed.getTable()); + } catch (RuntimeException ex) { + throw new IOException(ex); + } + final byte[] family = ed.getColumnFamily().getBytes(); List gets = new ArrayList<>(this.rowkeys.size()); - for(byte[] rowkey:rowkeys) { + for (byte[] rowkey : rowkeys) { Get get = new Get(rowkey); get.addFamily(family); - if(qualifiers != null) { - for(byte[] qualifier: qualifiers){ - get.addColumn(family,qualifier); + if (qualifiers != null) { + for (byte[] qualifier : qualifiers) { + get.addColumn(family, qualifier); } } @@ -85,23 +85,23 @@ public void open() throws IOException { } entityResult = tbl.get(gets); - isOpen = true; - 
} + isOpen = true; + } - @Override - public void close() throws IOException { - if(tbl != null){ - new HTableFactory().releaseHTableInterface(tbl); - } - } + @Override + public void close() throws IOException { + if (tbl != null) { + new HTableFactory().releaseHTableInterface(tbl); + } + } - @Override - public InternalLog read() throws IOException { - if(entityResult == null || entityResult.length == 0 || this.getIndex >= entityResult.length - 1){ + @Override + public InternalLog read() throws IOException { + if (entityResult == null || entityResult.length == 0 || this.getIndex >= entityResult.length - 1) { return null; } - getIndex ++; - InternalLog t = HBaseInternalLogHelper.parse(ed, entityResult[getIndex], this.qualifiers); - return t; - } + getIndex++; + InternalLog t = HBaseInternalLogHelper.parse(ed, entityResult[getIndex], this.qualifiers); + return t; + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexLogReader.java index 8ff3448234..ccafc1fb8b 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexLogReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexLogReader.java @@ -16,11 +16,6 @@ */ package org.apache.eagle.log.entity.index; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.NavigableMap; - import org.apache.eagle.common.config.EagleConfigFactory; import org.apache.eagle.log.entity.HBaseInternalLogHelper; import org.apache.eagle.log.entity.InternalLog; @@ -31,34 +26,40 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.filter.Filter; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import 
java.util.NavigableMap; + public class UniqueIndexLogReader extends IndexLogReader { - private final IndexDefinition indexDef; - private final List indexRowkeys; - private final byte[][] qualifiers; - private final Filter filter; - private HTableInterface tbl; - private boolean isOpen = false; - private Result[] entityResults; - private int index = -1; + private final IndexDefinition indexDef; + private final List indexRowkeys; + private final byte[][] qualifiers; + private final Filter filter; + private HTableInterface tbl; + private boolean isOpen = false; + private Result[] entityResults; + private int index = -1; - public UniqueIndexLogReader(IndexDefinition indexDef, List indexRowkeys, byte[][] qualifiers, Filter filter) { - this.indexDef = indexDef; - this.indexRowkeys = indexRowkeys; - this.qualifiers = qualifiers; - this.filter = filter; - } + public UniqueIndexLogReader(IndexDefinition indexDef, List indexRowkeys, byte[][] qualifiers, Filter filter) { + this.indexDef = indexDef; + this.indexRowkeys = indexRowkeys; + this.qualifiers = qualifiers; + this.filter = filter; + } - @Override - public void open() throws IOException { - if (isOpen) - return; // silently return - try { - tbl = EagleConfigFactory.load().getHTable(indexDef.getEntityDefinition().getTable()); - } catch (RuntimeException ex) { - throw new IOException(ex); - } - final byte[] family = indexDef.getEntityDefinition().getColumnFamily().getBytes(); + @Override + public void open() throws IOException { + if (isOpen) { + return; // silently return + } + try { + tbl = EagleConfigFactory.load().getHTable(indexDef.getEntityDefinition().getTable()); + } catch (RuntimeException ex) { + throw new IOException(ex); + } + final byte[] family = indexDef.getEntityDefinition().getColumnFamily().getBytes(); final List indexGets = new ArrayList<>(); for (byte[] rowkey : indexRowkeys) { Get get = new Get(rowkey); @@ -69,56 +70,56 @@ public void open() throws IOException { final Result[] indexResults = 
tbl.get(indexGets); indexGets.clear(); for (Result indexResult : indexResults) { - final NavigableMap map = indexResult.getFamilyMap(family); - if (map == null) { - continue; - } - for (byte[] entityRowkey : map.keySet()) { + final NavigableMap map = indexResult.getFamilyMap(family); + if (map == null) { + continue; + } + for (byte[] entityRowkey : map.keySet()) { Get get = new Get(entityRowkey); if (filter != null) { - get.setFilter(filter); + get.setFilter(filter); + } + if (qualifiers == null) { + // filter all qualifiers if output qualifiers are null + get.addFamily(family); + } else { + for (int i = 0; i < qualifiers.length; ++i) { + // Return the specified qualifiers + get.addColumn(family, qualifiers[i]); + } } - if(qualifiers == null) { - // filter all qualifiers if output qualifiers are null - get.addFamily(family); - }else { - for (int i = 0; i < qualifiers.length; ++i) { - // Return the specified qualifiers - get.addColumn(family, qualifiers[i]); - } - } - workaroundHBASE2198(get, filter,qualifiers); - indexGets.add(get); - } + workaroundHBASE2198(get, filter, qualifiers); + indexGets.add(get); + } } entityResults = tbl.get(indexGets); - isOpen = true; - } + isOpen = true; + } - @Override - public void close() throws IOException { - if(tbl != null){ - new HTableFactory().releaseHTableInterface(tbl); - } - } + @Override + public void close() throws IOException { + if (tbl != null) { + new HTableFactory().releaseHTableInterface(tbl); + } + } - @Override - public InternalLog read() throws IOException { - if (entityResults == null) { - throw new IllegalArgumentException("entityResults haven't been initialized before reading"); - } - InternalLog t = null; - while (entityResults.length > ++index) { - Result r = entityResults[index]; - if (r != null) { - if (r.getRow() == null) { - continue; - } - t = HBaseInternalLogHelper.parse(indexDef.getEntityDefinition(), r, qualifiers); - break; - } - } - return t; - } + @Override + public InternalLog read() throws 
IOException { + if (entityResults == null) { + throw new IllegalArgumentException("entityResults haven't been initialized before reading"); + } + InternalLog t = null; + while (entityResults.length > ++index) { + Result r = entityResults[index]; + if (r != null) { + if (r.getRow() == null) { + continue; + } + t = HBaseInternalLogHelper.parse(indexDef.getEntityDefinition(), r, qualifiers); + break; + } + } + return t; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexStreamReader.java index 0391d5706c..82c22ea422 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexStreamReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexStreamReader.java @@ -27,26 +27,25 @@ import java.util.List; public class UniqueIndexStreamReader extends IndexStreamReader { - public UniqueIndexStreamReader(IndexDefinition indexDef, SearchCondition condition) { - super(indexDef, condition, new ArrayList()); - final IndexType type = indexDef.canGoThroughIndex(condition.getQueryExpression(), indexRowkeys); - if (!IndexType.UNIQUE_INDEX.equals(type)) { - throw new IllegalArgumentException("This query can't go through index: " + condition.getQueryExpression()); - } - } + public UniqueIndexStreamReader(IndexDefinition indexDef, SearchCondition condition) { + super(indexDef, condition, new ArrayList()); + final IndexType type = indexDef.canGoThroughIndex(condition.getQueryExpression(), indexRowkeys); + if (!IndexType.UNIQUE_INDEX.equals(type)) { + throw new IllegalArgumentException("This query can't go through index: " + condition.getQueryExpression()); + } + } - public UniqueIndexStreamReader(IndexDefinition indexDef, SearchCondition condition, List indexRowkeys) { - super(indexDef, 
condition, indexRowkeys); - } + public UniqueIndexStreamReader(IndexDefinition indexDef, SearchCondition condition, List indexRowkeys) { + super(indexDef, condition, indexRowkeys); + } - @Override - protected LogReader createIndexReader() { - final EntityDefinition entityDef = indexDef.getEntityDefinition(); -// final - byte[][] outputQualifiers = null; - if(!condition.isOutputAll()) { - outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef, condition.getOutputFields()); - } - return new UniqueIndexLogReader(indexDef, indexRowkeys, outputQualifiers, condition.getFilter()); - } + @Override + protected LogReader createIndexReader() { + final EntityDefinition entityDef = indexDef.getEntityDefinition(); + byte[][] outputQualifiers = null; + if (!condition.isOutputAll()) { + outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef, condition.getOutputFields()); + } + return new UniqueIndexLogReader(indexDef, indexRowkeys, outputQualifiers, condition.getFilter()); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/BooleanSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/BooleanSerDeser.java index cf40e31f9c..9596c1c24f 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/BooleanSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/BooleanSerDeser.java @@ -16,39 +16,37 @@ */ package org.apache.eagle.log.entity.meta; -/** - * @since : 7/3/14,2014 - */ public class BooleanSerDeser implements EntitySerDeser { - public BooleanSerDeser(){} + public BooleanSerDeser() { + } - @Override - public Boolean deserialize(byte[] bytes){ - if(bytes != null && bytes.length > 0){ - if(bytes[0] == 0){ - return false; - }else if(bytes[0] == 1){ - return true; - } - } - return null; - } + @Override + public Boolean deserialize(byte[] bytes) { + if 
(bytes != null && bytes.length > 0) { + if (bytes[0] == 0) { + return false; + } else if (bytes[0] == 1) { + return true; + } + } + return null; + } - @Override - public byte[] serialize(Boolean obj){ - if(obj != null){ - if(obj){ - return new byte[]{1}; - }else{ - return new byte[]{0}; - } - } - return null; - } + @Override + public byte[] serialize(Boolean obj) { + if (obj != null) { + if (obj) { + return new byte[] {1}; + } else { + return new byte[] {0}; + } + } + return null; + } - @Override - public Class type() { - return Boolean.class; - } + @Override + public Class type() { + return Boolean.class; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Column.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Column.java index b64e528616..c285104e59 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Column.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Column.java @@ -21,8 +21,8 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -@Target({ElementType.FIELD}) +@Target( {ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) public @interface Column { - String value() default ""; + String value() default ""; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ColumnFamily.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ColumnFamily.java index 6e3e9c6c85..6a3093afd3 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ColumnFamily.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ColumnFamily.java @@ -21,8 +21,8 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -@Target({ElementType.TYPE}) +@Target( 
{ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface ColumnFamily { - String value() default "f"; + String value() default "f"; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DefaultJavaObjctSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DefaultJavaObjctSerDeser.java index 24385a9a31..36fc63f404 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DefaultJavaObjctSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DefaultJavaObjctSerDeser.java @@ -22,11 +22,11 @@ import org.apache.eagle.common.SerializableUtils; public class DefaultJavaObjctSerDeser implements EntitySerDeser { - public final static EntitySerDeser INSTANCE = new DefaultJavaObjctSerDeser(); + public static final EntitySerDeser INSTANCE = new DefaultJavaObjctSerDeser(); @Override public Object deserialize(byte[] bytes) { - return SerializableUtils.deserializeFromByteArray(bytes,"Deserialize from java object bytes"); + return SerializableUtils.deserializeFromByteArray(bytes, "Deserialize from java object bytes"); } @Override diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Double2DArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Double2DArraySerDeser.java index 27b011c831..8c1e85fa22 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Double2DArraySerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Double2DArraySerDeser.java @@ -21,30 +21,29 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; -/** - * @since 7/22/15 - */ + public class Double2DArraySerDeser implements EntitySerDeser { - private final int SIZE = 8; + private final int size 
= 8; + @Override - public double[][] deserialize(byte[] bytes){ -// if((bytes.length-4) % SIZE != 0) -// return null; + public double[][] deserialize(byte[] bytes) { + // if((bytes.length-4) % SIZE != 0) + // return null; int offset = 0; // get size of int array int rowSize = ByteUtil.bytesToInt(bytes, offset); offset += 4; double[][] data = new double[rowSize][]; - for(int i=0; i= 0){ + if (colSize >= 0) { values = new double[colSize]; for (int j = 0; j < colSize; j++) { values[j] = ByteUtil.bytesToDouble(bytes, offset); - offset += SIZE; + offset += size; } } data[i] = values; @@ -53,28 +52,25 @@ public double[][] deserialize(byte[] bytes){ return data; } - /** - * - * @param obj - * @return - */ @Override - public byte[] serialize(double[][] obj){ - if(obj == null) return null; + public byte[] serialize(double[][] obj) { + if (obj == null) { + return null; + } ByteArrayOutputStream data = new ByteArrayOutputStream(); int size = obj.length; byte[] sizeBytes = ByteUtil.intToBytes(size); - data.write(sizeBytes,0,sizeBytes.length); + data.write(sizeBytes, 0, sizeBytes.length); - try{ - for(double[] o:obj){ - if(o!=null){ + try { + for (double[] o : obj) { + if (o != null) { data.write(ByteUtil.intToBytes(o.length)); - for(double d:o){ - data.write(ByteUtil.doubleToBytes(d),0,SIZE); + for (double d : o) { + data.write(ByteUtil.doubleToBytes(d), 0, this.size); } - }else{ - data.write(ByteUtil.intToBytes(-1),0,4); + } else { + data.write(ByteUtil.intToBytes(-1), 0, 4); } } } catch (IOException e) { diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleArraySerDeser.java index d87e31c0e6..f789835156 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleArraySerDeser.java +++ 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleArraySerDeser.java @@ -18,51 +18,50 @@ import org.apache.eagle.common.ByteUtil; -public class DoubleArraySerDeser implements EntitySerDeser{ +public class DoubleArraySerDeser implements EntitySerDeser { - public DoubleArraySerDeser(){} + public DoubleArraySerDeser() { + } - private final int SIZE = 8; - @Override - public double[] deserialize(byte[] bytes){ - if((bytes.length-4) % SIZE != 0) - return null; - int offset = 0; - // get size of int array - int size = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - double[] values = new double[size]; - for(int i=0; i type() { - return double[].class; - } + @Override + public double[] deserialize(byte[] bytes) { + if ((bytes.length - 4) % size != 0) { + return null; + } + int offset = 0; + // get size of int array + int size = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + double[] values = new double[size]; + for (int i = 0; i < size; i++) { + values[i] = ByteUtil.bytesToDouble(bytes, offset); + offset += this.size; + } + return values; + } + + @Override + public byte[] serialize(double[] obj) { + if (obj == null) { + return null; + } + int size = obj.length; + byte[] array = new byte[4 + this.size * size]; + byte[] first = ByteUtil.intToBytes(size); + int offset = 0; + System.arraycopy(first, 0, array, offset, first.length); + offset += first.length; + for (int i = 0; i < size; i++) { + System.arraycopy(ByteUtil.doubleToBytes(obj[i]), 0, array, offset, this.size); + offset += this.size; + } + return array; + } + + @Override + public Class type() { + return double[].class; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleSerDeser.java index 330a99dbf4..bf58d39750 100755 --- 
a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleSerDeser.java @@ -18,24 +18,26 @@ import org.apache.eagle.common.ByteUtil; -public class DoubleSerDeser implements EntitySerDeser{ +public class DoubleSerDeser implements EntitySerDeser { - @Override - public Double deserialize(byte[] bytes){ - if(bytes.length < 8) - return null; - return ByteUtil.bytesToDouble(bytes); - } - - @Override - public byte[] serialize(Double obj){ - if(obj == null) - return null; - return ByteUtil.doubleToBytes(obj); - } + @Override + public Double deserialize(byte[] bytes) { + if (bytes.length < 8) { + return null; + } + return ByteUtil.bytesToDouble(bytes); + } - @Override - public Class type(){ - return Double.class; - } + @Override + public byte[] serialize(Double obj) { + if (obj == null) { + return null; + } + return ByteUtil.doubleToBytes(obj); + } + + @Override + public Class type() { + return Double.class; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityConstants.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityConstants.java index 930743e899..2350318fce 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityConstants.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityConstants.java @@ -19,12 +19,12 @@ import org.apache.eagle.common.DateTimeUtil; public class EntityConstants { - - public static final String FIXED_WRITE_HUMANTIME = "1970-01-02 00:00:00"; - public static final String FIXED_READ_START_HUMANTIME = "1970-01-01 00:00:00"; - public static final String FIXED_READ_END_HUMANTIME = "1970-01-03 00:00:00"; - - public static final long FIXED_WRITE_TIMESTAMP = - 
DateTimeUtil.humanDateToSecondsWithoutException(FIXED_WRITE_HUMANTIME) * 1000; + + public static final String FIXED_WRITE_HUMANTIME = "1970-01-02 00:00:00"; + public static final String FIXED_READ_START_HUMANTIME = "1970-01-01 00:00:00"; + public static final String FIXED_READ_END_HUMANTIME = "1970-01-03 00:00:00"; + + public static final long FIXED_WRITE_TIMESTAMP = + DateTimeUtil.humanDateToSecondsWithoutException(FIXED_WRITE_HUMANTIME) * 1000; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinition.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinition.java index d2d9eef95e..f346bf71c7 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinition.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinition.java @@ -32,295 +32,330 @@ import java.util.Map; /** - * * This object should be regarded as read-only metadata for an entity as it will be shared across all entity object - * with the same entity name, so don't try to set different values for any of the fields, - * otherwise it's not thread safe + * with the same entity name, so don't try to set different values for any of the fields, + * otherwise it's not thread safe. */ -public class EntityDefinition implements Writable{ - private final static Logger LOG = LoggerFactory.getLogger(EntityDefinition.class); - - private Class entityClass; - private String table; - private String columnFamily; - // TODO prefix be within search/get condition instead of entity definition. Topology entity should have pre-defined prefix. 
- private String prefix; - private String service; - private String serviceCreationPath; - private String serviceDeletionPath; - private String[] partitions; - private Map displayNameMap = new HashMap(); - private Map qualifierNameMap = new HashMap(); - private Map qualifierGetterMap = new HashMap(); - private String[] tags; - private boolean isTimeSeries; - private MetricDefinition metricDefinition; - private IndexDefinition[] indexes; - - - public EntityDefinition(){} - - public MetricDefinition getMetricDefinition() { - return metricDefinition; - } - public void setMetricDefinition(MetricDefinition metricDefinition) { - this.metricDefinition = metricDefinition; - } - public boolean isTimeSeries() { - return isTimeSeries; - } - public void setTimeSeries(boolean isTimeSeries) { - this.isTimeSeries = isTimeSeries; - } - public String getColumnFamily() { - return columnFamily; - } - public void setColumnFamily(String columnFamily) { - this.columnFamily = columnFamily; - } - public Class getEntityClass() { - return entityClass; - } - public void setEntityClass(Class entityClass) { - this.entityClass = entityClass; - } - public String getTable() { - return table; - } - public void setTable(String table) { - this.table = table; - } - public Map getDisplayNameMap() { - return displayNameMap; - } - public void setDisplayNameMap(Map displayNameMap) { - this.displayNameMap = displayNameMap; - } - public Map getQualifierNameMap() { - return qualifierNameMap; - } - public void setQualifierNameMap(Map qualifierNameMap) { - this.qualifierNameMap = qualifierNameMap; - } - public String getPrefix() { - return prefix; - } - public void setPrefix(String prefix) { - this.prefix = prefix; - } - public String getService() { - return service; - } - public void setService(String service) { - this.service = service; - } - public String getServiceCreationPath() { - return serviceCreationPath; - } - public void setServiceCreationPath(String serviceCreationPath) { - 
this.serviceCreationPath = serviceCreationPath; - } - public String getServiceDeletionPath() { - return serviceDeletionPath; - } - public void setServiceDeletionPath(String serviceDeletionPath) { - this.serviceDeletionPath = serviceDeletionPath; - } - public String[] getPartitions() { - return partitions; - } - public void setPartitions(String[] partitions) { - this.partitions = partitions; - } - public IndexDefinition[] getIndexes() { - return indexes; - } - public void setIndexes(IndexDefinition[] indexes) { - this.indexes = indexes; - } - public Map getQualifierGetterMap() { - return qualifierGetterMap; - } - public void setQualifierGetterMap(Map qualifierGetterMap) { - this.qualifierGetterMap = qualifierGetterMap; - } - public String[] getTags(){ - return tags; - } - public void setTags(String[] tags){ - this.tags = tags; - } - -// public Map getQualifierDisplayNameMap(){ -// Map qualifierDisplayNameMap = new HashMap(); -// for(Map.Entry entry: qualifierNameMap.entrySet()){ -// qualifierDisplayNameMap.put(entry.getKey(),entry.getValue().getDisplayName()); -// } -// return qualifierDisplayNameMap; -// } - - /** - * a filed is a tag when this field is neither in qualifierNameMap nor in displayNameMap - * @param field - * @return - */ - public boolean isTag(String field){ - return (qualifierNameMap.get(field) == null && displayNameMap.get(field) == null); -// return (qualifierNameMap.get(field) == null); - } - - /** - * Check if the specified field is a partition tag field - */ - public boolean isPartitionTag(String field) { - if (partitions == null || (!isTag(field))) { - return false; - } - for (String partition : partitions) { - if (partition.equals(field)) { - return true; - } - } - return false; - - } - - public Object getValue(TaggedLogAPIEntity entity, String field) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException { - if (!entityClass.equals(entity.getClass())) { - if ((entityClass.equals(GenericMetricEntity.class) && 
entity.getClass().equals(GenericMetricShadowEntity.class))) { - GenericMetricShadowEntity e = (GenericMetricShadowEntity)entity; - return e.getValue(); - } else { - throw new IllegalArgumentException("Invalid entity type: " + entity.getClass().getSimpleName()); - } - } - final Method m = qualifierGetterMap.get(field); - if (m == null) { - // The field is a tag - if (entity.getTags() != null) { - return entity.getTags().get(field); - } - } - if (m != null) { - return m.invoke(entity); - } - return null; - } - - - @Override - public void write(DataOutput out) throws IOException { - out.writeUTF(entityClass.getName()); - out.writeUTF(table); - out.writeUTF(columnFamily); - out.writeUTF(prefix); - out.writeUTF(service); - - int partitionsLen = 0; - if(partitions != null) partitionsLen =partitions.length; - out.writeInt(partitionsLen); - for (int i = 0; i < partitionsLen; i++) { - out.writeUTF(partitions[i]); - } - - int displayNameMapSize = displayNameMap.size(); - out.writeInt(displayNameMapSize); - for(Map.Entry entry: displayNameMap.entrySet()){ - out.writeUTF(entry.getKey()); - entry.getValue().write(out); - } - - int qualifierNameMapSize = qualifierNameMap.size(); - out.writeInt(qualifierNameMapSize); - for(Map.Entry entry: qualifierNameMap.entrySet()){ - out.writeUTF(entry.getKey()); - entry.getValue().write(out); - } - - // TODO: write qualifierGetterMap - out.writeBoolean(isTimeSeries); - - boolean hasMetricDefinition = metricDefinition != null; - out.writeBoolean(hasMetricDefinition); - if(hasMetricDefinition) { - // write MetricDefinition - metricDefinition.write(out); - } - - // TODO: write indexes - } - - - public void setEntityDefinition(EntityDefinition ed){ - this.entityClass = ed.getEntityClass(); - this.table = ed.getTable(); - this.columnFamily = ed.getColumnFamily(); - this.prefix = ed.getPrefix(); - this.service = ed.getService(); - this.partitions = ed.getPartitions(); - this.displayNameMap = ed.getDisplayNameMap(); - this.qualifierGetterMap = 
ed.getQualifierGetterMap(); +public class EntityDefinition implements Writable { + private static final Logger LOG = LoggerFactory.getLogger(EntityDefinition.class); + + private Class entityClass; + private String table; + private String columnFamily; + // TODO prefix be within search/get condition instead of entity definition. Topology entity should have pre-defined prefix. + private String prefix; + private String service; + private String serviceCreationPath; + private String serviceDeletionPath; + private String[] partitions; + private Map displayNameMap = new HashMap(); + private Map qualifierNameMap = new HashMap(); + private Map qualifierGetterMap = new HashMap(); + private String[] tags; + private boolean isTimeSeries; + private MetricDefinition metricDefinition; + private IndexDefinition[] indexes; + + + public EntityDefinition() { + } + + public MetricDefinition getMetricDefinition() { + return metricDefinition; + } + + public void setMetricDefinition(MetricDefinition metricDefinition) { + this.metricDefinition = metricDefinition; + } + + public boolean isTimeSeries() { + return isTimeSeries; + } + + public void setTimeSeries(boolean isTimeSeries) { + this.isTimeSeries = isTimeSeries; + } + + public String getColumnFamily() { + return columnFamily; + } + + public void setColumnFamily(String columnFamily) { + this.columnFamily = columnFamily; + } + + public Class getEntityClass() { + return entityClass; + } + + public void setEntityClass(Class entityClass) { + this.entityClass = entityClass; + } + + public String getTable() { + return table; + } + + public void setTable(String table) { + this.table = table; + } + + public Map getDisplayNameMap() { + return displayNameMap; + } + + public void setDisplayNameMap(Map displayNameMap) { + this.displayNameMap = displayNameMap; + } + + public Map getQualifierNameMap() { + return qualifierNameMap; + } + + public void setQualifierNameMap(Map qualifierNameMap) { + this.qualifierNameMap = qualifierNameMap; + } + + 
public String getPrefix() { + return prefix; + } + + public void setPrefix(String prefix) { + this.prefix = prefix; + } + + public String getService() { + return service; + } + + public void setService(String service) { + this.service = service; + } + + public String getServiceCreationPath() { + return serviceCreationPath; + } + + public void setServiceCreationPath(String serviceCreationPath) { + this.serviceCreationPath = serviceCreationPath; + } + + public String getServiceDeletionPath() { + return serviceDeletionPath; + } + + public void setServiceDeletionPath(String serviceDeletionPath) { + this.serviceDeletionPath = serviceDeletionPath; + } + + public String[] getPartitions() { + return partitions; + } + + public void setPartitions(String[] partitions) { + this.partitions = partitions; + } + + public IndexDefinition[] getIndexes() { + return indexes; + } + + public void setIndexes(IndexDefinition[] indexes) { + this.indexes = indexes; + } + + public Map getQualifierGetterMap() { + return qualifierGetterMap; + } + + public void setQualifierGetterMap(Map qualifierGetterMap) { + this.qualifierGetterMap = qualifierGetterMap; + } + + public String[] getTags() { + return tags; + } + + public void setTags(String[] tags) { + this.tags = tags; + } + + /* + public Map getQualifierDisplayNameMap(){ + Map qualifierDisplayNameMap = new HashMap(); + for(Map.Entry entry: qualifierNameMap.entrySet()){ + qualifierDisplayNameMap.put(entry.getKey(),entry.getValue().getDisplayName()); + } + return qualifierDisplayNameMap; + } + */ + + /** + * a filed is a tag when this field is neither in qualifierNameMap nor in displayNameMap. + * + * @param field + * @return + */ + public boolean isTag(String field) { + return (qualifierNameMap.get(field) == null && displayNameMap.get(field) == null); + } + + /** + * Check if the specified field is a partition tag field. 
+ */ + public boolean isPartitionTag(String field) { + if (partitions == null || (!isTag(field))) { + return false; + } + for (String partition : partitions) { + if (partition.equals(field)) { + return true; + } + } + return false; + + } + + public Object getValue(TaggedLogAPIEntity entity, String field) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException { + if (!entityClass.equals(entity.getClass())) { + if ((entityClass.equals(GenericMetricEntity.class) && entity.getClass().equals(GenericMetricShadowEntity.class))) { + GenericMetricShadowEntity e = (GenericMetricShadowEntity) entity; + return e.getValue(); + } else { + throw new IllegalArgumentException("Invalid entity type: " + entity.getClass().getSimpleName()); + } + } + final Method m = qualifierGetterMap.get(field); + if (m == null) { + // The field is a tag + if (entity.getTags() != null) { + return entity.getTags().get(field); + } + } + if (m != null) { + return m.invoke(entity); + } + return null; + } + + + @Override + public void write(DataOutput out) throws IOException { + out.writeUTF(entityClass.getName()); + out.writeUTF(table); + out.writeUTF(columnFamily); + out.writeUTF(prefix); + out.writeUTF(service); + + int partitionsLen = 0; + if (partitions != null) { + partitionsLen = partitions.length; + } + out.writeInt(partitionsLen); + for (int i = 0; i < partitionsLen; i++) { + out.writeUTF(partitions[i]); + } + + int displayNameMapSize = displayNameMap.size(); + out.writeInt(displayNameMapSize); + for (Map.Entry entry : displayNameMap.entrySet()) { + out.writeUTF(entry.getKey()); + entry.getValue().write(out); + } + + int qualifierNameMapSize = qualifierNameMap.size(); + out.writeInt(qualifierNameMapSize); + for (Map.Entry entry : qualifierNameMap.entrySet()) { + out.writeUTF(entry.getKey()); + entry.getValue().write(out); + } + + // TODO: write qualifierGetterMap + out.writeBoolean(isTimeSeries); + + boolean hasMetricDefinition = metricDefinition != null; + 
out.writeBoolean(hasMetricDefinition); + if (hasMetricDefinition) { + // write MetricDefinition + metricDefinition.write(out); + } + + // TODO: write indexes + } + + + public void setEntityDefinition(EntityDefinition ed) { + this.entityClass = ed.getEntityClass(); + this.table = ed.getTable(); + this.columnFamily = ed.getColumnFamily(); + this.prefix = ed.getPrefix(); + this.service = ed.getService(); + this.partitions = ed.getPartitions(); + this.displayNameMap = ed.getDisplayNameMap(); + this.qualifierGetterMap = ed.getQualifierGetterMap(); this.qualifierNameMap = ed.getQualifierNameMap(); - this.isTimeSeries = ed.isTimeSeries(); - this.metricDefinition = ed.metricDefinition; - this.indexes = ed.getIndexes(); - } - - ////////////////////////////////////////////// - // TODO: Cache object for reading in region side - ////////////////////////////////////////////// - // private final static Map _classEntityDefinitionCache = new HashMap(); - - @Override - public void readFields(DataInput in) throws IOException { - String entityClassName = in.readUTF(); -// EntityDefinition _cached = _classEntityDefinitionCache.get(entityClassName); -// if(_cached !=null){ -// setEntityDefinition(_cached); -// LOG.info("Got cached definition for entity: "+entityClassName); -// return; -// } - if(LOG.isDebugEnabled()) LOG.debug("Reading EntityDefinition entity: "+entityClassName); - try { - entityClass = (Class) Class.forName(entityClassName); - } catch (Exception e) { - // ignore - } - table = in.readUTF(); - columnFamily = in.readUTF(); - prefix = in.readUTF(); - service = in.readUTF(); - - int partitionsLen = in.readInt(); - partitions = new String[partitionsLen]; - for (int i = 0; i < partitionsLen; i++) { - partitions[i] = in.readUTF(); - } - int displayNameMapSize = in.readInt(); - for(int i=0;i _classEntityDefinitionCache = new HashMap(); + + @Override + public void readFields(DataInput in) throws IOException { + String entityClassName = in.readUTF(); + //EntityDefinition _cached 
= _classEntityDefinitionCache.get(entityClassName); + //if(_cached !=null){ + //setEntityDefinition(_cached); + //LOG.info("Got cached definition for entity: "+entityClassName); + //return; + //} + if (LOG.isDebugEnabled()) { + LOG.debug("Reading EntityDefinition entity: " + entityClassName); + } + try { + entityClass = (Class) Class.forName(entityClassName); + } catch (Exception e) { + // ignore + } + table = in.readUTF(); + columnFamily = in.readUTF(); + prefix = in.readUTF(); + service = in.readUTF(); + + int partitionsLen = in.readInt(); + partitions = new String[partitionsLen]; + for (int i = 0; i < partitionsLen; i++) { + partitions[i] = in.readUTF(); + } + int displayNameMapSize = in.readInt(); + for (int i = 0; i < displayNameMapSize; i++) { + String key = in.readUTF(); + Qualifier value = new Qualifier(); + value.readFields(in); + displayNameMap.put(key, value); + } + int qualifierNameMapSize = in.readInt(); + for (int i = 0; i < qualifierNameMapSize; i++) { + String key = in.readUTF(); + Qualifier value = new Qualifier(); + value.readFields(in); + qualifierNameMap.put(key, value); + } + // TODO: readFields qualifierGetterMap + isTimeSeries = in.readBoolean(); + + // readFields MetricDefinition + boolean hasMetricDefinition = in.readBoolean(); + if (hasMetricDefinition) { + if (LOG.isDebugEnabled()) { + LOG.debug("reading metricDefinition"); + } + metricDefinition = new MetricDefinition(); + metricDefinition.readFields(in); + } + // TODO: readFields indexes + // _classEntityDefinitionCache.put(entityClassName,this); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinitionManager.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinitionManager.java index 7b1010dff0..183160dc1b 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinitionManager.java +++ 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinitionManager.java @@ -32,443 +32,456 @@ import java.util.concurrent.ConcurrentHashMap; /** - * static initialization of all registered entities. As of now, dynamic registration is not supported + * static initialization of all registered entities. As of now, dynamic registration is not supported. */ public class EntityDefinitionManager { - private static final Logger LOG = LoggerFactory.getLogger(EntityDefinitionManager.class); - private static volatile boolean initialized = false; - /** - * using concurrent hashmap is due to the fact that entity can be registered any time from any thread - */ - private static Map entityServiceMap = new ConcurrentHashMap(); - private static Map, EntityDefinition> classMap = new ConcurrentHashMap, EntityDefinition>(); - private static Map, EntitySerDeser> _serDeserMap = new ConcurrentHashMap, EntitySerDeser>(); - private static Map, Integer> _serDeserClassIDMap = new ConcurrentHashMap, Integer>(); - private static Map> _serIDDeserClassMap = new ConcurrentHashMap>(); - private static Map> entityPrefixMap = new ConcurrentHashMap>(); - private static Map> indexPrefixMap = new ConcurrentHashMap>(); - - static{ - int id = 0; - _serDeserMap.put(NullObject.class, new NullSerDeser()); - _serIDDeserClassMap.put(id, NullObject.class); - _serDeserClassIDMap.put(NullObject.class, id++); - - _serDeserMap.put(String.class, new StringSerDeser()); - _serIDDeserClassMap.put(id, String.class); - _serDeserClassIDMap.put(String.class, id++); - - _serDeserMap.put(long.class, new LongSerDeser()); - _serIDDeserClassMap.put(id, long.class); - _serDeserClassIDMap.put(long.class, id++); - - _serDeserMap.put(Long.class, new LongSerDeser()); - _serIDDeserClassMap.put(id, Long.class); - _serDeserClassIDMap.put(Long.class, id++); - - _serDeserMap.put(int.class, new IntSerDeser()); - _serIDDeserClassMap.put(id, int.class); - _serDeserClassIDMap.put(int.class, 
id++); - - _serDeserMap.put(Integer.class, new IntSerDeser()); - _serIDDeserClassMap.put(id, Integer.class); - _serDeserClassIDMap.put(Integer.class, id++); - - _serDeserMap.put(Double.class, new DoubleSerDeser()); - _serIDDeserClassMap.put(id, Double.class); - _serDeserClassIDMap.put(Double.class, id++); - - _serDeserMap.put(double.class, new DoubleSerDeser()); - _serIDDeserClassMap.put(id, double.class); - _serDeserClassIDMap.put(double.class, id++); - - _serDeserMap.put(int[].class, new IntArraySerDeser()); - _serIDDeserClassMap.put(id, int[].class); - _serDeserClassIDMap.put(int[].class, id++); - - _serDeserMap.put(double[].class, new DoubleArraySerDeser()); - _serIDDeserClassMap.put(id, double[].class); - _serDeserClassIDMap.put(double[].class, id++); - - _serDeserMap.put(double[][].class, new Double2DArraySerDeser()); - _serIDDeserClassMap.put(id, double[][].class); - _serDeserClassIDMap.put(double[][].class, id++); - - _serDeserMap.put(Boolean.class, new BooleanSerDeser()); - _serIDDeserClassMap.put(id, Boolean.class); - _serDeserClassIDMap.put(Boolean.class, id++); - - _serDeserMap.put(boolean.class, new BooleanSerDeser()); - _serIDDeserClassMap.put(id, boolean.class); - _serDeserClassIDMap.put(boolean.class, id++); - - _serDeserMap.put(String[].class, new StringArraySerDeser()); - _serIDDeserClassMap.put(id, String[].class); - _serDeserClassIDMap.put(String[].class, id++); - - _serDeserMap.put(Map.class, new MapSerDeser()); - _serIDDeserClassMap.put(id, Map.class); - _serDeserClassIDMap.put(Map.class, id++); - - _serDeserMap.put(List.class, new ListSerDeser()); - _serIDDeserClassMap.put(id, List.class); - _serDeserClassIDMap.put(List.class, id++); - } - - - - @SuppressWarnings("rawtypes") - public static EntitySerDeser getSerDeser(Class clazz){ - return _serDeserMap.get(clazz); - } - - /** - * Get internal ID by the predefined registered class - * @param clazz original for serialization/deserialization - * @return the internal id if the input class has 
been registered, otherwise return -1 - */ - public static int getIDBySerDerClass(Class clazz) { - final Integer id = _serDeserClassIDMap.get(clazz); - if (id == null) { - return -1; - } - return id; - } - - - /** - * Get the predefined registered class by internal ID - * @param id the internal class ID - * @return the predefined registered class, if the class hasn't been registered, return null - */ - public static Class getClassByID(int id) { - return _serIDDeserClassMap.get(id); - } - - /** - * it is allowed that user can register their own entity - * @param clazz entity class - * @throws IllegalArgumentException - */ - public static void registerEntity(Class clazz) throws IllegalArgumentException{ - registerEntity(createEntityDefinition(clazz)); - } - - /** - * it is allowed that user can register their own entity - * @deprecated This API is deprecated since we need to use Service annotation to define service name for entities - * @param serviceName entity service name - * @param clazz entity class - * @throws IllegalArgumentException - * - */ + private static final Logger LOG = LoggerFactory.getLogger(EntityDefinitionManager.class); + private static volatile boolean initialized = false; + /** + * using concurrent hashmap is due to the fact that entity can be registered any time from any thread. 
+ */ + private static Map entityServiceMap = new ConcurrentHashMap(); + private static Map, EntityDefinition> classMap = new ConcurrentHashMap, EntityDefinition>(); + private static Map, EntitySerDeser> _serDeserMap = new ConcurrentHashMap, EntitySerDeser>(); + private static Map, Integer> _serDeserClassIDMap = new ConcurrentHashMap, Integer>(); + private static Map> _serIDDeserClassMap = new ConcurrentHashMap>(); + private static Map> entityPrefixMap = new ConcurrentHashMap>(); + private static Map> indexPrefixMap = new ConcurrentHashMap>(); + + static { + int id = 0; + _serDeserMap.put(NullObject.class, new NullSerDeser()); + _serIDDeserClassMap.put(id, NullObject.class); + _serDeserClassIDMap.put(NullObject.class, id++); + + _serDeserMap.put(String.class, new StringSerDeser()); + _serIDDeserClassMap.put(id, String.class); + _serDeserClassIDMap.put(String.class, id++); + + _serDeserMap.put(long.class, new LongSerDeser()); + _serIDDeserClassMap.put(id, long.class); + _serDeserClassIDMap.put(long.class, id++); + + _serDeserMap.put(Long.class, new LongSerDeser()); + _serIDDeserClassMap.put(id, Long.class); + _serDeserClassIDMap.put(Long.class, id++); + + _serDeserMap.put(int.class, new IntSerDeser()); + _serIDDeserClassMap.put(id, int.class); + _serDeserClassIDMap.put(int.class, id++); + + _serDeserMap.put(Integer.class, new IntSerDeser()); + _serIDDeserClassMap.put(id, Integer.class); + _serDeserClassIDMap.put(Integer.class, id++); + + _serDeserMap.put(Double.class, new DoubleSerDeser()); + _serIDDeserClassMap.put(id, Double.class); + _serDeserClassIDMap.put(Double.class, id++); + + _serDeserMap.put(double.class, new DoubleSerDeser()); + _serIDDeserClassMap.put(id, double.class); + _serDeserClassIDMap.put(double.class, id++); + + _serDeserMap.put(int[].class, new IntArraySerDeser()); + _serIDDeserClassMap.put(id, int[].class); + _serDeserClassIDMap.put(int[].class, id++); + + _serDeserMap.put(double[].class, new DoubleArraySerDeser()); + _serIDDeserClassMap.put(id, 
double[].class); + _serDeserClassIDMap.put(double[].class, id++); + + _serDeserMap.put(double[][].class, new Double2DArraySerDeser()); + _serIDDeserClassMap.put(id, double[][].class); + _serDeserClassIDMap.put(double[][].class, id++); + + _serDeserMap.put(Boolean.class, new BooleanSerDeser()); + _serIDDeserClassMap.put(id, Boolean.class); + _serDeserClassIDMap.put(Boolean.class, id++); + + _serDeserMap.put(boolean.class, new BooleanSerDeser()); + _serIDDeserClassMap.put(id, boolean.class); + _serDeserClassIDMap.put(boolean.class, id++); + + _serDeserMap.put(String[].class, new StringArraySerDeser()); + _serIDDeserClassMap.put(id, String[].class); + _serDeserClassIDMap.put(String[].class, id++); + + _serDeserMap.put(Map.class, new MapSerDeser()); + _serIDDeserClassMap.put(id, Map.class); + _serDeserClassIDMap.put(Map.class, id++); + + _serDeserMap.put(List.class, new ListSerDeser()); + _serIDDeserClassMap.put(id, List.class); + _serDeserClassIDMap.put(List.class, id++); + } + + + @SuppressWarnings("rawtypes") + public static EntitySerDeser getSerDeser(Class clazz) { + return _serDeserMap.get(clazz); + } + + /** + * Get internal ID by the predefined registered class. + * + * @param clazz original for serialization/deserialization + * @return the internal id if the input class has been registered, otherwise return -1 + */ + public static int getIDBySerDerClass(Class clazz) { + final Integer id = _serDeserClassIDMap.get(clazz); + if (id == null) { + return -1; + } + return id; + } + + + /** + * Get the predefined registered class by internal ID. + * + * @param id the internal class ID + * @return the predefined registered class, if the class hasn't been registered, return null + */ + public static Class getClassByID(int id) { + return _serIDDeserClassMap.get(id); + } + + /** + * it is allowed that user can register their own entity. 
+ * + * @param clazz entity class + * @throws IllegalArgumentException + */ + public static void registerEntity(Class clazz) throws IllegalArgumentException { + registerEntity(createEntityDefinition(clazz)); + } + + /** + * it is allowed that user can register their own entity. + * + * @param serviceName entity service name + * @param clazz entity class + * @throws IllegalArgumentException + * @deprecated This API is deprecated since we need to use Service annotation to define service name for entities + */ @Deprecated - public static void registerEntity(String serviceName, Class clazz) throws IllegalArgumentException{ - registerEntity(serviceName, createEntityDefinition(clazz)); - } - - /** - * it is allowed that user can register their own entity definition - * @param entityDef entity definition - * @throws IllegalArgumentException - */ - public static void registerEntity(EntityDefinition entityDef) { - registerEntity(entityDef.getService(), entityDef); - } - - /** - * it is allowed that user can register their own entity definition - * @deprecated This API is deprecated since we need to use Service annotation to define service name for entities. 
- * - * @param entityDef entity definition - * @throws IllegalArgumentException - */ - public static void registerEntity(String serviceName, EntityDefinition entityDef) { - final String table = entityDef.getTable(); - if (entityServiceMap.containsKey(serviceName)) { - final EntityDefinition existing = entityServiceMap.get(serviceName); - if (entityDef.getClass().equals(existing.getClass())) { - return; - } - throw new IllegalArgumentException("Service " + serviceName + " has already been registered by " + existing.getClass().getName() + ", so class " + entityDef.getClass() + " can NOT be registered"); - } - synchronized (EntityDefinitionManager.class) { - checkPrefix(entityDef); - entityServiceMap.put(serviceName, entityDef); - Map entityHashMap = entityPrefixMap.get(table); - if (entityHashMap == null) { - entityHashMap = new ConcurrentHashMap(); - entityPrefixMap.put(table, entityHashMap); - } - entityHashMap.put(entityDef.getPrefix().hashCode(), entityDef); - final IndexDefinition[] indexes = entityDef.getIndexes(); - if (indexes != null) { - for (IndexDefinition index : indexes) { - Map indexHashMap = indexPrefixMap.get(table); - if (indexHashMap == null) { - indexHashMap = new ConcurrentHashMap(); - indexPrefixMap.put(table, indexHashMap); - } - indexHashMap.put(index.getIndexPrefix().hashCode(), index); - } - } - classMap.put(entityDef.getEntityClass(), entityDef); - } - if(LOG.isDebugEnabled()) { - LOG.debug(entityDef.getEntityClass().getSimpleName() + " entity registered successfully, table name: " + entityDef.getTable() + - ", prefix: " + entityDef.getPrefix() + ", service: " + serviceName + ", CF: " + entityDef.getColumnFamily()); - }else{ + public static void registerEntity(String serviceName, Class clazz) throws IllegalArgumentException { + registerEntity(serviceName, createEntityDefinition(clazz)); + } + + /** + * it is allowed that user can register their own entity definition. 
+ * + * @param entityDef entity definition + * @throws IllegalArgumentException + */ + public static void registerEntity(EntityDefinition entityDef) { + registerEntity(entityDef.getService(), entityDef); + } + + /** + * it is allowed that user can register their own entity definition. + * + * @param entityDef entity definition + * @throws IllegalArgumentException + * @deprecated This API is deprecated since we need to use Service annotation to define service name for entities. + */ + public static void registerEntity(String serviceName, EntityDefinition entityDef) { + final String table = entityDef.getTable(); + if (entityServiceMap.containsKey(serviceName)) { + final EntityDefinition existing = entityServiceMap.get(serviceName); + if (entityDef.getClass().equals(existing.getClass())) { + return; + } + throw new IllegalArgumentException("Service " + serviceName + " has already been registered by " + existing.getClass().getName() + ", so class " + entityDef.getClass() + " can NOT be " + + "registered"); + } + synchronized (EntityDefinitionManager.class) { + checkPrefix(entityDef); + entityServiceMap.put(serviceName, entityDef); + Map entityHashMap = entityPrefixMap.get(table); + if (entityHashMap == null) { + entityHashMap = new ConcurrentHashMap(); + entityPrefixMap.put(table, entityHashMap); + } + entityHashMap.put(entityDef.getPrefix().hashCode(), entityDef); + final IndexDefinition[] indexes = entityDef.getIndexes(); + if (indexes != null) { + for (IndexDefinition index : indexes) { + Map indexHashMap = indexPrefixMap.get(table); + if (indexHashMap == null) { + indexHashMap = new ConcurrentHashMap(); + indexPrefixMap.put(table, indexHashMap); + } + indexHashMap.put(index.getIndexPrefix().hashCode(), index); + } + } + classMap.put(entityDef.getEntityClass(), entityDef); + } + if (LOG.isDebugEnabled()) { + LOG.debug(entityDef.getEntityClass().getSimpleName() + " entity registered successfully, table name: " + entityDef.getTable() + + ", prefix: " + 
entityDef.getPrefix() + ", service: " + serviceName + ", CF: " + entityDef.getColumnFamily()); + } else { LOG.info(String.format("Registered %s (%s)", entityDef.getEntityClass().getSimpleName(), serviceName)); } - } - - private static void checkPrefix(EntityDefinition entityDef) { - final Integer entityPrefixHashcode = entityDef.getPrefix().hashCode(); - if (entityPrefixMap.containsKey(entityDef.getTable())) { - final Map entityHashMap = entityPrefixMap.get(entityDef.getTable()); - if (entityHashMap.containsKey(entityPrefixHashcode) && (!entityDef.equals(entityHashMap.get(entityPrefixHashcode)))) { - throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass().getName() + ", because of the prefix hash code conflict! The entity prefix " + entityDef.getPrefix() + " has already been registered by entity service " + entityHashMap.get(entityPrefixHashcode).getService()); - } - final IndexDefinition[] indexes = entityDef.getIndexes(); - if (indexes != null) { - for (IndexDefinition index : indexes) { - final Integer indexPrefixHashcode = index.getIndexPrefix().hashCode(); - if (entityHashMap.containsKey(indexPrefixHashcode)) { - throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass().getName() + ", because of the prefix hash code conflict! The index prefix " + index.getIndexPrefix() + " has already been registered by entity " + entityHashMap.get(indexPrefixHashcode).getService()); - } - final Map indexHashMap = indexPrefixMap.get(entityDef.getTable()); - if (indexHashMap != null && indexHashMap.containsKey(indexPrefixHashcode) && (!index.equals(indexHashMap.get(indexPrefixHashcode)))) { - throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass().getName() + ", because of the prefix hash code conflict! 
The index prefix " + index.getIndexPrefix() + " has already been registered by entity " + indexHashMap.get(indexPrefixHashcode).getEntityDefinition().getService()); - } - } - } - } - } - - /** - * Get entity definition by name - * @param serviceName - * @return - * @throws IllegalAccessException - * @throws InstantiationException - */ - public static EntityDefinition getEntityByServiceName(String serviceName) throws InstantiationException, IllegalAccessException{ - checkInit(); - return entityServiceMap.get(serviceName); - } - - public static EntityDefinition getEntityDefinitionByEntityClass(Class clazz) throws InstantiationException, IllegalAccessException { - checkInit(); - return classMap.get(clazz); - } - - private static void checkInit() throws InstantiationException, IllegalAccessException { - if (!initialized) { - synchronized (EntityDefinitionManager.class) { - if (!initialized) { - EntityRepositoryScanner.scan(); - initialized = true; - } - } - } - } - - public static void load() throws IllegalAccessException, InstantiationException { - checkInit(); - } - - /** - * User can register their own field SerDeser - * @param clazz class of the the SerDeser - * @param entitySerDeser entity or field SerDeser - * @throws IllegalArgumentException - */ - public static void registerSerDeser(Class clazz, EntitySerDeser entitySerDeser) { - _serDeserMap.put(clazz, entitySerDeser); - } - - /** - * Check whether the entity class is time series, false by default - * @param clazz - * @return - */ - public static boolean isTimeSeries(Class clazz){ - TimeSeries ts = clazz.getAnnotation(TimeSeries.class); - return ts != null && ts.value(); - } - - @SuppressWarnings("unchecked") - public static EntityDefinition createEntityDefinition(Class cls) { - - final EntityDefinition ed = new EntityDefinition(); - - ed.setEntityClass(cls); - // parse cls' annotations - Table table = cls.getAnnotation(Table.class); - if(table == null || table.value().isEmpty()){ - throw new 
IllegalArgumentException("Entity class must have a non-empty table name annotated with @Table"); - } - String tableName = table.value(); - if(EagleConfigFactory.load().isTableNamePrefixedWithEnvironment()){ - tableName = EagleConfigFactory.load().getEnv() + "_" + tableName; - } - ed.setTable(tableName); - - ColumnFamily family = cls.getAnnotation(ColumnFamily.class); - if(family == null || family.value().isEmpty()){ - throw new IllegalArgumentException("Entity class must have a non-empty column family name annotated with @ColumnFamily"); - } - ed.setColumnFamily(family.value()); - - Prefix prefix = cls.getAnnotation(Prefix.class); - if(prefix == null || prefix.value().isEmpty()){ - throw new IllegalArgumentException("Entity class must have a non-empty prefix name annotated with @Prefix"); - } - ed.setPrefix(prefix.value()); - - TimeSeries ts = cls.getAnnotation(TimeSeries.class); - if(ts == null){ - throw new IllegalArgumentException("Entity class must have a non-empty timeseries name annotated with @TimeSeries"); - } - ed.setTimeSeries(ts.value()); - - Service service = cls.getAnnotation(Service.class); - if(service == null || service.value().isEmpty()){ - ed.setService(cls.getSimpleName()); - } else { - ed.setService(service.value()); - } - - Metric m = cls.getAnnotation(Metric.class); - Map> dynamicFieldTypes = new HashMap>(); - if(m != null){ - // metric has to be timeseries - if(!ts.value()){ - throw new IllegalArgumentException("Metric entity must be time series as well"); - } - MetricDefinition md = new MetricDefinition(); - md.setInterval(m.interval()); - ed.setMetricDefinition(md); - } - - java.lang.reflect.Field[] fields = cls.getDeclaredFields(); - for(java.lang.reflect.Field f : fields){ - Column column = f.getAnnotation(Column.class); - if(column == null || column.value().isEmpty()){ - continue; - } - Class fldCls = f.getType(); - // intrusive check field type for metric entity - checkFieldTypeForMetric(ed.getMetricDefinition(), f.getName(), fldCls, 
dynamicFieldTypes); - Qualifier q = new Qualifier(); - q.setDisplayName(f.getName()); - q.setQualifierName(column.value()); - EntitySerDeser serDeser = _serDeserMap.get(fldCls); - if(serDeser == null){ -// throw new IllegalArgumentException(fldCls.getName() + " in field " + f.getName() + -// " of entity " + cls.getSimpleName() + " has no serializer associated "); - serDeser = DefaultJavaObjctSerDeser.INSTANCE; - } - - q.setSerDeser((EntitySerDeser)serDeser); - ed.getQualifierNameMap().put(q.getQualifierName(), q); - ed.getDisplayNameMap().put(q.getDisplayName(), q); - // TODO: should refine rules, consider fields like "hCol", getter method should be gethCol() according to org.apache.commons.beanutils.PropertyUtils - final String propertyName = f.getName().substring(0,1).toUpperCase() + f.getName().substring(1); - String getterName = "get" + propertyName; - try { - Method method = cls.getMethod(getterName); - ed.getQualifierGetterMap().put(f.getName(), method); - } catch (Exception e) { - // Check if the type is boolean - getterName = "is" + propertyName; - try { - Method method = cls.getMethod(getterName); - ed.getQualifierGetterMap().put(f.getName(), method); - } catch (Exception e1) { - throw new IllegalArgumentException("Field " + f.getName() + " hasn't defined valid getter method: " + getterName, e); - } - } - if(LOG.isDebugEnabled()) LOG.debug("Field registered " + q); - } - - // TODO: Lazy create because not used at all - // dynamically create bean class - if(ed.getMetricDefinition() != null){ - Class metricCls = createDynamicClassForMetric(cls.getName()+"_SingleTimestamp", dynamicFieldTypes); - ed.getMetricDefinition().setSingleTimestampEntityClass(metricCls); - } - - final Partition partition = cls.getAnnotation(Partition.class); - if (partition != null) { - final String[] partitions = partition.value(); - ed.setPartitions(partitions); - // Check if partition fields are all tag fields. Partition field can't be column field, must be tag field. 
- for (String part : partitions) { - if (!ed.isTag(part)) { - throw new IllegalArgumentException("Partition field can't be column field, must be tag field. " - + "Partition name: " + part); - } - } - } - - final Indexes indexes = cls.getAnnotation(Indexes.class); - if (indexes != null) { - final Index[] inds = indexes.value(); - final IndexDefinition[] indexDefinitions = new IndexDefinition[inds.length]; - for (int i = 0; i < inds.length; ++i) { - final Index ind = inds[i]; - indexDefinitions[i] = new IndexDefinition(ed, ind); - } - ed.setIndexes(indexDefinitions); - } - - final ServicePath path = cls.getAnnotation(ServicePath.class); - if (path != null) { - if (path.path() != null && (!path.path().isEmpty())) { - ed.setServiceCreationPath(path.path()); - } - } - - final Tags tags = cls.getAnnotation(Tags.class); - if(tags != null) { - String[] tagNames = tags.value(); - ed.setTags(tagNames); - } - - return ed; - } - - private static void checkFieldTypeForMetric(MetricDefinition md, String fieldName, Object fldCls, Map> dynamicFieldTypes){ - if(md != null){ - if(fldCls.equals(int[].class)){ - dynamicFieldTypes.put(fieldName, int.class); - return; - }else if(fldCls.equals(long[].class)){ - dynamicFieldTypes.put(fieldName, long.class); - return; - }else if(fldCls.equals(double[].class)){ - dynamicFieldTypes.put(fieldName, double.class); - return; - } - throw new IllegalArgumentException("Fields for metric entity must be one of int[], long[] or double[]"); - } - } - - private static Class createDynamicClassForMetric(final String className, Map> dynamicFieldTypes){ - BeanGenerator beanGenerator = new BeanGenerator(); - beanGenerator.setNamingPolicy(new NamingPolicy(){ - @Override - public String getClassName(String prefix,String source, Object key, Predicate names){ - return className; - }}); - BeanGenerator.addProperties(beanGenerator, dynamicFieldTypes); - beanGenerator.setSuperclass(TaggedLogAPIEntity.class); - return (Class) beanGenerator.createClass(); - } - - 
public static Map entities() throws Exception{ - checkInit(); - return entityServiceMap; - } + } + + private static void checkPrefix(EntityDefinition entityDef) { + final Integer entityPrefixHashcode = entityDef.getPrefix().hashCode(); + if (entityPrefixMap.containsKey(entityDef.getTable())) { + final Map entityHashMap = entityPrefixMap.get(entityDef.getTable()); + if (entityHashMap.containsKey(entityPrefixHashcode) && (!entityDef.equals(entityHashMap.get(entityPrefixHashcode)))) { + throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass().getName() + ", because of the prefix hash code conflict! The entity prefix " + entityDef + .getPrefix() + " has already been registered by entity service " + entityHashMap.get(entityPrefixHashcode).getService()); + } + final IndexDefinition[] indexes = entityDef.getIndexes(); + if (indexes != null) { + for (IndexDefinition index : indexes) { + final Integer indexPrefixHashcode = index.getIndexPrefix().hashCode(); + if (entityHashMap.containsKey(indexPrefixHashcode)) { + throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass().getName() + ", because of the prefix hash code conflict! The index prefix " + index + .getIndexPrefix() + " has already been registered by entity " + entityHashMap.get(indexPrefixHashcode).getService()); + } + final Map indexHashMap = indexPrefixMap.get(entityDef.getTable()); + if (indexHashMap != null && indexHashMap.containsKey(indexPrefixHashcode) && (!index.equals(indexHashMap.get(indexPrefixHashcode)))) { + throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass().getName() + ", because of the prefix hash code conflict! The index prefix " + index + .getIndexPrefix() + " has already been registered by entity " + indexHashMap.get(indexPrefixHashcode).getEntityDefinition().getService()); + } + } + } + } + } + + /** + * Get entity definition by name. 
+ * + * @param serviceName + * @return + * @throws IllegalAccessException + * @throws InstantiationException + */ + public static EntityDefinition getEntityByServiceName(String serviceName) throws InstantiationException, IllegalAccessException { + checkInit(); + return entityServiceMap.get(serviceName); + } + + public static EntityDefinition getEntityDefinitionByEntityClass(Class clazz) throws InstantiationException, IllegalAccessException { + checkInit(); + return classMap.get(clazz); + } + + private static void checkInit() throws InstantiationException, IllegalAccessException { + if (!initialized) { + synchronized (EntityDefinitionManager.class) { + if (!initialized) { + EntityRepositoryScanner.scan(); + initialized = true; + } + } + } + } + + public static void load() throws IllegalAccessException, InstantiationException { + checkInit(); + } + + /** + * User can register their own field SerDeser. + * + * @param clazz class of the the SerDeser + * @param entitySerDeser entity or field SerDeser + * @throws IllegalArgumentException + */ + public static void registerSerDeser(Class clazz, EntitySerDeser entitySerDeser) { + _serDeserMap.put(clazz, entitySerDeser); + } + + /** + * Check whether the entity class is time series, false by default. 
+ * + * @param clazz + * @return + */ + public static boolean isTimeSeries(Class clazz) { + TimeSeries ts = clazz.getAnnotation(TimeSeries.class); + return ts != null && ts.value(); + } + + @SuppressWarnings("unchecked") + public static EntityDefinition createEntityDefinition(Class cls) { + + final EntityDefinition ed = new EntityDefinition(); + + ed.setEntityClass(cls); + // parse cls' annotations + Table table = cls.getAnnotation(Table.class); + if (table == null || table.value().isEmpty()) { + throw new IllegalArgumentException("Entity class must have a non-empty table name annotated with @Table"); + } + String tableName = table.value(); + if (EagleConfigFactory.load().isTableNamePrefixedWithEnvironment()) { + tableName = EagleConfigFactory.load().getEnv() + "_" + tableName; + } + ed.setTable(tableName); + + ColumnFamily family = cls.getAnnotation(ColumnFamily.class); + if (family == null || family.value().isEmpty()) { + throw new IllegalArgumentException("Entity class must have a non-empty column family name annotated with @ColumnFamily"); + } + ed.setColumnFamily(family.value()); + + Prefix prefix = cls.getAnnotation(Prefix.class); + if (prefix == null || prefix.value().isEmpty()) { + throw new IllegalArgumentException("Entity class must have a non-empty prefix name annotated with @Prefix"); + } + ed.setPrefix(prefix.value()); + + TimeSeries ts = cls.getAnnotation(TimeSeries.class); + if (ts == null) { + throw new IllegalArgumentException("Entity class must have a non-empty timeseries name annotated with @TimeSeries"); + } + ed.setTimeSeries(ts.value()); + + Service service = cls.getAnnotation(Service.class); + if (service == null || service.value().isEmpty()) { + ed.setService(cls.getSimpleName()); + } else { + ed.setService(service.value()); + } + + Metric m = cls.getAnnotation(Metric.class); + Map> dynamicFieldTypes = new HashMap>(); + if (m != null) { + // metric has to be timeseries + if (!ts.value()) { + throw new IllegalArgumentException("Metric entity 
must be time series as well"); + } + MetricDefinition md = new MetricDefinition(); + md.setInterval(m.interval()); + ed.setMetricDefinition(md); + } + + java.lang.reflect.Field[] fields = cls.getDeclaredFields(); + for (java.lang.reflect.Field f : fields) { + Column column = f.getAnnotation(Column.class); + if (column == null || column.value().isEmpty()) { + continue; + } + Class fldCls = f.getType(); + // intrusive check field type for metric entity + checkFieldTypeForMetric(ed.getMetricDefinition(), f.getName(), fldCls, dynamicFieldTypes); + Qualifier q = new Qualifier(); + q.setDisplayName(f.getName()); + q.setQualifierName(column.value()); + EntitySerDeser serDeser = _serDeserMap.get(fldCls); + if (serDeser == null) { + //throw new IllegalArgumentException(fldCls.getName() + " in field " + f.getName() + + //" of entity " + cls.getSimpleName() + " has no serializer associated "); + serDeser = DefaultJavaObjctSerDeser.INSTANCE; + } + + q.setSerDeser((EntitySerDeser) serDeser); + ed.getQualifierNameMap().put(q.getQualifierName(), q); + ed.getDisplayNameMap().put(q.getDisplayName(), q); + // TODO: should refine rules, consider fields like "hCol", getter method should be gethCol() according to org.apache.commons.beanutils.PropertyUtils + final String propertyName = f.getName().substring(0, 1).toUpperCase() + f.getName().substring(1); + String getterName = "get" + propertyName; + try { + Method method = cls.getMethod(getterName); + ed.getQualifierGetterMap().put(f.getName(), method); + } catch (Exception e) { + // Check if the type is boolean + getterName = "is" + propertyName; + try { + Method method = cls.getMethod(getterName); + ed.getQualifierGetterMap().put(f.getName(), method); + } catch (Exception e1) { + throw new IllegalArgumentException("Field " + f.getName() + " hasn't defined valid getter method: " + getterName, e); + } + } + if (LOG.isDebugEnabled()) { + LOG.debug("Field registered " + q); + } + } + + // TODO: Lazy create because not used at all + // 
dynamically create bean class + if (ed.getMetricDefinition() != null) { + Class metricCls = createDynamicClassForMetric(cls.getName() + "_SingleTimestamp", dynamicFieldTypes); + ed.getMetricDefinition().setSingleTimestampEntityClass(metricCls); + } + + final Partition partition = cls.getAnnotation(Partition.class); + if (partition != null) { + final String[] partitions = partition.value(); + ed.setPartitions(partitions); + // Check if partition fields are all tag fields. Partition field can't be column field, must be tag field. + for (String part : partitions) { + if (!ed.isTag(part)) { + throw new IllegalArgumentException("Partition field can't be column field, must be tag field. " + + "Partition name: " + part); + } + } + } + + final Indexes indexes = cls.getAnnotation(Indexes.class); + if (indexes != null) { + final Index[] inds = indexes.value(); + final IndexDefinition[] indexDefinitions = new IndexDefinition[inds.length]; + for (int i = 0; i < inds.length; ++i) { + final Index ind = inds[i]; + indexDefinitions[i] = new IndexDefinition(ed, ind); + } + ed.setIndexes(indexDefinitions); + } + + final ServicePath path = cls.getAnnotation(ServicePath.class); + if (path != null) { + if (path.path() != null && (!path.path().isEmpty())) { + ed.setServiceCreationPath(path.path()); + } + } + + final Tags tags = cls.getAnnotation(Tags.class); + if (tags != null) { + String[] tagNames = tags.value(); + ed.setTags(tagNames); + } + + return ed; + } + + private static void checkFieldTypeForMetric(MetricDefinition md, String fieldName, Object fldCls, Map> dynamicFieldTypes) { + if (md != null) { + if (fldCls.equals(int[].class)) { + dynamicFieldTypes.put(fieldName, int.class); + return; + } else if (fldCls.equals(long[].class)) { + dynamicFieldTypes.put(fieldName, long.class); + return; + } else if (fldCls.equals(double[].class)) { + dynamicFieldTypes.put(fieldName, double.class); + return; + } + throw new IllegalArgumentException("Fields for metric entity must be one of 
int[], long[] or double[]"); + } + } + + private static Class createDynamicClassForMetric(final String className, Map> dynamicFieldTypes) { + BeanGenerator beanGenerator = new BeanGenerator(); + beanGenerator.setNamingPolicy(new NamingPolicy() { + @Override + public String getClassName(String prefix, String source, Object key, Predicate names) { + return className; + } + }); + BeanGenerator.addProperties(beanGenerator, dynamicFieldTypes); + beanGenerator.setSuperclass(TaggedLogAPIEntity.class); + return (Class) beanGenerator.createClass(); + } + + public static Map entities() throws Exception { + checkInit(); + return entityServiceMap; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeser.java index 25d55e02f4..08caeabd9d 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeser.java @@ -17,7 +17,9 @@ package org.apache.eagle.log.entity.meta; public interface EntitySerDeser { - public T deserialize(byte[] bytes); - public byte[] serialize(T t); - public Class type(); + public T deserialize(byte[] bytes); + + public byte[] serialize(T t); + + public Class type(); } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeserializer.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeserializer.java index a7ec4e4525..e90cac90c0 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeserializer.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeserializer.java @@ -16,8 +16,8 @@ */ package 
org.apache.eagle.log.entity.meta; -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.commons.beanutils.PropertyUtils; +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -26,54 +26,54 @@ import java.util.Map; public class EntitySerDeserializer { - private static final Logger LOG = LoggerFactory.getLogger(EntitySerDeserializer.class); - - // TODO throws seperate exceptions - @SuppressWarnings("unchecked") - public T readValue(Map qualifierValues, EntityDefinition ed) throws Exception{ - Class clazz = ed.getEntityClass(); - if(clazz == null){ - throw new NullPointerException("Entity class of service "+ed.getService()+" is null"); - } - TaggedLogAPIEntity obj = clazz.newInstance(); - Map map = ed.getQualifierNameMap(); - for(Map.Entry entry : qualifierValues.entrySet()){ - Qualifier q = map.get(entry.getKey()); - if(q == null){ - // if it's not pre-defined qualifier, it must be tag unless it's a bug - if(obj.getTags() == null){ - obj.setTags(new HashMap()); - } - obj.getTags().put(entry.getKey(), new StringSerDeser().deserialize(entry.getValue())); - continue; - } - - // TODO performance loss compared with new operator - // parse different types of qualifiers - String fieldName = q.getDisplayName(); - PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(obj, fieldName); - if(entry.getValue() != null){ - Object args = q.getSerDeser().deserialize(entry.getValue()); - pd.getWriteMethod().invoke(obj, args); -// if (logger.isDebugEnabled()) { -// logger.debug(entry.getKey() + ":" + args + " is deserialized"); -// } - } - } - return (T)obj; - } - - public Map writeValue(TaggedLogAPIEntity entity, EntityDefinition ed) throws Exception{ - Map qualifierValues = new HashMap(); - // iterate all modified qualifiers - for(String fieldName : entity.modifiedQualifiers()){ - PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(entity, fieldName); - Object obj = 
pd.getReadMethod().invoke(entity); - Qualifier q = ed.getDisplayNameMap().get(fieldName); - EntitySerDeser ser = q.getSerDeser(); - byte[] value = ser.serialize(obj); - qualifierValues.put(q.getQualifierName(), value); - } - return qualifierValues; - } + private static final Logger LOG = LoggerFactory.getLogger(EntitySerDeserializer.class); + + // TODO throws seperate exceptions + @SuppressWarnings("unchecked") + public T readValue(Map qualifierValues, EntityDefinition ed) throws Exception { + Class clazz = ed.getEntityClass(); + if (clazz == null) { + throw new NullPointerException("Entity class of service " + ed.getService() + " is null"); + } + TaggedLogAPIEntity obj = clazz.newInstance(); + Map map = ed.getQualifierNameMap(); + for (Map.Entry entry : qualifierValues.entrySet()) { + Qualifier q = map.get(entry.getKey()); + if (q == null) { + // if it's not pre-defined qualifier, it must be tag unless it's a bug + if (obj.getTags() == null) { + obj.setTags(new HashMap()); + } + obj.getTags().put(entry.getKey(), new StringSerDeser().deserialize(entry.getValue())); + continue; + } + + // TODO performance loss compared with new operator + // parse different types of qualifiers + String fieldName = q.getDisplayName(); + PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(obj, fieldName); + if (entry.getValue() != null) { + Object args = q.getSerDeser().deserialize(entry.getValue()); + pd.getWriteMethod().invoke(obj, args); + // if (logger.isDebugEnabled()) { + // logger.debug(entry.getKey() + ":" + args + " is deserialized"); + // } + } + } + return (T) obj; + } + + public Map writeValue(TaggedLogAPIEntity entity, EntityDefinition ed) throws Exception { + Map qualifierValues = new HashMap(); + // iterate all modified qualifiers + for (String fieldName : entity.modifiedQualifiers()) { + PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(entity, fieldName); + Object obj = pd.getReadMethod().invoke(entity); + Qualifier q = 
ed.getDisplayNameMap().get(fieldName); + EntitySerDeser ser = q.getSerDeser(); + byte[] value = ser.serialize(obj); + qualifierValues.put(q.getQualifierName(), value); + } + return qualifierValues; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Index.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Index.java index c7dc1137be..bfb288fd3c 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Index.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Index.java @@ -21,12 +21,14 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface Index { public String name(); + public String[] columns(); + public boolean unique(); -// boolean unique() default true; + // boolean unique() default true; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IndexDefinition.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IndexDefinition.java index 2e6242040c..92f83ff570 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IndexDefinition.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IndexDefinition.java @@ -16,6 +16,17 @@ */ package org.apache.eagle.log.entity.meta; +import org.apache.commons.beanutils.PropertyUtils; +import org.apache.eagle.common.ByteUtil; +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; +import org.apache.eagle.log.entity.RowkeyBuilder; +import org.apache.eagle.query.parser.ANDExpression; +import org.apache.eagle.query.parser.AtomicExpression; +import org.apache.eagle.query.parser.ComparisonOperator; +import 
org.apache.eagle.query.parser.ORExpression; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.beans.PropertyDescriptor; import java.lang.reflect.InvocationTargetException; import java.nio.charset.Charset; @@ -26,310 +37,298 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.log.entity.RowkeyBuilder; -import org.apache.commons.beanutils.PropertyUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.apache.eagle.query.parser.ANDExpression; -import org.apache.eagle.query.parser.AtomicExpression; -import org.apache.eagle.query.parser.ComparisonOperator; -import org.apache.eagle.query.parser.ORExpression; -import org.apache.eagle.common.ByteUtil; - /** * Eagle index schema definition. - * - * 1. Index schema can be defined in entity class by annotation. + * + *

    1. Index schema can be defined in entity class by annotation. * 2. One index schema can contain multiple fields/tags, defined in order * 3. We only support immutable indexing for now * 4. When entity is created or deleted, the corresponding index entity should be created or deleted at the same time * 5. Index transparency to queries. Queries go through index when and only when index can serve all search conditions after query rewrite - * - * + *

    */ public class IndexDefinition { - - public enum IndexType { - UNIQUE_INDEX, - NON_CLUSTER_INDEX, - NON_INDEX - } - - private final EntityDefinition entityDef; - private final Index index; - private final IndexColumn[] columns; - private final String indexPrefix; - - private static final byte[] EMPTY_VALUE = new byte[0]; - private static final Charset UTF_8_CHARSET = Charset.forName("UTF-8"); - public static final int EMPTY_PARTITION_DEFAULT_HASH_CODE = 0; - public static final int MAX_INDEX_VALUE_BYTE_LENGTH = 65535; - - private static final String FIELD_NAME_PATTERN_STRING = "^@(.*)$"; - private static final Pattern FIELD_NAME_PATTERN = Pattern.compile(FIELD_NAME_PATTERN_STRING); - private final static Logger LOG = LoggerFactory.getLogger(IndexDefinition.class); - - public IndexDefinition(EntityDefinition entityDef, Index index) { - this.entityDef = entityDef; - this.index = index; - this.indexPrefix = entityDef.getPrefix() + "_" + index.name(); - final String[] indexColumns = index.columns(); - this.columns = new IndexColumn[indexColumns.length]; - for (int i = 0; i < indexColumns.length; ++i) { - final String name = indexColumns[i]; - final boolean isTag = entityDef.isTag(name); - final Qualifier qualifier = isTag ? null : entityDef.getDisplayNameMap().get(name); - columns[i] = new IndexColumn(name, isTag, qualifier); - } - LOG.info("Created index " + index.name() + " for " + entityDef.getEntityClass().getSimpleName()); - } - - public EntityDefinition getEntityDefinition() { - return entityDef; - } - - public Index getIndex() { - return index; - } - - public String getIndexName() { - return index.name(); - } - - public IndexColumn[] getIndexColumns() { - return columns; - } - - public String getIndexPrefix() { - return indexPrefix; - } - - public boolean isUnique() { - return index.unique(); - } - - /** - * Check if the query is suitable to go through index. If true, then return the value of index fields in order. Otherwise return null. 
- * TODO: currently index fields should be string type. - * - * @param query query expression after re-write - * @param rowkeys if the query can go through the index, all rowkeys will be added into rowkeys. - * @return true if the query can go through the index, otherwise return false - */ - public IndexType canGoThroughIndex(ORExpression query, List rowkeys) { - if (query == null || query.getANDExprList() == null || query.getANDExprList().isEmpty()) - return IndexType.NON_CLUSTER_INDEX; - if (rowkeys != null) { - rowkeys.clear(); - } - final Map indexfieldMap = new HashMap(); - for(ANDExpression andExpr : query.getANDExprList()) { - indexfieldMap.clear(); - for(AtomicExpression ae : andExpr.getAtomicExprList()) { - // TODO temporarily ignore those fields which are not for attributes - final String fieldName = parseEntityAttribute(ae.getKey()); - if(fieldName != null && ComparisonOperator.EQUAL.equals(ae.getOp())){ - indexfieldMap.put(fieldName, ae.getValue()); - } - } - final String[] partitions = entityDef.getPartitions(); - int[] partitionValueHashs = null; - if (partitions != null) { - partitionValueHashs = new int[partitions.length]; - for (int i = 0; i < partitions.length; ++i) { - final String value = indexfieldMap.get(partitions[i]); - if (value == null) { - throw new IllegalArgumentException("Partition " + partitions[i] + " is not defined in the query: " + query.toString()); - } - partitionValueHashs[i] = value.hashCode(); - } - } - final byte[][] indexFieldValues = new byte[columns.length][]; - for (int i = 0; i < columns.length; ++i) { - final IndexColumn col = columns[i]; - if (!indexfieldMap.containsKey(col.getColumnName())) { - // If we have to use scan anyway, there's no need to go through index - return IndexType.NON_INDEX; - } - final String value = indexfieldMap.get(col.getColumnName()); - indexFieldValues[i] = value.getBytes(); - } - final byte[] rowkey = generateUniqueIndexRowkey(indexFieldValues, partitionValueHashs, null); - if (rowkeys != 
null) { - rowkeys.add(rowkey); - } - } - if (index.unique()) { - return IndexType.UNIQUE_INDEX; - } - return IndexType.NON_CLUSTER_INDEX; - } - - private String parseEntityAttribute(String fieldName) { - Matcher m = FIELD_NAME_PATTERN.matcher(fieldName); - if(m.find()){ - return m.group(1); - } - return null; - } - - // TODO: We should move index rowkey generation later since this class is for general purpose, not only for hbase. - public byte[] generateIndexRowkey(TaggedLogAPIEntity entity) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException { - if (entity.getClass() != entityDef.getEntityClass()) { - throw new IllegalArgumentException("Expected entity class: " + entityDef.getEntityClass().getName() + ", but got class " + entity.getClass().getName()); - } - final byte[][] indexValues = generateIndexValues(entity); - final int[] partitionHashCodes = generatePartitionHashCodes(entity); - SortedMap tagMap = null; - if (!index.unique()) { - // non cluster index - tagMap = RowkeyBuilder.generateSortedTagMap(entityDef.getPartitions(), entity.getTags()); - } - - return generateUniqueIndexRowkey(indexValues, partitionHashCodes, tagMap); - } - - private byte[] generateUniqueIndexRowkey(byte[][] indexValues, int[] partitionHashCodes, SortedMap tagMap) { - final int prefixHashCode = indexPrefix.hashCode(); - int totalLength = 4; - totalLength += (partitionHashCodes != null) ? (4 * partitionHashCodes.length) : 0; - - totalLength += (2 * indexValues.length); - for (int i = 0; i < indexValues.length; ++i) { - final byte[] value = indexValues[i]; - totalLength += value.length; - } - if (tagMap != null && (!tagMap.isEmpty())) { - totalLength += tagMap.size() * 8; - } - - int offset = 0; - final byte[] rowkey = new byte[totalLength]; - - // 1. set prefix - ByteUtil.intToBytes(prefixHashCode, rowkey, offset); - offset += 4; - - // 2. 
set partition - if (partitionHashCodes != null) { - for (Integer partitionHashCode : partitionHashCodes) { - ByteUtil.intToBytes(partitionHashCode, rowkey, offset); - offset += 4; - } - } - - // 3. set index values - for (int i = 0; i < columns.length; ++i) { - ByteUtil.shortToBytes((short)indexValues[i].length, rowkey, offset); - offset += 2; - for (int j = 0; j < indexValues[i].length; ++j) { - rowkey[offset++] = indexValues[i][j]; - } - } - - // Check if it's non clustered index, then set the tag/value hash code - if (tagMap != null && (!tagMap.isEmpty())) { - // 4. set tag key/value hashes - for (Map.Entry entry : tagMap.entrySet()) { - ByteUtil.intToBytes(entry.getKey(), rowkey, offset); - offset += 4; - ByteUtil.intToBytes(entry.getValue(), rowkey, offset); - offset += 4; - } - } - - return rowkey; - } - - private int[] generatePartitionHashCodes(TaggedLogAPIEntity entity) { - final String[] partitions = entityDef.getPartitions(); - int[] result = null; - if (partitions != null) { - result = new int[partitions.length]; - final Map tags = entity.getTags(); - for (int i = 0 ; i < partitions.length; ++i) { - final String partition = partitions[i]; - final String tagValue = tags.get(partition); - if (tagValue != null) { - result[i] = tagValue.hashCode(); - } else { - result[i] = EMPTY_PARTITION_DEFAULT_HASH_CODE; - } - } - } - return result; - } - - private byte[][] generateIndexValues(TaggedLogAPIEntity entity) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException { - - final byte[][] result = new byte[columns.length][]; - for (int i = 0; i < columns.length; ++i) { - final IndexColumn column = columns[i]; - final String columnName = column.getColumnName(); - if (column.isTag) { - final Map tags = entity.getTags(); - if (tags == null || tags.get(columnName) == null) { - result[i] = EMPTY_VALUE; - } else { - result[i] = tags.get(columnName).getBytes(UTF_8_CHARSET); - } - } else { - PropertyDescriptor pd = column.getPropertyDescriptor(); - 
if (pd == null) { - pd = PropertyUtils.getPropertyDescriptor(entity, columnName); - column.setPropertyDescriptor(pd); - } - final Object value = pd.getReadMethod().invoke(entity); - if (value == null) { - result[i] = EMPTY_VALUE; - } else { - final Qualifier q = column.getQualifier(); - result[i] = q.getSerDeser().serialize(value); - } - } - if (result[i].length > MAX_INDEX_VALUE_BYTE_LENGTH) { - throw new IllegalArgumentException("Index field value exceeded the max length: " + MAX_INDEX_VALUE_BYTE_LENGTH + ", actual length: " + result[i].length); - } - } - return result; - } - - /** - * Index column definition class - * - */ - public static class IndexColumn { - private final String columnName; - private final boolean isTag; - private final Qualifier qualifier; - private PropertyDescriptor propertyDescriptor; - - public IndexColumn(String columnName, boolean isTag, Qualifier qualifier) { - this.columnName = columnName; - this.isTag = isTag; - this.qualifier = qualifier; - } - - public String getColumnName() { - return columnName; - } - public boolean isTag() { - return isTag; - } - - public Qualifier getQualifier() { - return qualifier; - } - - public PropertyDescriptor getPropertyDescriptor() { - return propertyDescriptor; - } - - public void setPropertyDescriptor(PropertyDescriptor propertyDescriptor) { - this.propertyDescriptor = propertyDescriptor; - } - - } + + public enum IndexType { + UNIQUE_INDEX, + NON_CLUSTER_INDEX, + NON_INDEX + } + + private final EntityDefinition entityDef; + private final Index index; + private final IndexColumn[] columns; + private final String indexPrefix; + + private static final byte[] EMPTY_VALUE = new byte[0]; + private static final Charset UTF_8_CHARSET = Charset.forName("UTF-8"); + public static final int EMPTY_PARTITION_DEFAULT_HASH_CODE = 0; + public static final int MAX_INDEX_VALUE_BYTE_LENGTH = 65535; + + private static final String FIELD_NAME_PATTERN_STRING = "^@(.*)$"; + private static final Pattern FIELD_NAME_PATTERN = 
Pattern.compile(FIELD_NAME_PATTERN_STRING); + private static final Logger LOG = LoggerFactory.getLogger(IndexDefinition.class); + + public IndexDefinition(EntityDefinition entityDef, Index index) { + this.entityDef = entityDef; + this.index = index; + this.indexPrefix = entityDef.getPrefix() + "_" + index.name(); + final String[] indexColumns = index.columns(); + this.columns = new IndexColumn[indexColumns.length]; + for (int i = 0; i < indexColumns.length; ++i) { + final String name = indexColumns[i]; + final boolean isTag = entityDef.isTag(name); + final Qualifier qualifier = isTag ? null : entityDef.getDisplayNameMap().get(name); + columns[i] = new IndexColumn(name, isTag, qualifier); + } + LOG.info("Created index " + index.name() + " for " + entityDef.getEntityClass().getSimpleName()); + } + + public EntityDefinition getEntityDefinition() { + return entityDef; + } + + public Index getIndex() { + return index; + } + + public String getIndexName() { + return index.name(); + } + + public IndexColumn[] getIndexColumns() { + return columns; + } + + public String getIndexPrefix() { + return indexPrefix; + } + + public boolean isUnique() { + return index.unique(); + } + + /** + * Check if the query is suitable to go through index. If true, then return the value of index fields in order. Otherwise return null. + * TODO: currently index fields should be string type. + * + * @param query query expression after re-write + * @param rowkeys if the query can go through the index, all rowkeys will be added into rowkeys. 
+ * @return true if the query can go through the index, otherwise return false + */ + public IndexType canGoThroughIndex(ORExpression query, List rowkeys) { + if (query == null || query.getANDExprList() == null || query.getANDExprList().isEmpty()) { + return IndexType.NON_CLUSTER_INDEX; + } + if (rowkeys != null) { + rowkeys.clear(); + } + final Map indexfieldMap = new HashMap(); + for (ANDExpression andExpr : query.getANDExprList()) { + indexfieldMap.clear(); + for (AtomicExpression ae : andExpr.getAtomicExprList()) { + // TODO temporarily ignore those fields which are not for attributes + final String fieldName = parseEntityAttribute(ae.getKey()); + if (fieldName != null && ComparisonOperator.EQUAL.equals(ae.getOp())) { + indexfieldMap.put(fieldName, ae.getValue()); + } + } + final String[] partitions = entityDef.getPartitions(); + int[] partitionValueHashs = null; + if (partitions != null) { + partitionValueHashs = new int[partitions.length]; + for (int i = 0; i < partitions.length; ++i) { + final String value = indexfieldMap.get(partitions[i]); + if (value == null) { + throw new IllegalArgumentException("Partition " + partitions[i] + " is not defined in the query: " + query.toString()); + } + partitionValueHashs[i] = value.hashCode(); + } + } + final byte[][] indexFieldValues = new byte[columns.length][]; + for (int i = 0; i < columns.length; ++i) { + final IndexColumn col = columns[i]; + if (!indexfieldMap.containsKey(col.getColumnName())) { + // If we have to use scan anyway, there's no need to go through index + return IndexType.NON_INDEX; + } + final String value = indexfieldMap.get(col.getColumnName()); + indexFieldValues[i] = value.getBytes(); + } + final byte[] rowkey = generateUniqueIndexRowkey(indexFieldValues, partitionValueHashs, null); + if (rowkeys != null) { + rowkeys.add(rowkey); + } + } + if (index.unique()) { + return IndexType.UNIQUE_INDEX; + } + return IndexType.NON_CLUSTER_INDEX; + } + + private String parseEntityAttribute(String fieldName) 
{ + Matcher m = FIELD_NAME_PATTERN.matcher(fieldName); + if (m.find()) { + return m.group(1); + } + return null; + } + + // TODO: We should move index rowkey generation later since this class is for general purpose, not only for hbase. + public byte[] generateIndexRowkey(TaggedLogAPIEntity entity) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException { + if (entity.getClass() != entityDef.getEntityClass()) { + throw new IllegalArgumentException("Expected entity class: " + entityDef.getEntityClass().getName() + ", but got class " + entity.getClass().getName()); + } + final byte[][] indexValues = generateIndexValues(entity); + final int[] partitionHashCodes = generatePartitionHashCodes(entity); + SortedMap tagMap = null; + if (!index.unique()) { + // non cluster index + tagMap = RowkeyBuilder.generateSortedTagMap(entityDef.getPartitions(), entity.getTags()); + } + + return generateUniqueIndexRowkey(indexValues, partitionHashCodes, tagMap); + } + + private byte[] generateUniqueIndexRowkey(byte[][] indexValues, int[] partitionHashCodes, SortedMap tagMap) { + final int prefixHashCode = indexPrefix.hashCode(); + int totalLength = 4; + totalLength += (partitionHashCodes != null) ? (4 * partitionHashCodes.length) : 0; + + totalLength += (2 * indexValues.length); + for (int i = 0; i < indexValues.length; ++i) { + final byte[] value = indexValues[i]; + totalLength += value.length; + } + if (tagMap != null && (!tagMap.isEmpty())) { + totalLength += tagMap.size() * 8; + } + + int offset = 0; + final byte[] rowkey = new byte[totalLength]; + + // 1. set prefix + ByteUtil.intToBytes(prefixHashCode, rowkey, offset); + offset += 4; + + // 2. set partition + if (partitionHashCodes != null) { + for (Integer partitionHashCode : partitionHashCodes) { + ByteUtil.intToBytes(partitionHashCode, rowkey, offset); + offset += 4; + } + } + + // 3. 
set index values + for (int i = 0; i < columns.length; ++i) { + ByteUtil.shortToBytes((short) indexValues[i].length, rowkey, offset); + offset += 2; + for (int j = 0; j < indexValues[i].length; ++j) { + rowkey[offset++] = indexValues[i][j]; + } + } + + // Check if it's non clustered index, then set the tag/value hash code + if (tagMap != null && (!tagMap.isEmpty())) { + // 4. set tag key/value hashes + for (Map.Entry entry : tagMap.entrySet()) { + ByteUtil.intToBytes(entry.getKey(), rowkey, offset); + offset += 4; + ByteUtil.intToBytes(entry.getValue(), rowkey, offset); + offset += 4; + } + } + + return rowkey; + } + + private int[] generatePartitionHashCodes(TaggedLogAPIEntity entity) { + final String[] partitions = entityDef.getPartitions(); + int[] result = null; + if (partitions != null) { + result = new int[partitions.length]; + final Map tags = entity.getTags(); + for (int i = 0; i < partitions.length; ++i) { + final String partition = partitions[i]; + final String tagValue = tags.get(partition); + if (tagValue != null) { + result[i] = tagValue.hashCode(); + } else { + result[i] = EMPTY_PARTITION_DEFAULT_HASH_CODE; + } + } + } + return result; + } + + private byte[][] generateIndexValues(TaggedLogAPIEntity entity) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException { + + final byte[][] result = new byte[columns.length][]; + for (int i = 0; i < columns.length; ++i) { + final IndexColumn column = columns[i]; + final String columnName = column.getColumnName(); + if (column.isTag) { + final Map tags = entity.getTags(); + if (tags == null || tags.get(columnName) == null) { + result[i] = EMPTY_VALUE; + } else { + result[i] = tags.get(columnName).getBytes(UTF_8_CHARSET); + } + } else { + PropertyDescriptor pd = column.getPropertyDescriptor(); + if (pd == null) { + pd = PropertyUtils.getPropertyDescriptor(entity, columnName); + column.setPropertyDescriptor(pd); + } + final Object value = pd.getReadMethod().invoke(entity); + if (value == 
null) { + result[i] = EMPTY_VALUE; + } else { + final Qualifier q = column.getQualifier(); + result[i] = q.getSerDeser().serialize(value); + } + } + if (result[i].length > MAX_INDEX_VALUE_BYTE_LENGTH) { + throw new IllegalArgumentException("Index field value exceeded the max length: " + MAX_INDEX_VALUE_BYTE_LENGTH + ", actual length: " + result[i].length); + } + } + return result; + } + + /** + * Index column definition class. + */ + public static class IndexColumn { + private final String columnName; + private final boolean isTag; + private final Qualifier qualifier; + private PropertyDescriptor propertyDescriptor; + + public IndexColumn(String columnName, boolean isTag, Qualifier qualifier) { + this.columnName = columnName; + this.isTag = isTag; + this.qualifier = qualifier; + } + + public String getColumnName() { + return columnName; + } + + public boolean isTag() { + return isTag; + } + + public Qualifier getQualifier() { + return qualifier; + } + + public PropertyDescriptor getPropertyDescriptor() { + return propertyDescriptor; + } + + public void setPropertyDescriptor(PropertyDescriptor propertyDescriptor) { + this.propertyDescriptor = propertyDescriptor; + } + + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Indexes.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Indexes.java index 3c82a0a4ab..125ffae88c 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Indexes.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Indexes.java @@ -21,9 +21,9 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface Indexes { - public Index[] value(); + public Index[] value(); } diff --git 
a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntArraySerDeser.java index 8831223aaf..8ee793c4ad 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntArraySerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntArraySerDeser.java @@ -20,52 +20,55 @@ /** * serialize int array which is stored like the following - * *size, where the first is the size of int + * *size, where the first is the size of int. */ -public class IntArraySerDeser implements EntitySerDeser{ +public class IntArraySerDeser implements EntitySerDeser { - public IntArraySerDeser(){} + public IntArraySerDeser() { + } - @Override - public int[] deserialize(byte[] bytes){ - if(bytes.length < 4) - return null; - int offset = 0; - // get size of int array - int size = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - int[] values = new int[size]; - for(int i=0; i type() { - return int[].class; - } + /** + * serialize. 
+ * @param obj + * @return + */ + @Override + public byte[] serialize(int[] obj) { + if (obj == null) { + return null; + } + int size = obj.length; + byte[] array = new byte[4 + 4 * size]; + byte[] first = ByteUtil.intToBytes(size); + int offset = 0; + System.arraycopy(first, 0, array, offset, first.length); + offset += first.length; + for (int i = 0; i < size; i++) { + System.arraycopy(ByteUtil.intToBytes(obj[i]), 0, array, offset, 4); + offset += 4; + } + return array; + } + + @Override + public Class type() { + return int[].class; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntSerDeser.java index 695baddbc4..8353499828 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntSerDeser.java @@ -18,25 +18,28 @@ import org.apache.eagle.common.ByteUtil; -public class IntSerDeser implements EntitySerDeser{ - public IntSerDeser(){} +public class IntSerDeser implements EntitySerDeser { + public IntSerDeser() { + } - @Override - public Integer deserialize(byte[] bytes){ - if(bytes.length < 4) - return null; - return Integer.valueOf(ByteUtil.bytesToInt(bytes)); - } - - @Override - public byte[] serialize(Integer obj){ - if(obj == null) - return null; - return ByteUtil.intToBytes(obj); - } + @Override + public Integer deserialize(byte[] bytes) { + if (bytes.length < 4) { + return null; + } + return Integer.valueOf(ByteUtil.bytesToInt(bytes)); + } - @Override - public Class type() { - return Integer.class; - } + @Override + public byte[] serialize(Integer obj) { + if (obj == null) { + return null; + } + return ByteUtil.intToBytes(obj); + } + + @Override + public Class type() { + return Integer.class; + } } diff --git 
a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ListSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ListSerDeser.java index eaf5e929d5..a22d281c7d 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ListSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ListSerDeser.java @@ -16,113 +16,111 @@ */ package org.apache.eagle.log.entity.meta; +import org.apache.eagle.common.ByteUtil; + import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; -import org.apache.eagle.common.ByteUtil; - /** - * Serialization/deserialization for map type - * + * Serialization/deserialization for map type. */ @SuppressWarnings("rawtypes") public class ListSerDeser implements EntitySerDeser { - @SuppressWarnings({ "unchecked" }) - @Override - public List deserialize(byte[] bytes) { - if (bytes == null || bytes.length == 0) { - return null; - } - final List list = new ArrayList(); - int offset = 0; - // get size of int array - final int size = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - - for (int i = 0; i < size; ++i) { - final int valueID = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - final Class valueClass = EntityDefinitionManager.getClassByID(valueID); - if (valueClass == null) { - throw new IllegalArgumentException("Unsupported value type ID: " + valueID); - } - final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass); - final int valueLength = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - final byte[] valueContent = new byte[valueLength]; - System.arraycopy(bytes, offset, valueContent, 0, valueLength); - offset += valueLength; - final Object value = valueSerDer.deserialize(valueContent); - - list.add(value); - } - return list; - } + @SuppressWarnings( {"unchecked"}) + @Override + public List 
deserialize(byte[] bytes) { + if (bytes == null || bytes.length == 0) { + return null; + } + final List list = new ArrayList(); + int offset = 0; + // get size of int array + final int size = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + + for (int i = 0; i < size; ++i) { + final int valueID = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + final Class valueClass = EntityDefinitionManager.getClassByID(valueID); + if (valueClass == null) { + throw new IllegalArgumentException("Unsupported value type ID: " + valueID); + } + final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass); + final int valueLength = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + final byte[] valueContent = new byte[valueLength]; + System.arraycopy(bytes, offset, valueContent, 0, valueLength); + offset += valueLength; + final Object value = valueSerDer.deserialize(valueContent); + + list.add(value); + } + return list; + } + + /** + * size + value1 type id + value length + value1 binary content + ... + * 4B 4B 4B value1 bytes + */ + @SuppressWarnings( {"unchecked"}) + @Override + public byte[] serialize(List list) { + if (list == null) { + return null; + } + final int size = list.size(); + final int[] valueIDs = new int[size]; + final byte[][] valueBytes = new byte[size][]; - /** - * size + value1 type id + value length + value1 binary content + ... 
- * 4B 4B 4B value1 bytes - */ - @SuppressWarnings({ "unchecked" }) - @Override - public byte[] serialize(List list) { - if(list == null) - return null; - final int size = list.size(); - final int[] valueIDs = new int[size]; - final byte[][] valueBytes = new byte[size][]; - - int totalSize = 4 + size * 8; - int i = 0; - Iterator iter = list.iterator(); - while (iter.hasNext()) { - final Object value = iter.next(); - Class valueClass = value.getClass(); - int valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass); + int totalSize = 4 + size * 8; + int i = 0; + Iterator iter = list.iterator(); + while (iter.hasNext()) { + final Object value = iter.next(); + Class valueClass = value.getClass(); + int valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass); - if (valueTypeID == -1) { - if (value instanceof List) { - valueClass = List.class; - valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass); - } - else if (value instanceof Map) { - valueClass = Map.class; - valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass); - } - else { - throw new IllegalArgumentException("Unsupported class: " + valueClass.getName()); - } - } - valueIDs[i] = valueTypeID; - final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass); - if (valueSerDer == null) { - throw new IllegalArgumentException("Unsupported class: " + valueClass.getName()); - } - valueBytes[i] = valueSerDer.serialize(value); - totalSize += valueBytes[i].length; - ++i; - } - final byte[] result = new byte[totalSize]; - int offset = 0; - ByteUtil.intToBytes(size, result, offset); - offset += 4; - for (i = 0; i < size; ++i) { - ByteUtil.intToBytes(valueIDs[i], result, offset); - offset += 4; - ByteUtil.intToBytes(valueBytes[i].length, result, offset); - offset += 4; - System.arraycopy(valueBytes[i], 0, result, offset, valueBytes[i].length); - offset += valueBytes[i].length; - } - return result; - } + if (valueTypeID == -1) { + if (value instanceof 
List) { + valueClass = List.class; + valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass); + } else if (value instanceof Map) { + valueClass = Map.class; + valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass); + } else { + throw new IllegalArgumentException("Unsupported class: " + valueClass.getName()); + } + } + valueIDs[i] = valueTypeID; + final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass); + if (valueSerDer == null) { + throw new IllegalArgumentException("Unsupported class: " + valueClass.getName()); + } + valueBytes[i] = valueSerDer.serialize(value); + totalSize += valueBytes[i].length; + ++i; + } + final byte[] result = new byte[totalSize]; + int offset = 0; + ByteUtil.intToBytes(size, result, offset); + offset += 4; + for (i = 0; i < size; ++i) { + ByteUtil.intToBytes(valueIDs[i], result, offset); + offset += 4; + ByteUtil.intToBytes(valueBytes[i].length, result, offset); + offset += 4; + System.arraycopy(valueBytes[i], 0, result, offset, valueBytes[i].length); + offset += valueBytes[i].length; + } + return result; + } - @Override - public Class type() { - return List.class; - } + @Override + public Class type() { + return List.class; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/LongSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/LongSerDeser.java index 914cd95f56..6f0c6abe16 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/LongSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/LongSerDeser.java @@ -18,26 +18,29 @@ import org.apache.eagle.common.ByteUtil; -public class LongSerDeser implements EntitySerDeser{ - public LongSerDeser(){} +public class LongSerDeser implements EntitySerDeser { + public LongSerDeser() { + } - @Override - public Long deserialize(byte[] bytes){ - 
if(bytes.length < 8) - return null; -// return new Long(ByteUtil.bytesToLong(bytes)); - return Long.valueOf(ByteUtil.bytesToLong(bytes)); - } - - @Override - public byte[] serialize(Long obj){ - if(obj == null) - return null; - return ByteUtil.longToBytes(obj); - } + @Override + public Long deserialize(byte[] bytes) { + if (bytes.length < 8) { + return null; + } + // return new Long(ByteUtil.bytesToLong(bytes)); + return Long.valueOf(ByteUtil.bytesToLong(bytes)); + } - @Override - public Class type() { - return Long.class; - } + @Override + public byte[] serialize(Long obj) { + if (obj == null) { + return null; + } + return ByteUtil.longToBytes(obj); + } + + @Override + public Class type() { + return Long.class; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MapSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MapSerDeser.java index d16fe3ad0d..7c9aa77991 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MapSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MapSerDeser.java @@ -23,146 +23,146 @@ import java.util.TreeMap; /** - * Serialization/deserialization for map type - * + * Serialization/deserialization for map type. 
*/ @SuppressWarnings("rawtypes") public class MapSerDeser implements EntitySerDeser { - @SuppressWarnings({ "unchecked" }) - @Override - public Map deserialize(byte[] bytes) { - if (bytes == null || bytes.length == 0) { - return null; - } - final Map map = new TreeMap(); - int offset = 0; - // get size of int array - final int size = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - - for (int i = 0; i < size; ++i) { - final int keyID = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - final Class keyClass = EntityDefinitionManager.getClassByID(keyID); - if (keyClass == null) { - throw new IllegalArgumentException("Unsupported key type ID: " + keyID); - } - final EntitySerDeser keySerDer = EntityDefinitionManager.getSerDeser(keyClass); - final int keyLength = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - final byte[] keyContent = new byte[keyLength]; - System.arraycopy(bytes, offset, keyContent, 0, keyLength); - offset += keyLength; - final Object key = keySerDer.deserialize(keyContent); - - final int valueID = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - final Class valueClass = EntityDefinitionManager.getClassByID(valueID); - if (valueClass == null) { - throw new IllegalArgumentException("Unsupported value type ID: " + valueID); - } - final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass); - final int valueLength = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - final byte[] valueContent = new byte[valueLength]; - System.arraycopy(bytes, offset, valueContent, 0, valueLength); - offset += valueLength; - final Object value = valueSerDer.deserialize(valueContent); - - map.put(key, value); - } - return map; - } + @SuppressWarnings( {"unchecked"}) + @Override + public Map deserialize(byte[] bytes) { + if (bytes == null || bytes.length == 0) { + return null; + } + final Map map = new TreeMap(); + int offset = 0; + // get size of int array + final int size = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + + for 
(int i = 0; i < size; ++i) { + final int keyID = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + final Class keyClass = EntityDefinitionManager.getClassByID(keyID); + if (keyClass == null) { + throw new IllegalArgumentException("Unsupported key type ID: " + keyID); + } + final EntitySerDeser keySerDer = EntityDefinitionManager.getSerDeser(keyClass); + final int keyLength = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + final byte[] keyContent = new byte[keyLength]; + System.arraycopy(bytes, offset, keyContent, 0, keyLength); + offset += keyLength; + final Object key = keySerDer.deserialize(keyContent); + + final int valueID = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + final Class valueClass = EntityDefinitionManager.getClassByID(valueID); + if (valueClass == null) { + throw new IllegalArgumentException("Unsupported value type ID: " + valueID); + } + final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass); + final int valueLength = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + final byte[] valueContent = new byte[valueLength]; + System.arraycopy(bytes, offset, valueContent, 0, valueLength); + offset += valueLength; + final Object value = valueSerDer.deserialize(valueContent); + + map.put(key, value); + } + return map; + } + + /** + * size + key1 type ID + key1 length + key1 binary content + value1 type id + value length + value1 binary content + ... 
+ * 4B 4B 4B key1 bytes 4B 4B value1 bytes + */ + @SuppressWarnings( {"unchecked"}) + @Override + public byte[] serialize(Map map) { + if (map == null) { + return null; + } + final int size = map.size(); + final int[] keyIDs = new int[size]; + final int[] valueIDs = new int[size]; + final byte[][] keyBytes = new byte[size][]; + final byte[][] valueBytes = new byte[size][]; + + int totalSize = 4 + size * 16; + int i = 0; + Iterator iter = map.entrySet().iterator(); + while (iter.hasNext()) { + final Map.Entry entry = (Map.Entry) iter.next(); + final Object key = entry.getKey(); + final Object value = entry.getValue(); + Class keyClass = key.getClass(); + Class valueClass = NullObject.class; + if (value != null) { + valueClass = value.getClass(); + } + int keyTypeID = EntityDefinitionManager.getIDBySerDerClass(keyClass); + int valueTypeID = 0; // default null object + if (valueClass != null) { + valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass); + } + if (keyTypeID == -1) { + if (key instanceof Map) { + keyClass = Map.class; + keyTypeID = EntityDefinitionManager.getIDBySerDerClass(keyClass); + } else { + throw new IllegalArgumentException("Unsupported class: " + keyClass.getName()); + } + } + if (valueTypeID == -1) { + if (value instanceof Map) { + valueClass = Map.class; + valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass); + } else { + throw new IllegalArgumentException("Unsupported class: " + valueClass.getName()); + } + } + keyIDs[i] = keyTypeID; + valueIDs[i] = valueTypeID; + final EntitySerDeser keySerDer = EntityDefinitionManager.getSerDeser(keyClass); + final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass); + if (keySerDer == null) { + throw new IllegalArgumentException("Unsupported class: " + keyClass.getName()); + } + if (valueSerDer == null) { + throw new IllegalArgumentException("Unsupported class: " + valueClass.getName()); + } + keyBytes[i] = keySerDer.serialize(key); + valueBytes[i] = 
valueSerDer.serialize(value); + totalSize += keyBytes[i].length + valueBytes[i].length; + ++i; + } + final byte[] result = new byte[totalSize]; + int offset = 0; + ByteUtil.intToBytes(size, result, offset); + offset += 4; + for (i = 0; i < size; ++i) { + ByteUtil.intToBytes(keyIDs[i], result, offset); + offset += 4; + ByteUtil.intToBytes(keyBytes[i].length, result, offset); + offset += 4; + System.arraycopy(keyBytes[i], 0, result, offset, keyBytes[i].length); + offset += keyBytes[i].length; - /** - * size + key1 type ID + key1 length + key1 binary content + value1 type id + value length + value1 binary content + ... - * 4B 4B 4B key1 bytes 4B 4B value1 bytes - */ - @SuppressWarnings({ "unchecked" }) - @Override - public byte[] serialize(Map map) { - if(map == null) - return null; - final int size = map.size(); - final int[] keyIDs = new int[size]; - final int[] valueIDs = new int[size]; - final byte[][] keyBytes = new byte[size][]; - final byte[][] valueBytes = new byte[size][]; - - int totalSize = 4 + size * 16; - int i = 0; - Iterator iter = map.entrySet().iterator(); - while (iter.hasNext()) { - final Map.Entry entry = (Map.Entry)iter.next(); - final Object key = entry.getKey(); - final Object value = entry.getValue(); - Class keyClass = key.getClass(); - Class valueClass = NullObject.class; - if (value != null) { - valueClass = value.getClass(); - } - int keyTypeID = EntityDefinitionManager.getIDBySerDerClass(keyClass); - int valueTypeID = 0; // default null object - if (valueClass != null) { - valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass); - } - if (keyTypeID == -1) { - if (key instanceof Map) { - keyClass = Map.class; - keyTypeID = EntityDefinitionManager.getIDBySerDerClass(keyClass); - } else { - throw new IllegalArgumentException("Unsupported class: " + keyClass.getName()); - } - } - if (valueTypeID == -1) { - if (value instanceof Map) { - valueClass = Map.class; - valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass); 
- } else { - throw new IllegalArgumentException("Unsupported class: " + valueClass.getName()); - } - } - keyIDs[i] = keyTypeID; - valueIDs[i] = valueTypeID; - final EntitySerDeser keySerDer = EntityDefinitionManager.getSerDeser(keyClass); - final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass); - if (keySerDer == null) { - throw new IllegalArgumentException("Unsupported class: " + keyClass.getName()); - } - if (valueSerDer == null) { - throw new IllegalArgumentException("Unsupported class: " + valueClass.getName()); - } - keyBytes[i] = keySerDer.serialize(key); - valueBytes[i] = valueSerDer.serialize(value); - totalSize += keyBytes[i].length + valueBytes[i].length; - ++i; - } - final byte[] result = new byte[totalSize]; - int offset = 0; - ByteUtil.intToBytes(size, result, offset); - offset += 4; - for (i = 0; i < size; ++i) { - ByteUtil.intToBytes(keyIDs[i], result, offset); - offset += 4; - ByteUtil.intToBytes(keyBytes[i].length, result, offset); - offset += 4; - System.arraycopy(keyBytes[i], 0, result, offset, keyBytes[i].length); - offset += keyBytes[i].length; - - ByteUtil.intToBytes(valueIDs[i], result, offset); - offset += 4; - ByteUtil.intToBytes(valueBytes[i].length, result, offset); - offset += 4; - System.arraycopy(valueBytes[i], 0, result, offset, valueBytes[i].length); - offset += valueBytes[i].length; - } - return result; - } + ByteUtil.intToBytes(valueIDs[i], result, offset); + offset += 4; + ByteUtil.intToBytes(valueBytes[i].length, result, offset); + offset += 4; + System.arraycopy(valueBytes[i], 0, result, offset, valueBytes[i].length); + offset += valueBytes[i].length; + } + return result; + } - @Override - public Class type() { - return Map.class; - } + @Override + public Class type() { + return Map.class; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Metric.java 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Metric.java index 0e3e77664e..25155339f1 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Metric.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Metric.java @@ -21,9 +21,9 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface Metric { - // interval with million seconds - long interval() default 60000; + // interval with million seconds + long interval() default 60000; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MetricDefinition.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MetricDefinition.java index 06bbed34cc..d0b7fc7155 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MetricDefinition.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MetricDefinition.java @@ -25,44 +25,53 @@ import java.io.IOException; public class MetricDefinition implements Writable { - private final static Logger LOG = LoggerFactory.getLogger(MetricDefinition.class); - private long interval; - private Class singleTimestampEntityClass; - public long getInterval() { - return interval; - } - public void setInterval(long interval) { - this.interval = interval; - } - public Class getSingleTimestampEntityClass() { - return singleTimestampEntityClass; - } - public void setSingleTimestampEntityClass(Class singleTimestampEntityClass) { - this.singleTimestampEntityClass = singleTimestampEntityClass; - } + private static final Logger LOG = LoggerFactory.getLogger(MetricDefinition.class); + private long interval; + private Class singleTimestampEntityClass; - private 
final static String EMPTY=""; - @Override - public void write(DataOutput out) throws IOException { - if(LOG.isDebugEnabled()) LOG.debug("Writing metric definition: interval = "+interval+" singleTimestampEntityClass = "+ this.singleTimestampEntityClass); - out.writeLong(interval); - if(this.singleTimestampEntityClass == null){ - out.writeUTF(EMPTY); - }else { - out.writeUTF(this.singleTimestampEntityClass.getName()); - } - } + public long getInterval() { + return interval; + } - @Override - public void readFields(DataInput in) throws IOException { - interval = in.readLong(); - String singleTimestampEntityClassName = in.readUTF(); - if(!EMPTY.equals(singleTimestampEntityClassName)) { - try { - this.singleTimestampEntityClass = Class.forName(singleTimestampEntityClassName); - } catch (ClassNotFoundException e) { - if(LOG.isDebugEnabled()) LOG.warn("Class " + singleTimestampEntityClassName + " not found "); - } - } - } + public void setInterval(long interval) { + this.interval = interval; + } + + public Class getSingleTimestampEntityClass() { + return singleTimestampEntityClass; + } + + public void setSingleTimestampEntityClass(Class singleTimestampEntityClass) { + this.singleTimestampEntityClass = singleTimestampEntityClass; + } + + private static final String EMPTY = ""; + + @Override + public void write(DataOutput out) throws IOException { + if (LOG.isDebugEnabled()) { + LOG.debug("Writing metric definition: interval = " + interval + " singleTimestampEntityClass = " + this.singleTimestampEntityClass); + } + out.writeLong(interval); + if (this.singleTimestampEntityClass == null) { + out.writeUTF(EMPTY); + } else { + out.writeUTF(this.singleTimestampEntityClass.getName()); + } + } + + @Override + public void readFields(DataInput in) throws IOException { + interval = in.readLong(); + String singleTimestampEntityClassName = in.readUTF(); + if (!EMPTY.equals(singleTimestampEntityClassName)) { + try { + this.singleTimestampEntityClass = 
Class.forName(singleTimestampEntityClassName); + } catch (ClassNotFoundException e) { + if (LOG.isDebugEnabled()) { + LOG.warn("Class " + singleTimestampEntityClassName + " not found "); + } + } + } + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndex.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndex.java index 9fb05a3418..600eff366f 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndex.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndex.java @@ -21,7 +21,7 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface NonUniqueIndex { diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndexes.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndexes.java index ff1139786e..95baac71db 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndexes.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndexes.java @@ -22,10 +22,10 @@ import java.lang.annotation.Target; -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface NonUniqueIndexes { - - public NonUniqueIndex[] value(); + + public NonUniqueIndex[] value(); } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NullSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NullSerDeser.java index 177878843c..fd769998c5 100755 --- 
a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NullSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NullSerDeser.java @@ -16,22 +16,22 @@ */ package org.apache.eagle.log.entity.meta; -public class NullSerDeser implements EntitySerDeser{ +public class NullSerDeser implements EntitySerDeser { - private static final byte[] EMPTY_NULL_ARRAY = new byte[0]; - - @Override - public NullObject deserialize(byte[] bytes) { - return null; - } + private static final byte[] EMPTY_NULL_ARRAY = new byte[0]; - @Override - public byte[] serialize(NullObject t) { - return EMPTY_NULL_ARRAY; - } + @Override + public NullObject deserialize(byte[] bytes) { + return null; + } - @Override - public Class type() { - return NullObject.class; - } + @Override + public byte[] serialize(NullObject t) { + return EMPTY_NULL_ARRAY; + } + + @Override + public Class type() { + return NullObject.class; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Partition.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Partition.java index cb60016401..6a236fd800 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Partition.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Partition.java @@ -22,19 +22,16 @@ import java.lang.annotation.Target; /** - * Partition annotation will impact the rowkey generation for Eagle entities. Once an entity class - * has defined the partition fields for an Eagle entity, the hash codes of the defined partition + * Partition annotation will impact the rowkey generation for Eagle entities. Once an entity class + * has defined the partition fields for an Eagle entity, the hash codes of the defined partition * fields will be placed just after prefix field, and before timestamp field. 
- * - * */ -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) -public @interface Partition -{ +public @interface Partition { /** * Order in which annotated tags are to be regarded as data partitions. */ - public String[] value() default { }; + public String[] value() default {}; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Prefix.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Prefix.java index 36f404c722..6d3e3882bb 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Prefix.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Prefix.java @@ -21,8 +21,8 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface Prefix { - String value() default ""; + String value() default ""; } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Qualifier.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Qualifier.java index a8fc8d244a..d6c72c222f 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Qualifier.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Qualifier.java @@ -27,74 +27,82 @@ import java.util.HashMap; import java.util.Map; -public class Qualifier implements Writable{ - private final static Logger LOG = LoggerFactory.getLogger(Qualifier.class); +public class Qualifier implements Writable { + private static final Logger LOG = LoggerFactory.getLogger(Qualifier.class); - private String displayName; - private String qualifierName; - private EntitySerDeser serDeser; - @JsonIgnore 
- public EntitySerDeser getSerDeser() { - return serDeser; - } - public void setSerDeser(EntitySerDeser serDeser) { - this.serDeser = serDeser; - } - public String getDisplayName() { - return displayName; - } - public void setDisplayName(String displayName) { - this.displayName = displayName; - } - public String getQualifierName() { - return qualifierName; - } - public void setQualifierName(String qualifierName) { - this.qualifierName = qualifierName; - } - - public String toString(){ - StringBuffer sb = new StringBuffer(); - sb.append("displayName:"); - sb.append(displayName); - sb.append(","); - sb.append("qualifierName:"); - sb.append(qualifierName); - sb.append(","); - sb.append("serDeser class:"); - sb.append(serDeser.getClass().getName()); - return sb.toString(); - } + private String displayName; + private String qualifierName; + private EntitySerDeser serDeser; - @Override - public void write(DataOutput out) throws IOException { - out.writeUTF(displayName); - out.writeUTF(qualifierName); - out.writeUTF(serDeser.getClass().getName()); - } + @JsonIgnore + public EntitySerDeser getSerDeser() { + return serDeser; + } - private final static Map _entitySerDeserCache = new HashMap(); + public void setSerDeser(EntitySerDeser serDeser) { + this.serDeser = serDeser; + } - @Override - public void readFields(DataInput in) throws IOException { - displayName = in.readUTF(); - qualifierName = in.readUTF(); - String serDeserClassName = in.readUTF(); + public String getDisplayName() { + return displayName; + } - EntitySerDeser _cached = _entitySerDeserCache.get(serDeserClassName); - if(_cached != null){ - this.serDeser = _cached; - }else { - try { - if (LOG.isDebugEnabled()) LOG.debug("Creating new instance for " + serDeserClassName); - Class serDeserClass = Class.forName(serDeserClassName); - this.serDeser = (EntitySerDeser) serDeserClass.newInstance(); - _entitySerDeserCache.put(serDeserClassName, this.serDeser); - } catch (Exception e) { - if (LOG.isDebugEnabled()) { - 
LOG.warn("Class not found for " + serDeserClassName + ": " + e.getMessage(), e); - } - } - } - } + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public String getQualifierName() { + return qualifierName; + } + + public void setQualifierName(String qualifierName) { + this.qualifierName = qualifierName; + } + + public String toString() { + StringBuffer sb = new StringBuffer(); + sb.append("displayName:"); + sb.append(displayName); + sb.append(","); + sb.append("qualifierName:"); + sb.append(qualifierName); + sb.append(","); + sb.append("serDeser class:"); + sb.append(serDeser.getClass().getName()); + return sb.toString(); + } + + @Override + public void write(DataOutput out) throws IOException { + out.writeUTF(displayName); + out.writeUTF(qualifierName); + out.writeUTF(serDeser.getClass().getName()); + } + + private static final Map _entitySerDeserCache = new HashMap(); + + @Override + public void readFields(DataInput in) throws IOException { + displayName = in.readUTF(); + qualifierName = in.readUTF(); + String serDeserClassName = in.readUTF(); + + EntitySerDeser _cached = _entitySerDeserCache.get(serDeserClassName); + if (_cached != null) { + this.serDeser = _cached; + } else { + try { + if (LOG.isDebugEnabled()) { + LOG.debug("Creating new instance for " + serDeserClassName); + } + Class serDeserClass = Class.forName(serDeserClassName); + this.serDeser = (EntitySerDeser) serDeserClass.newInstance(); + _entitySerDeserCache.put(serDeserClassName, this.serDeser); + } catch (Exception e) { + if (LOG.isDebugEnabled()) { + LOG.warn("Class not found for " + serDeserClassName + ": " + e.getMessage(), e); + } + } + } + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Service.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Service.java index 22d70ed151..e0072ea87e 100644 --- 
a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Service.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Service.java @@ -21,8 +21,8 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface Service { - String value() default ""; + String value() default ""; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ServicePath.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ServicePath.java index 8c712d08b3..f693a65d0d 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ServicePath.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ServicePath.java @@ -22,10 +22,9 @@ import java.lang.annotation.Target; /** - * This class is for service client for generic entity creation API (entities and metrics) - * + * This class is for service client for generic entity creation API (entities and metrics). 
*/ -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface ServicePath { diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringArraySerDeser.java index 635065bb3d..db394c5144 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringArraySerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringArraySerDeser.java @@ -16,79 +16,80 @@ */ package org.apache.eagle.log.entity.meta; -import java.io.UnsupportedEncodingException; - import org.apache.eagle.common.ByteUtil; +import java.io.UnsupportedEncodingException; + /** - * String array entity serializer and deserializer - * + * String array entity serializer and deserializer. */ public class StringArraySerDeser implements EntitySerDeser { - public static final int MAX_STRING_LENGTH = 65535; - public static final String UTF_8 = "UTF-8"; - - @Override - public String[] deserialize(byte[] bytes) { - if(bytes == null || bytes.length < 4) - return null; - int offset = 0; - // get size of int array - final int size = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - final String[] strings = new String[size]; - try { - for(int i = 0; i < size; i++) { - final int len = ByteUtil.bytesToInt(bytes, offset); - offset += 4; - strings[i] = new String(bytes, offset, len, UTF_8); - offset += len; - } - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("Invalid byte array"); - } - return strings; - } - - /** - * size + str1 length + str1 + str2 length + str2 + ... 
- * 4B 4B n1B 4B n2B - * - * @param obj - * @return - */ - @Override - public byte[] serialize(String[] array) { - if(array == null) - return null; - final int size = array.length; - final byte[][] tmp = new byte[size][]; - int total = 4 + 4 * size; - for (int i = 0; i < size; ++i) { - try { - tmp[i] = array[i].getBytes(UTF_8); - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("String doesn't support UTF-8 encoding: " + array[i]); - } - total += tmp[i].length; - } - final byte[] result = new byte[total]; - int offset = 0; - ByteUtil.intToBytes(size, result, offset); - offset += 4; - for (int i = 0; i < size; ++i) { - ByteUtil.intToBytes(tmp[i].length, result, offset); - offset += 4; - System.arraycopy(tmp[i], 0, result, offset, tmp[i].length); - offset += tmp[i].length; - } - return result; - } + public static final int MAX_STRING_LENGTH = 65535; + public static final String UTF_8 = "UTF-8"; + + @Override + public String[] deserialize(byte[] bytes) { + if (bytes == null || bytes.length < 4) { + return null; + } + int offset = 0; + // get size of int array + final int size = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + final String[] strings = new String[size]; + try { + for (int i = 0; i < size; i++) { + final int len = ByteUtil.bytesToInt(bytes, offset); + offset += 4; + strings[i] = new String(bytes, offset, len, UTF_8); + offset += len; + } + } catch (UnsupportedEncodingException e) { + throw new IllegalArgumentException("Invalid byte array"); + } + return strings; + } + + /** + * size + str1 length + str1 + str2 length + str2 + ... 
+ * 4B 4B n1B 4B n2B + * + * @param obj + * @return + */ + @Override + public byte[] serialize(String[] array) { + if (array == null) { + return null; + } + final int size = array.length; + final byte[][] tmp = new byte[size][]; + int total = 4 + 4 * size; + for (int i = 0; i < size; ++i) { + try { + tmp[i] = array[i].getBytes(UTF_8); + } catch (UnsupportedEncodingException e) { + throw new IllegalArgumentException("String doesn't support UTF-8 encoding: " + array[i]); + } + total += tmp[i].length; + } + final byte[] result = new byte[total]; + int offset = 0; + ByteUtil.intToBytes(size, result, offset); + offset += 4; + for (int i = 0; i < size; ++i) { + ByteUtil.intToBytes(tmp[i].length, result, offset); + offset += 4; + System.arraycopy(tmp[i], 0, result, offset, tmp[i].length); + offset += tmp[i].length; + } + return result; + } - @Override - public Class type() { - return String[].class; - } + @Override + public Class type() { + return String[].class; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringSerDeser.java index eef6e4fd04..532e27af68 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringSerDeser.java @@ -18,22 +18,24 @@ public class StringSerDeser implements EntitySerDeser { - public StringSerDeser(){} + public StringSerDeser() { + } - @Override - public String deserialize(byte[] bytes){ - return new String(bytes); - } - - @Override - public byte[] serialize(String obj){ - if(obj == null) - return null; - return obj.getBytes(); - } + @Override + public String deserialize(byte[] bytes) { + return new String(bytes); + } - @Override - public Class type() { - return String.class; - } + @Override + public byte[] 
serialize(String obj) { + if (obj == null) { + return null; + } + return obj.getBytes(); + } + + @Override + public Class type() { + return String.class; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Table.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Table.java index ac722cda02..550c647bb5 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Table.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Table.java @@ -21,8 +21,8 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface Table { - String value() default ""; + String value() default ""; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Tags.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Tags.java index ac9b328660..f6c7d142cd 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Tags.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Tags.java @@ -22,10 +22,10 @@ import java.lang.annotation.Target; /** - * specify list of tag names which are used by embracing hbase table + * specify list of tag names which are used by embracing hbase table. 
*/ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) public @interface Tags { - String[] value() default {""}; + String[] value() default {""}; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/TimeSeries.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/TimeSeries.java index 01023bc0fe..ac29edf834 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/TimeSeries.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/TimeSeries.java @@ -21,8 +21,8 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -@Target({ElementType.TYPE}) +@Target( {ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface TimeSeries { - boolean value() default true; + boolean value() default true; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericByRowkeyReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericByRowkeyReader.java index 43a707380b..17d1ad5d78 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericByRowkeyReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericByRowkeyReader.java @@ -16,63 +16,62 @@ */ package org.apache.eagle.log.entity.old; +import org.apache.eagle.common.EagleBase64Wrapper; +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; +import org.apache.eagle.log.base.taggedlog.TaggedLogObjectMapper; +import org.apache.eagle.log.entity.InternalLog; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.slf4j.Logger; 
-import org.slf4j.LoggerFactory; +public class GenericByRowkeyReader { + private static final Logger LOG = LoggerFactory.getLogger(GenericByRowkeyReader.class); -import org.apache.eagle.log.base.taggedlog.TaggedLogObjectMapper; -import org.apache.eagle.log.entity.InternalLog; -import org.apache.eagle.common.EagleBase64Wrapper; + private TaggedLogObjectMapper mapper; + private String table; + private String columnFamily; + private boolean outputAll; + private List outputColumns; + private GenericReader.EntityFactory entityFactory; -public class GenericByRowkeyReader { - private static final Logger LOG = LoggerFactory.getLogger(GenericByRowkeyReader.class); + public GenericByRowkeyReader(TaggedLogObjectMapper mapper, GenericReader.EntityFactory entityFactory, String table, String columnFamily, boolean outputAll, List outputColumns) { + this.mapper = mapper; + this.entityFactory = entityFactory; + this.table = table; + this.columnFamily = columnFamily; + this.outputAll = outputAll; + this.outputColumns = outputColumns; + } + + public List read(List rowkeys) throws IOException { + HBaseLogByRowkeyReader reader = new HBaseLogByRowkeyReader(this.table, this.columnFamily, + outputAll, outputColumns); + List entities = new ArrayList(); + try { + reader.open(); + for (String rowkeyString : rowkeys) { + byte[] rowkey = EagleBase64Wrapper.decode(rowkeyString); + InternalLog log = reader.get(rowkey); + TaggedLogAPIEntity entity = entityFactory.create(); + entities.add(entity); + entity.setTags(log.getTags()); + entity.setTimestamp(log.getTimestamp()); + entity.setEncodedRowkey(log.getEncodedRowkey()); + entity.setPrefix(log.getPrefix()); + Map qualifierValues = log.getQualifierValues(); + mapper.populateQualifierValues(entity, qualifierValues); + } + } catch (IOException ex) { + LOG.error("Fail read by rowkey", ex); + throw ex; + } finally { + reader.close(); + } - private TaggedLogObjectMapper mapper; - private String table; - private String columnFamily; - private boolean 
outputAll; - private List outputColumns; - private GenericReader.EntityFactory entityFactory; - - public GenericByRowkeyReader(TaggedLogObjectMapper mapper, GenericReader.EntityFactory entityFactory, String table, String columnFamily, boolean outputAll, List outputColumns){ - this.mapper = mapper; - this.entityFactory = entityFactory; - this.table = table; - this.columnFamily = columnFamily; - this.outputAll = outputAll; - this.outputColumns = outputColumns; - } - - public List read(List rowkeys) throws IOException{ - HBaseLogByRowkeyReader reader = new HBaseLogByRowkeyReader(this.table, this.columnFamily, - outputAll, outputColumns); - List entities = new ArrayList(); - try{ - reader.open(); - for(String rowkeyString : rowkeys){ - byte[] rowkey = EagleBase64Wrapper.decode(rowkeyString); - InternalLog log = reader.get(rowkey); - TaggedLogAPIEntity entity = entityFactory.create(); - entities.add(entity); - entity.setTags(log.getTags()); - entity.setTimestamp(log.getTimestamp()); - entity.setEncodedRowkey(log.getEncodedRowkey()); - entity.setPrefix(log.getPrefix()); - Map qualifierValues = log.getQualifierValues(); - mapper.populateQualifierValues(entity, qualifierValues); - } - }catch(IOException ex){ - LOG.error("Fail read by rowkey", ex); - throw ex; - }finally{ - reader.close(); - } - - return entities; - } + return entities; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericDeleter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericDeleter.java index e97b522558..2c07e37ea3 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericDeleter.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericDeleter.java @@ -16,36 +16,35 @@ */ package org.apache.eagle.log.entity.old; -import java.io.IOException; -import java.util.*; - import 
org.apache.eagle.common.EagleBase64Wrapper; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.HBaseInternalLogHelper; -import org.apache.eagle.log.entity.meta.EntityConstants; import org.apache.eagle.log.entity.InternalLog; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import org.apache.eagle.log.entity.meta.EntityConstants; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; import org.apache.eagle.log.entity.meta.IndexDefinition; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.*; public class GenericDeleter { - private static final Logger LOG = LoggerFactory.getLogger(GenericDeleter.class); + private static final Logger LOG = LoggerFactory.getLogger(GenericDeleter.class); + + private final HBaseLogDeleter deleter; + private final HBaseLogByRowkeyReader reader; - private final HBaseLogDeleter deleter; - private final HBaseLogByRowkeyReader reader; - - - public GenericDeleter(EntityDefinition ed) { - this(ed.getTable(), ed.getColumnFamily()); - } - - public GenericDeleter(String table, String columnFamily) { - this.deleter = new HBaseLogDeleter(table, columnFamily); - this.reader = new HBaseLogByRowkeyReader(table, columnFamily, true, null); - } + + public GenericDeleter(EntityDefinition ed) { + this(ed.getTable(), ed.getColumnFamily()); + } + + public GenericDeleter(String table, String columnFamily) { + this.deleter = new HBaseLogDeleter(table, columnFamily); + this.reader = new HBaseLogByRowkeyReader(table, columnFamily, true, null); + } public void deleteByRowkeys(List rowkeys) throws Exception { try { @@ -66,70 +65,70 @@ public void deleteByEncodedRowkeys(List encodedRowkeys) throws Exception throw e; } } - - public List delete(List entities) throws Exception{ + + public List delete(List entities) throws Exception { List encodedRowkey = new LinkedList(); - try{ - 
deleter.open(); - final Map, List> entityClassMap = classifyEntities(entities); - for (Map.Entry, List> entry : entityClassMap.entrySet()) { - final Class clazz = entry.getKey(); - final List entityList = entry.getValue(); + try { + deleter.open(); + final Map, List> entityClassMap = classifyEntities(entities); + for (Map.Entry, List> entry : entityClassMap.entrySet()) { + final Class clazz = entry.getKey(); + final List entityList = entry.getValue(); - final EntityDefinition entityDef = EntityDefinitionManager.getEntityDefinitionByEntityClass(clazz); - // TODO: we should fix this hardcoded prefix hack - fixPrefixAndTimestampIssue(entityList, entityDef); + final EntityDefinition entityDef = EntityDefinitionManager.getEntityDefinitionByEntityClass(clazz); + // TODO: we should fix this hardcoded prefix hack + fixPrefixAndTimestampIssue(entityList, entityDef); - final List rowkeys = RowkeyHelper.getRowkeysByEntities(entityList, entityDef); - // Check index - final IndexDefinition[] indexes = entityDef.getIndexes(); - if (indexes != null && indexes.length > 0) { - reader.open(); - final List logs = reader.get(rowkeys); - final List newEntities = HBaseInternalLogHelper.buildEntities(logs, entityDef); - for (TaggedLogAPIEntity entity : newEntities) { - // Add index rowkeys - for (IndexDefinition index : indexes) { - final byte[] indexRowkey = index.generateIndexRowkey(entity); - rowkeys.add(indexRowkey); - } - } - } - for(byte[] rowkey:rowkeys) { + final List rowkeys = RowkeyHelper.getRowkeysByEntities(entityList, entityDef); + // Check index + final IndexDefinition[] indexes = entityDef.getIndexes(); + if (indexes != null && indexes.length > 0) { + reader.open(); + final List logs = reader.get(rowkeys); + final List newEntities = HBaseInternalLogHelper.buildEntities(logs, entityDef); + for (TaggedLogAPIEntity entity : newEntities) { + // Add index rowkeys + for (IndexDefinition index : indexes) { + final byte[] indexRowkey = index.generateIndexRowkey(entity); + 
rowkeys.add(indexRowkey); + } + } + } + for (byte[] rowkey : rowkeys) { encodedRowkey.add(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey)); } - deleter.deleteRowkeys(rowkeys); - } - }catch(IOException ioe){ - LOG.error("Fail writing tagged log", ioe); - throw ioe; - }finally{ - deleter.close(); - } + deleter.deleteRowkeys(rowkeys); + } + } catch (IOException ioe) { + LOG.error("Fail writing tagged log", ioe); + throw ioe; + } finally { + deleter.close(); + } return encodedRowkey; - } + } - private void fixPrefixAndTimestampIssue(List entities, EntityDefinition entityDef) { - for (TaggedLogAPIEntity e : entities) { - e.setPrefix(entityDef.getPrefix()); - if (!entityDef.isTimeSeries()) { - e.setTimestamp(EntityConstants.FIXED_WRITE_TIMESTAMP); // set timestamp to MAX, then actually stored 0 - } - } - } + private void fixPrefixAndTimestampIssue(List entities, EntityDefinition entityDef) { + for (TaggedLogAPIEntity e : entities) { + e.setPrefix(entityDef.getPrefix()); + if (!entityDef.isTimeSeries()) { + e.setTimestamp(EntityConstants.FIXED_WRITE_TIMESTAMP); // set timestamp to MAX, then actually stored 0 + } + } + } - private Map, List> classifyEntities(List entities) { - final Map, List> result = new - HashMap, List>(); - for (TaggedLogAPIEntity entity : entities) { - final Class clazz = entity.getClass(); - List list = result.get(clazz); - if (list == null) { - list = new ArrayList(); - result.put(clazz, list); - } - list.add(entity); - } - return result; - } + private Map, List> classifyEntities(List entities) { + final Map, List> result = new + HashMap, List>(); + for (TaggedLogAPIEntity entity : entities) { + final Class clazz = entity.getClass(); + List list = result.get(clazz); + if (list == null) { + list = new ArrayList(); + result.put(clazz, list); + } + list.add(entity); + } + return result; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericReader.java 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericReader.java index 76e314b478..8d76832ca7 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericReader.java @@ -16,100 +16,94 @@ */ package org.apache.eagle.log.entity.old; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import org.apache.eagle.common.DateTimeUtil; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.base.taggedlog.TaggedLogObjectMapper; import org.apache.eagle.log.entity.InternalLog; -import org.apache.eagle.common.DateTimeUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.*; public class GenericReader { - private static final Logger LOG = LoggerFactory.getLogger(GenericReader.class); + private static final Logger LOG = LoggerFactory.getLogger(GenericReader.class); + + public interface EntityFactory { + public TaggedLogAPIEntity create(); + } + + private Schema schema; + private EntityFactory entityFactory; + private TaggedLogObjectMapper mapper; + + public GenericReader(TaggedLogObjectMapper mapper, Schema schema, EntityFactory factory) { + this.mapper = mapper; + this.schema = schema; + this.entityFactory = factory; + } + + public List read(String startTime, + String endTime, List tagNameValues, List outputTags, + List outputFields, String startRowkey, int pageSize) throws Exception { + // decode the query parameters + // TODO should support one tag has multiple tag values + Map> searchTags = new HashMap>(); + for (String tagNameValue : tagNameValues) { + String[] tmp = tagNameValue.split("="); + if (tmp == null || 
tmp.length <= 1) { + continue; // silently ignore this parameter + } + List tagValues = searchTags.get(tmp[0]); + if (tagValues == null) { + tagValues = new ArrayList(); + searchTags.put(tmp[0], tagValues); + } + tagValues.add(tmp[1]); + } - public interface EntityFactory{ - public TaggedLogAPIEntity create(); - } - - private Schema schema; - private EntityFactory entityFactory; - private TaggedLogObjectMapper mapper; - - public GenericReader(TaggedLogObjectMapper mapper, Schema schema, EntityFactory factory){ - this.mapper = mapper; - this.schema = schema; - this.entityFactory = factory; - } - - public List read(String startTime, - String endTime, List tagNameValues, List outputTags, - List outputFields, String startRowkey, int pageSize) throws Exception{ - Date start = DateTimeUtil.humanDateToDate(startTime); - Date end = DateTimeUtil.humanDateToDate(endTime); - - // decode the query parameters - // TODO should support one tag has multiple tag values - Map> searchTags = new HashMap>(); - for(String tagNameValue : tagNameValues){ - String[] tmp = tagNameValue.split("="); - if(tmp == null || tmp.length <=1){ - continue; // silently ignore this parameter - } - List tagValues = searchTags.get(tmp[0]); - if(tagValues == null){ - tagValues = new ArrayList(); - searchTags.put(tmp[0], tagValues); - } - tagValues.add(tmp[1]); - } - - int numTags = outputTags.size(); - int numFields = outputFields.size(); - byte[][] outputQualifiers = new byte[numTags+numFields][]; - int i = 0; - for(String tag : outputTags){ - outputQualifiers[i++] = tag.getBytes(); - } - for(String field : outputFields){ - outputQualifiers[i++] = field.getBytes(); - } - // shortcut to avoid read when pageSize=0 - List entities = new ArrayList(); - if(pageSize <= 0){ - return entities; // return empty entities - } + int numTags = outputTags.size(); + int numFields = outputFields.size(); + byte[][] outputQualifiers = new byte[numTags + numFields][]; + int i = 0; + for (String tag : outputTags) { + 
outputQualifiers[i++] = tag.getBytes(); + } + for (String field : outputFields) { + outputQualifiers[i++] = field.getBytes(); + } + // shortcut to avoid read when pageSize=0 + List entities = new ArrayList(); + if (pageSize <= 0) { + return entities; // return empty entities + } + Date start = DateTimeUtil.humanDateToDate(startTime); + Date end = DateTimeUtil.humanDateToDate(endTime); + HBaseLogReader reader = new HBaseLogReader(schema, start, end, searchTags, startRowkey, outputQualifiers); + try { + reader.open(); + InternalLog log; + int count = 0; + while ((log = reader.read()) != null) { + TaggedLogAPIEntity entity = entityFactory.create(); + entity.setTags(log.getTags()); + entity.setTimestamp(log.getTimestamp()); + entity.setEncodedRowkey(log.getEncodedRowkey()); + entity.setPrefix(log.getPrefix()); + entities.add(entity); - HBaseLogReader reader = new HBaseLogReader(schema, start, end, searchTags, startRowkey, outputQualifiers); - try{ - reader.open(); - InternalLog log; - int count = 0; - while ((log = reader.read()) != null) { - TaggedLogAPIEntity entity = entityFactory.create(); - entity.setTags(log.getTags()); - entity.setTimestamp(log.getTimestamp()); - entity.setEncodedRowkey(log.getEncodedRowkey()); - entity.setPrefix(log.getPrefix()); - entities.add(entity); - - Map qualifierValues = log.getQualifierValues(); - mapper.populateQualifierValues(entity, qualifierValues); - if(++count == pageSize) - break; - } - }catch(IOException ioe){ - LOG.error("Fail reading log", ioe); - throw ioe; - }finally{ - reader.close(); - } - return entities; - } + Map qualifierValues = log.getQualifierValues(); + mapper.populateQualifierValues(entity, qualifierValues); + if (++count == pageSize) { + break; + } + } + } catch (IOException ioe) { + LOG.error("Fail reading log", ioe); + throw ioe; + } finally { + reader.close(); + } + return entities; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericWriter.java 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericWriter.java index 3d292372fb..fd3fca6ef8 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericWriter.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericWriter.java @@ -16,12 +16,7 @@ */ package org.apache.eagle.log.entity.old; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - +import org.apache.eagle.common.EagleBase64Wrapper; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.base.taggedlog.TaggedLogObjectMapper; import org.apache.eagle.log.entity.HBaseLogWriter; @@ -29,65 +24,69 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.eagle.common.EagleBase64Wrapper; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; public class GenericWriter { - private static final Logger LOG = LoggerFactory.getLogger(GenericWriter.class); + private static final Logger LOG = LoggerFactory.getLogger(GenericWriter.class); + + private String table; + private String columnFamily; + private TaggedLogObjectMapper mapper; + + public GenericWriter(TaggedLogObjectMapper mapper, String table, String columnFamily) { + this.mapper = mapper; + this.table = table; + this.columnFamily = columnFamily; + } + + public List write(List entities) throws IOException { + HBaseLogWriter writer = new HBaseLogWriter(table, columnFamily); + List rowkeys = new ArrayList(); + + try { + writer.open(); + for (TaggedLogAPIEntity entity : entities) { + InternalLog log = new InternalLog(); + Map inputTags = entity.getTags(); + Map tags = new TreeMap(); + for (Map.Entry entry : inputTags.entrySet()) { + tags.put(entry.getKey(), entry.getValue()); + } + log.setTags(tags); + 
log.setTimestamp(entity.getTimestamp()); + log.setPrefix(entity.getPrefix()); + log.setQualifierValues(mapper.createQualifierValues(entity)); + byte[] rowkey = writer.write(log); + rowkeys.add(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey)); + } + } catch (IOException ioe) { + LOG.error("Fail writing tagged log", ioe); + throw ioe; + } finally { + writer.close(); + } + return rowkeys; + } - private String table; - private String columnFamily; - private TaggedLogObjectMapper mapper; - - public GenericWriter(TaggedLogObjectMapper mapper, String table, String columnFamily){ - this.mapper = mapper; - this.table = table; - this.columnFamily = columnFamily; - } - - public List write(List entities) throws IOException{ - HBaseLogWriter writer = new HBaseLogWriter(table, columnFamily); - List rowkeys = new ArrayList(); - - try{ - writer.open(); - for(TaggedLogAPIEntity entity : entities){ - InternalLog log = new InternalLog(); - Map inputTags = entity.getTags(); - Map tags = new TreeMap(); - for(Map.Entry entry : inputTags.entrySet()){ - tags.put(entry.getKey(), entry.getValue()); - } - log.setTags(tags); - log.setTimestamp(entity.getTimestamp()); - log.setPrefix(entity.getPrefix()); - log.setQualifierValues(mapper.createQualifierValues(entity)); - byte[] rowkey = writer.write(log); - rowkeys.add(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey)); - } - }catch(IOException ioe){ - LOG.error("Fail writing tagged log", ioe); - throw ioe; - }finally{ - writer.close(); - } - return rowkeys; - } - - public void updateByRowkey(List entities) throws IOException{ - HBaseLogWriter writer = new HBaseLogWriter(table, columnFamily); - try{ - writer.open(); - for(TaggedLogAPIEntity entity : entities){ - byte[] rowkey = EagleBase64Wrapper.decode(entity.getEncodedRowkey()); - InternalLog log = new InternalLog(); - log.setQualifierValues(mapper.createQualifierValues(entity)); - writer.updateByRowkey(rowkey, log); - } - }catch(IOException ioe){ - LOG.error("Fail writing 
tagged log", ioe); - throw ioe; - }finally{ - writer.close(); - } - } + public void updateByRowkey(List entities) throws IOException { + HBaseLogWriter writer = new HBaseLogWriter(table, columnFamily); + try { + writer.open(); + for (TaggedLogAPIEntity entity : entities) { + byte[] rowkey = EagleBase64Wrapper.decode(entity.getEncodedRowkey()); + InternalLog log = new InternalLog(); + log.setQualifierValues(mapper.createQualifierValues(entity)); + writer.updateByRowkey(rowkey, log); + } + } catch (IOException ioe) { + LOG.error("Fail writing tagged log", ioe); + throw ioe; + } finally { + writer.close(); + } + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogByRowkeyReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogByRowkeyReader.java index 37e55ac46a..7ac4ea0f35 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogByRowkeyReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogByRowkeyReader.java @@ -16,146 +16,144 @@ */ package org.apache.eagle.log.entity.old; -import java.io.Closeable; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.NavigableMap; - +import org.apache.eagle.common.ByteUtil; +import org.apache.eagle.common.EagleBase64Wrapper; import org.apache.eagle.common.config.EagleConfigFactory; import org.apache.eagle.log.base.taggedlog.NoSuchRowException; +import org.apache.eagle.log.entity.InternalLog; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTableFactory; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; -import org.apache.eagle.log.entity.InternalLog; -import org.apache.eagle.common.ByteUtil; -import 
org.apache.eagle.common.EagleBase64Wrapper; +import java.io.Closeable; +import java.io.IOException; +import java.util.*; /** - * Get details of rowkey and qualifiers given a raw rowkey. This function mostly is used for inspecting one row's content - * This only supports single column family, which is mostly used in log application + * Get details of rowkey and qualifiers given a raw rowkey. This function mostly is used for inspecting one row's content + * This only supports single column family, which is mostly used in log application. */ -public class HBaseLogByRowkeyReader implements Closeable{ - private String table; - private String columnFamily; - private byte[][] outputQualifiers; - private boolean includingAllQualifiers; - private HTableInterface tbl; - private boolean isOpen; - - /** - * if includingAllQualifiers is true, then the fourth argument outputQualifiers is ignored - * if includingAllQualifiers is false, then need calculate based on the fourth argument outputQualifiers - */ - public HBaseLogByRowkeyReader(String table, String columnFamily, boolean includingAllQualifiers, List qualifiers){ - this.table = table; - this.columnFamily = columnFamily; - if(qualifiers != null){ - this.outputQualifiers = new byte[qualifiers.size()][]; - int i = 0; - for(String qualifier : qualifiers){ - this.outputQualifiers[i++] = qualifier.getBytes(); - } - } - this.includingAllQualifiers = includingAllQualifiers; - } - - - public void open() throws IOException { - if (isOpen) - return; // silently return - try { - tbl = EagleConfigFactory.load().getHTable(this.table); - } catch (RuntimeException ex) { - throw new IOException(ex); - } - - isOpen = true; - } - - /** - * Here all qualifiers' values goes into qualifierValues of InternalLog as given a row, we can't differentiate it's a tag or a field - * @param rowkeys - * @return - * @throws IOException - */ - public List get(List rowkeys) throws IOException, NoSuchRowException { - final List gets = createGets(rowkeys); - 
final Result[] results = tbl.get(gets); - final List logs = new ArrayList(); - for (Result result : results) { - final InternalLog log = buildLog(result); - logs.add(log); - } - return logs; - } - - private List createGets(List rowkeys) { - final List gets = new ArrayList(); - for (byte[] rowkey : rowkeys) { - final Get get = createGet(rowkey); - gets.add(get); - } - return gets; - } - - - private Get createGet(byte[] rowkey) { - final Get get = new Get(rowkey); - byte[] cf = this.columnFamily.getBytes(); - if(includingAllQualifiers){ - get.addFamily(cf); - }else{ - for(byte[] outputQualifier : outputQualifiers){ - get.addColumn(cf, outputQualifier); - } - } - return get; - } - - - /** - * Here all qualifiers' values goes into qualifierValues of InternalLog as given a row, we can't differentiate it's a tag or a field - * @param rowkey - * @return - * @throws IOException - */ - public InternalLog get(byte[] rowkey) throws IOException, NoSuchRowException{ - final Get get = createGet(rowkey); - final Result result = tbl.get(get); - final InternalLog log = buildLog(result); - return log; - } - - private InternalLog buildLog(Result result) { - final InternalLog log = new InternalLog(); - final byte[] rowkey = result.getRow(); - log.setEncodedRowkey(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey)); - long timestamp = ByteUtil.bytesToLong(rowkey, 4); - timestamp = Long.MAX_VALUE - timestamp; - log.setTimestamp(timestamp); - Map qualifierValues = new HashMap(); - log.setQualifierValues(qualifierValues); - NavigableMap map = result.getFamilyMap(this.columnFamily.getBytes()); - if(map == null){ - throw new NoSuchRowException(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey)); - } - for(Map.Entry entry : map.entrySet()){ - byte[] qualifier = entry.getKey(); - byte[] value = entry.getValue(); - qualifierValues.put(new String(qualifier), value); - } - return log; - } - - - public void close() throws IOException { - if(tbl != null){ - new 
HTableFactory().releaseHTableInterface(tbl); - } - } +public class HBaseLogByRowkeyReader implements Closeable { + private String table; + private String columnFamily; + private byte[][] outputQualifiers; + private boolean includingAllQualifiers; + private HTableInterface tbl; + private boolean isOpen; + + /** + * if includingAllQualifiers is true, then the fourth argument outputQualifiers is ignored + * if includingAllQualifiers is false, then need calculate based on the fourth argument outputQualifiers. + */ + public HBaseLogByRowkeyReader(String table, String columnFamily, boolean includingAllQualifiers, List qualifiers) { + this.table = table; + this.columnFamily = columnFamily; + if (qualifiers != null) { + this.outputQualifiers = new byte[qualifiers.size()][]; + int i = 0; + for (String qualifier : qualifiers) { + this.outputQualifiers[i++] = qualifier.getBytes(); + } + } + this.includingAllQualifiers = includingAllQualifiers; + } + + + public void open() throws IOException { + if (isOpen) { + return; // silently return + } + try { + tbl = EagleConfigFactory.load().getHTable(this.table); + } catch (RuntimeException ex) { + throw new IOException(ex); + } + + isOpen = true; + } + + /** + * Here all qualifiers' values goes into qualifierValues of InternalLog as given a row, we can't differentiate it's a tag or a field. + * + * @param rowkeys + * @return + * @throws IOException + */ + public List get(List rowkeys) throws IOException, NoSuchRowException { + final List gets = createGets(rowkeys); + final Result[] results = tbl.get(gets); + final List logs = new ArrayList(); + for (Result result : results) { + final InternalLog log = buildLog(result); + logs.add(log); + } + return logs; + } + + /** + * Here all qualifiers' values goes into qualifierValues of InternalLog as given a row, we can't differentiate it's a tag or a field. 
+ * + * @param rowkey + * @return + * @throws IOException + */ + public InternalLog get(byte[] rowkey) throws IOException, NoSuchRowException { + final Get get = createGet(rowkey); + final Result result = tbl.get(get); + final InternalLog log = buildLog(result); + return log; + } + + + private List createGets(List rowkeys) { + final List gets = new ArrayList(); + for (byte[] rowkey : rowkeys) { + final Get get = createGet(rowkey); + gets.add(get); + } + return gets; + } + + + private Get createGet(byte[] rowkey) { + final Get get = new Get(rowkey); + byte[] cf = this.columnFamily.getBytes(); + if (includingAllQualifiers) { + get.addFamily(cf); + } else { + for (byte[] outputQualifier : outputQualifiers) { + get.addColumn(cf, outputQualifier); + } + } + return get; + } + + private InternalLog buildLog(Result result) { + final InternalLog log = new InternalLog(); + final byte[] rowkey = result.getRow(); + log.setEncodedRowkey(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey)); + long timestamp = ByteUtil.bytesToLong(rowkey, 4); + timestamp = Long.MAX_VALUE - timestamp; + log.setTimestamp(timestamp); + Map qualifierValues = new HashMap(); + log.setQualifierValues(qualifierValues); + NavigableMap map = result.getFamilyMap(this.columnFamily.getBytes()); + if (map == null) { + throw new NoSuchRowException(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey)); + } + for (Map.Entry entry : map.entrySet()) { + byte[] qualifier = entry.getKey(); + byte[] value = entry.getValue(); + qualifierValues.put(new String(qualifier), value); + } + return log; + } + + + public void close() throws IOException { + if (tbl != null) { + new HTableFactory().releaseHTableInterface(tbl); + } + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogDeleter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogDeleter.java index 3460949e3e..a2da1637bf 100644 --- 
a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogDeleter.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogDeleter.java @@ -16,116 +16,118 @@ */ package org.apache.eagle.log.entity.old; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - +import org.apache.eagle.common.EagleBase64Wrapper; import org.apache.eagle.common.config.EagleConfigFactory; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; +import org.apache.eagle.log.entity.InternalLog; +import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.HTableFactory; import org.apache.hadoop.hbase.client.HTableInterface; -import org.apache.eagle.log.entity.InternalLog; -import org.apache.eagle.log.entity.meta.EntityDefinition; -import org.apache.eagle.common.EagleBase64Wrapper; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class HBaseLogDeleter implements LogDeleter { + private HTableInterface tbl; + private String table; + private String columnFamily; + + public HBaseLogDeleter(String table, String columnFamily) { + this.table = table; + this.columnFamily = columnFamily; + } + + @Override + public void open() throws IOException { + try { + tbl = EagleConfigFactory.load().getHTable(this.table); + } catch (RuntimeException ex) { + throw new IOException(ex); + } + } + + @Override + public void close() throws IOException { + if (tbl != null) { + new HTableFactory().releaseHTableInterface(tbl); + } + } + + @Override + public void flush() throws IOException { + throw new IllegalArgumentException("Not supported flush for hbase delete"); + } + + /** + * support delete by constructing a rowkey or by encoded rowkey passed from client. 
+ */ + @Override + public void delete(InternalLog log) throws IOException { + final byte[] rowkey = RowkeyHelper.getRowkey(log); + final Delete delete = createDelete(rowkey); + tbl.delete(delete); + } + + public void delete(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception { + final byte[] rowkey = RowkeyHelper.getRowkey(entity, entityDef); + final Delete delete = createDelete(rowkey); + tbl.delete(delete); + } + + /** + * Batch delete. + * + * @param logs + * @throws IOException + */ + public void delete(List logs) throws IOException { + final List rowkeys = RowkeyHelper.getRowkeysByLogs(logs); + deleteRowkeys(rowkeys); + } + + + /** + * Batch delete. + * + * @throws Exception + */ + public void deleteEntities(List entities, EntityDefinition entityDef) throws Exception { + final List rowkeys = RowkeyHelper.getRowkeysByEntities(entities, entityDef); + deleteRowkeys(rowkeys); + } + + /** + * Batch delete. + * + * @throws IOException + */ + public void deleteRowkeys(List rowkeys) throws IOException { + final List deletes = new ArrayList(rowkeys.size()); + for (byte[] rowkey : rowkeys) { + final Delete delete = createDelete(rowkey); + deletes.add(delete); + } + tbl.delete(deletes); + } + + @Override + public void deleteRowByRowkey(String encodedRowkey) throws IOException { + byte[] row = EagleBase64Wrapper.decode(encodedRowkey); + final Delete delete = createDelete(row); + tbl.delete(delete); + } + + public void deleteRowByRowkey(List encodedRowkeys) throws IOException { + final List rowkeys = RowkeyHelper.getRowkeysByEncodedRowkeys(encodedRowkeys); + deleteRowkeys(rowkeys); + } -public class HBaseLogDeleter implements LogDeleter{ - private HTableInterface tbl; - private String table; - private String columnFamily; - - public HBaseLogDeleter(String table, String columnFamily) { - this.table = table; - this.columnFamily = columnFamily; - } - - @Override - public void open() throws IOException { - try{ - tbl = 
EagleConfigFactory.load().getHTable(this.table); - }catch(RuntimeException ex){ - throw new IOException(ex); - } - } - - @Override - public void close() throws IOException { - if(tbl != null){ - new HTableFactory().releaseHTableInterface(tbl); - } - } - - @Override - public void flush() throws IOException{ - throw new IllegalArgumentException("Not supported flush for hbase delete"); - } - - /** - * support delete by constructing a rowkey or by encoded rowkey passed from client - */ - @Override - public void delete(InternalLog log) throws IOException{ - final byte[] rowkey = RowkeyHelper.getRowkey(log); - final Delete delete = createDelete(rowkey); - tbl.delete(delete); - } - - public void delete(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception { - final byte[] rowkey = RowkeyHelper.getRowkey(entity, entityDef); - final Delete delete = createDelete(rowkey); - tbl.delete(delete); - } - - /** - * Batch delete - * @param logs - * @throws IOException - */ - public void delete(List logs) throws IOException{ - final List rowkeys = RowkeyHelper.getRowkeysByLogs(logs); - deleteRowkeys(rowkeys); - } - - - /** - * Batch delete - * @throws Exception - */ - public void deleteEntities(List entities, EntityDefinition entityDef) throws Exception{ - final List rowkeys = RowkeyHelper.getRowkeysByEntities(entities, entityDef); - deleteRowkeys(rowkeys); - } - - /** - * Batch delete - * @throws IOException - */ - public void deleteRowkeys(List rowkeys) throws IOException { - final List deletes = new ArrayList(rowkeys.size()); - for (byte[] rowkey : rowkeys) { - final Delete delete = createDelete(rowkey); - deletes.add(delete); - } - tbl.delete(deletes); - } - - @Override - public void deleteRowByRowkey(String encodedRowkey) throws IOException{ - byte[] row = EagleBase64Wrapper.decode(encodedRowkey); - final Delete delete = createDelete(row); - tbl.delete(delete); - } - - public void deleteRowByRowkey(List encodedRowkeys) throws IOException { - final List rowkeys 
= RowkeyHelper.getRowkeysByEncodedRowkeys(encodedRowkeys); - deleteRowkeys(rowkeys); - } - - private Delete createDelete(byte[] row) throws IOException{ - Delete delete = new Delete(row); - delete.deleteFamily(columnFamily.getBytes()); - return delete; - } + private Delete createDelete(byte[] row) throws IOException { + Delete delete = new Delete(row); + delete.deleteFamily(columnFamily.getBytes()); + return delete; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogReader.java index c75b94f90c..d21bc21070 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogReader.java @@ -16,24 +16,12 @@ */ package org.apache.eagle.log.entity.old; -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.SortedMap; -import java.util.TreeMap; - +import org.apache.eagle.common.ByteUtil; +import org.apache.eagle.common.EagleBase64Wrapper; import org.apache.eagle.common.config.EagleConfigFactory; import org.apache.eagle.log.entity.InternalLog; import org.apache.eagle.log.entity.LogReader; -import org.apache.hadoop.hbase.client.HTableFactory; -import org.apache.hadoop.hbase.client.HTableInterface; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.ResultScanner; -import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.filter.RegexStringComparator; @@ -41,207 +29,211 @@ import org.slf4j.Logger; 
import org.slf4j.LoggerFactory; -import org.apache.eagle.common.ByteUtil; -import org.apache.eagle.common.EagleBase64Wrapper; +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.*; public class HBaseLogReader implements LogReader { - private static Logger LOG = LoggerFactory.getLogger(HBaseLogReader.class); - - protected byte[][] qualifiers; - private HTableInterface tbl; - private byte[] startKey; - private byte[] stopKey; - protected Map> searchTags; - - private ResultScanner rs; - private boolean isOpen = false; - - private Schema schema; - - public HBaseLogReader(Schema schema, Date startTime, Date endTime, - Map> searchTags, String lastScanKey, - byte[][] outputQualifier){ - this.schema = schema; - this.qualifiers = outputQualifier; - this.startKey = buildRowKey(schema.getPrefix(), startTime); - if (lastScanKey == null) { - this.stopKey = buildRowKey(schema.getPrefix(), endTime); - } else { - // build stop key - this.stopKey = EagleBase64Wrapper.decode(lastScanKey); - // concat byte 0 to exclude this stopKey - this.stopKey = ByteUtil.concat(this.stopKey, new byte[] { 0 }); - } - this.searchTags = searchTags; - } - - /** - * TODO If the required field is null for a row, then this row will not be fetched. 
That could be a problem for counting - * Need another version of read to strictly get the number of rows which will return all the columns for a column family - */ - public void open() throws IOException { - if (isOpen) - return; // silently return - try { - tbl = EagleConfigFactory.load().getHTable(schema.getTable()); - } catch (RuntimeException ex) { - throw new IOException(ex); - } - - String rowkeyRegex = buildRegex2(searchTags); - RegexStringComparator regexStringComparator = new RegexStringComparator( - rowkeyRegex); - regexStringComparator.setCharset(Charset.forName("ISO-8859-1")); - RowFilter filter = new RowFilter(CompareOp.EQUAL, regexStringComparator); - FilterList filterList = new FilterList(); - filterList.addFilter(filter); - Scan s1 = new Scan(); - // reverse timestamp, startRow is stopKey, and stopRow is startKey - s1.setStartRow(stopKey); - s1.setStopRow(startKey); - s1.setFilter(filterList); - // TODO the # of cached rows should be minimum of (pagesize and 100) - s1.setCaching(100); - // TODO not optimized for all applications - s1.setCacheBlocks(true); - // scan specified columnfamily and qualifiers - for(byte[] qualifier : qualifiers){ - s1.addColumn(schema.getColumnFamily().getBytes(), qualifier); - } - rs = tbl.getScanner(s1); - isOpen = true; - } - - public void close() throws IOException { - if(tbl != null){ - new HTableFactory().releaseHTableInterface(tbl); - } - if(rs != null){ - rs.close(); - } - } - - public void flush() throws IOException { - tbl.flushCommits(); - } - - private byte[] buildRowKey(String appName, Date t){ - byte[] key = new byte[4 + 8]; - byte[] b = ByteUtil.intToBytes(appName.hashCode()); - System.arraycopy(b, 0, key, 0, 4); - // reverse timestamp - long ts = Long.MAX_VALUE - t.getTime(); - System.arraycopy(ByteUtil.longToBytes(ts), 0, key, 4, 8); - return key; - } - - /** - * one search tag may have multiple values which have OR relationship, and relationship between - * different search tags is AND - * the query is 
like "(TAG1=value11 OR TAG1=value12) AND TAG2=value2" - * @param tags - * @return - */ - protected String buildRegex2(Map> tags){ - // TODO need consider that \E could be part of tag, refer to https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java - SortedMap> tagHash = new TreeMap>(); - - for(Map.Entry> entry : tags.entrySet()){ - String tagName = entry.getKey(); - List stringValues = entry.getValue(); - List hashValues = new ArrayList(1); - for(String value : stringValues){ - hashValues.add(value.hashCode()); - } - tagHash.put(tagName.hashCode(), hashValues); - } - // ... - StringBuilder sb = new StringBuilder(); - sb.append("(?s)"); - sb.append("^(?:.{12})"); - sb.append("(?:.{").append(8).append("})*"); // for any number of tags - for (Map.Entry> entry : tagHash.entrySet()) { - try { - sb.append("\\Q"); - sb.append(new String(ByteUtil.intToBytes(entry.getKey()), "ISO-8859-1")).append("\\E"); - List hashValues = entry.getValue(); - sb.append("(?:"); - boolean first = true; - for(Integer value : hashValues){ - if(!first){ - sb.append('|'); - } - sb.append("\\Q"); - sb.append(new String(ByteUtil.intToBytes(value), "ISO-8859-1")); - sb.append("\\E"); - first = false; - } - sb.append(")"); - sb.append("(?:.{").append(8).append("})*"); // for any number of tags - } catch (Exception ex) { - LOG.error("Constructing regex error", ex); - } - } - sb.append("$"); - if (LOG.isDebugEnabled()) { - LOG.debug("Pattern is " + sb.toString()); - } - return sb.toString(); - } - - public InternalLog read() throws IOException { - if (rs == null) - throw new IllegalArgumentException( - "ResultScanner must be initialized before reading"); - - InternalLog t = null; - - Result r = rs.next(); - if (r != null) { - byte[] row = r.getRow(); - // skip the first 4 bytes : prefix - long timestamp = ByteUtil.bytesToLong(row, 4); - // reverse timestamp - timestamp = Long.MAX_VALUE - timestamp; - int count = 0; - if(qualifiers != null){ - count = qualifiers.length; - } - byte[][] 
values = new byte[count][]; - Map allQualifierValues = new HashMap(); - for (int i = 0; i < count; i++) { - // TODO if returned value is null, it means no this column for this row, so why set null to the object? - values[i] = r.getValue(schema.getColumnFamily().getBytes(), qualifiers[i]); - allQualifierValues.put(new String(qualifiers[i]), values[i]); - } - t = buildObject(row, timestamp, allQualifierValues); - } - - return t; - } - - public InternalLog buildObject(byte[] row, long timestamp, - Map allQualifierValues) { - InternalLog alertDetail = new InternalLog(); - String myRow = EagleBase64Wrapper.encodeByteArray2URLSafeString(row); - alertDetail.setEncodedRowkey(myRow); - alertDetail.setPrefix(schema.getPrefix()); - alertDetail.setSearchTags(searchTags); - alertDetail.setTimestamp(timestamp); - - Map logQualifierValues = new HashMap(); - Map logTags = new HashMap(); - for (Map.Entry entry : allQualifierValues.entrySet()) { - if (schema.isTag(entry.getKey())) { - if (entry.getValue() != null) { - logTags.put(entry.getKey(), new String(entry.getValue())); - } - } else { - logQualifierValues.put(entry.getKey(),entry.getValue()); - } - } - alertDetail.setQualifierValues(logQualifierValues); - alertDetail.setTags(logTags); - return alertDetail; - } + private static Logger LOG = LoggerFactory.getLogger(HBaseLogReader.class); + + protected byte[][] qualifiers; + private HTableInterface tbl; + private byte[] startKey; + private byte[] stopKey; + protected Map> searchTags; + + private ResultScanner rs; + private boolean isOpen = false; + + private Schema schema; + + public HBaseLogReader(Schema schema, Date startTime, Date endTime, + Map> searchTags, String lastScanKey, + byte[][] outputQualifier) { + this.schema = schema; + this.qualifiers = outputQualifier; + this.startKey = buildRowKey(schema.getPrefix(), startTime); + if (lastScanKey == null) { + this.stopKey = buildRowKey(schema.getPrefix(), endTime); + } else { + // build stop key + this.stopKey = 
EagleBase64Wrapper.decode(lastScanKey); + // concat byte 0 to exclude this stopKey + this.stopKey = ByteUtil.concat(this.stopKey, new byte[] {0}); + } + this.searchTags = searchTags; + } + + /** + * TODO If the required field is null for a row, then this row will not be fetched. That could be a problem for counting + * Need another version of read to strictly get the number of rows which will return all the columns for a column family + */ + public void open() throws IOException { + if (isOpen) { + return; // silently return + } + try { + tbl = EagleConfigFactory.load().getHTable(schema.getTable()); + } catch (RuntimeException ex) { + throw new IOException(ex); + } + + String rowkeyRegex = buildRegex2(searchTags); + RegexStringComparator regexStringComparator = new RegexStringComparator( + rowkeyRegex); + regexStringComparator.setCharset(Charset.forName("ISO-8859-1")); + RowFilter filter = new RowFilter(CompareOp.EQUAL, regexStringComparator); + FilterList filterList = new FilterList(); + filterList.addFilter(filter); + Scan s1 = new Scan(); + // reverse timestamp, startRow is stopKey, and stopRow is startKey + s1.setStartRow(stopKey); + s1.setStopRow(startKey); + s1.setFilter(filterList); + // TODO the # of cached rows should be minimum of (pagesize and 100) + s1.setCaching(100); + // TODO not optimized for all applications + s1.setCacheBlocks(true); + // scan specified columnfamily and qualifiers + for (byte[] qualifier : qualifiers) { + s1.addColumn(schema.getColumnFamily().getBytes(), qualifier); + } + rs = tbl.getScanner(s1); + isOpen = true; + } + + public void close() throws IOException { + if (tbl != null) { + new HTableFactory().releaseHTableInterface(tbl); + } + if (rs != null) { + rs.close(); + } + } + + public void flush() throws IOException { + tbl.flushCommits(); + } + + private byte[] buildRowKey(String appName, Date t) { + byte[] key = new byte[4 + 8]; + byte[] b = ByteUtil.intToBytes(appName.hashCode()); + System.arraycopy(b, 0, key, 0, 4); + // 
reverse timestamp + long ts = Long.MAX_VALUE - t.getTime(); + System.arraycopy(ByteUtil.longToBytes(ts), 0, key, 4, 8); + return key; + } + + /** + * one search tag may have multiple values which have OR relationship, and relationship between + * different search tags is AND + * the query is like "(TAG1=value11 OR TAG1=value12) AND TAG2=value2". + * + * @param tags + * @return + */ + protected String buildRegex2(Map> tags) { + // TODO need consider that \E could be part of tag, refer to https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java + SortedMap> tagHash = new TreeMap>(); + + for (Map.Entry> entry : tags.entrySet()) { + String tagName = entry.getKey(); + List stringValues = entry.getValue(); + List hashValues = new ArrayList(1); + for (String value : stringValues) { + hashValues.add(value.hashCode()); + } + tagHash.put(tagName.hashCode(), hashValues); + } + // ... + StringBuilder sb = new StringBuilder(); + sb.append("(?s)"); + sb.append("^(?:.{12})"); + sb.append("(?:.{").append(8).append("})*"); // for any number of tags + for (Map.Entry> entry : tagHash.entrySet()) { + try { + sb.append("\\Q"); + sb.append(new String(ByteUtil.intToBytes(entry.getKey()), "ISO-8859-1")).append("\\E"); + List hashValues = entry.getValue(); + sb.append("(?:"); + boolean first = true; + for (Integer value : hashValues) { + if (!first) { + sb.append('|'); + } + sb.append("\\Q"); + sb.append(new String(ByteUtil.intToBytes(value), "ISO-8859-1")); + sb.append("\\E"); + first = false; + } + sb.append(")"); + sb.append("(?:.{").append(8).append("})*"); // for any number of tags + } catch (Exception ex) { + LOG.error("Constructing regex error", ex); + } + } + sb.append("$"); + if (LOG.isDebugEnabled()) { + LOG.debug("Pattern is " + sb.toString()); + } + return sb.toString(); + } + + public InternalLog read() throws IOException { + if (rs == null) { + throw new IllegalArgumentException( + "ResultScanner must be initialized before reading"); + } + + InternalLog t = 
null; + + Result r = rs.next(); + if (r != null) { + byte[] row = r.getRow(); + // skip the first 4 bytes : prefix + long timestamp = ByteUtil.bytesToLong(row, 4); + // reverse timestamp + timestamp = Long.MAX_VALUE - timestamp; + int count = 0; + if (qualifiers != null) { + count = qualifiers.length; + } + byte[][] values = new byte[count][]; + Map allQualifierValues = new HashMap(); + for (int i = 0; i < count; i++) { + // TODO if returned value is null, it means no this column for this row, so why set null to the object? + values[i] = r.getValue(schema.getColumnFamily().getBytes(), qualifiers[i]); + allQualifierValues.put(new String(qualifiers[i]), values[i]); + } + t = buildObject(row, timestamp, allQualifierValues); + } + + return t; + } + + public InternalLog buildObject(byte[] row, long timestamp, + Map allQualifierValues) { + InternalLog alertDetail = new InternalLog(); + String myRow = EagleBase64Wrapper.encodeByteArray2URLSafeString(row); + alertDetail.setEncodedRowkey(myRow); + alertDetail.setPrefix(schema.getPrefix()); + alertDetail.setSearchTags(searchTags); + alertDetail.setTimestamp(timestamp); + + Map logQualifierValues = new HashMap(); + Map logTags = new HashMap(); + for (Map.Entry entry : allQualifierValues.entrySet()) { + if (schema.isTag(entry.getKey())) { + if (entry.getValue() != null) { + logTags.put(entry.getKey(), new String(entry.getValue())); + } + } else { + logQualifierValues.put(entry.getKey(), entry.getValue()); + } + } + alertDetail.setQualifierValues(logQualifierValues); + alertDetail.setTags(logTags); + return alertDetail; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/LogDeleter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/LogDeleter.java index c5087e729a..f36be1c7f7 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/LogDeleter.java +++ 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/LogDeleter.java @@ -16,19 +16,19 @@ */ package org.apache.eagle.log.entity.old; +import org.apache.eagle.log.entity.InternalLog; + import java.io.Closeable; import java.io.IOException; -import org.apache.eagle.log.entity.InternalLog; +public interface LogDeleter extends Closeable { + public void flush() throws IOException; -public interface LogDeleter extends Closeable{ - public void flush() throws IOException; + public void open() throws IOException; - public void open() throws IOException; + public void close() throws IOException; - public void close() throws IOException; + public void delete(InternalLog log) throws IOException; - public void delete(InternalLog log) throws IOException; - - public void deleteRowByRowkey(String encodedRowkey) throws IOException; + public void deleteRowByRowkey(String encodedRowkey) throws IOException; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/RowkeyHelper.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/RowkeyHelper.java index f2bdd15d40..b48b76f4c7 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/RowkeyHelper.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/RowkeyHelper.java @@ -16,70 +16,70 @@ */ package org.apache.eagle.log.entity.old; -import java.util.ArrayList; -import java.util.List; - +import org.apache.eagle.common.EagleBase64Wrapper; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.HBaseInternalLogHelper; import org.apache.eagle.log.entity.InternalLog; import org.apache.eagle.log.entity.RowkeyBuilder; import org.apache.eagle.log.entity.meta.EntityDefinition; -import org.apache.eagle.common.EagleBase64Wrapper; + +import java.util.ArrayList; +import java.util.List; public final class 
RowkeyHelper { - public static byte[] getRowkey(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception { - byte[] rowkey = null; - if(entity.getEncodedRowkey() != null && !(entity.getEncodedRowkey().isEmpty())){ - rowkey = EagleBase64Wrapper.decode(entity.getEncodedRowkey()); - }else{ - InternalLog log = HBaseInternalLogHelper.convertToInternalLog(entity, entityDef); - rowkey = RowkeyBuilder.buildRowkey(log); - } - return rowkey; - } + public static byte[] getRowkey(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception { + byte[] rowkey = null; + if (entity.getEncodedRowkey() != null && !(entity.getEncodedRowkey().isEmpty())) { + rowkey = EagleBase64Wrapper.decode(entity.getEncodedRowkey()); + } else { + InternalLog log = HBaseInternalLogHelper.convertToInternalLog(entity, entityDef); + rowkey = RowkeyBuilder.buildRowkey(log); + } + return rowkey; + } + - public static List getRowkeysByEntities(List entities, EntityDefinition entityDef) throws Exception { - final List result = new ArrayList(entities.size()); - for (TaggedLogAPIEntity entity : entities) { - final byte[] rowkey = getRowkey(entity, entityDef); - result.add(rowkey); - } - return result; - } - + public static byte[] getRowkey(InternalLog log) { + byte[] rowkey = null; + if (log.getEncodedRowkey() != null && !(log.getEncodedRowkey().isEmpty())) { + rowkey = EagleBase64Wrapper.decode(log.getEncodedRowkey()); + } else { + rowkey = RowkeyBuilder.buildRowkey(log); + } + return rowkey; + } - public static byte[] getRowkey(InternalLog log) { - byte[] rowkey = null; - if(log.getEncodedRowkey() != null && !(log.getEncodedRowkey().isEmpty())){ - rowkey = EagleBase64Wrapper.decode(log.getEncodedRowkey()); - }else{ - rowkey = RowkeyBuilder.buildRowkey(log); - } - return rowkey; - } + public static byte[] getRowkey(String encodedRowkey) { + byte[] rowkey = EagleBase64Wrapper.decode(encodedRowkey); + return rowkey; + } - public static List getRowkeysByLogs(List logs) { - final List 
result = new ArrayList(logs.size()); - for (InternalLog log : logs) { - final byte[] rowkey = getRowkey(log); - result.add(rowkey); - } - return result; - } + public static List getRowkeysByEntities(List entities, EntityDefinition entityDef) throws Exception { + final List result = new ArrayList(entities.size()); + for (TaggedLogAPIEntity entity : entities) { + final byte[] rowkey = getRowkey(entity, entityDef); + result.add(rowkey); + } + return result; + } - public static byte[] getRowkey(String encodedRowkey) { - byte[] rowkey = EagleBase64Wrapper.decode(encodedRowkey); - return rowkey; - } + public static List getRowkeysByLogs(List logs) { + final List result = new ArrayList(logs.size()); + for (InternalLog log : logs) { + final byte[] rowkey = getRowkey(log); + result.add(rowkey); + } + return result; + } - public static List getRowkeysByEncodedRowkeys(List encodedRowkeys) { - final List result = new ArrayList(encodedRowkeys.size()); - for (String encodedRowkey : encodedRowkeys) { - byte[] rowkey = EagleBase64Wrapper.decode(encodedRowkey); - result.add(rowkey); - } - return result; - } + public static List getRowkeysByEncodedRowkeys(List encodedRowkeys) { + final List result = new ArrayList(encodedRowkeys.size()); + for (String encodedRowkey : encodedRowkeys) { + byte[] rowkey = EagleBase64Wrapper.decode(encodedRowkey); + result.add(rowkey); + } + return result; + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/Schema.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/Schema.java index 55556bd515..2ab0773481 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/Schema.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/Schema.java @@ -17,8 +17,11 @@ package org.apache.eagle.log.entity.old; public interface Schema { - public boolean isTag(String qualifier); - public String 
getTable(); - public String getColumnFamily(); - public String getPrefix(); + public boolean isTag(String qualifier); + + public String getTable(); + + public String getColumnFamily(); + + public String getPrefix(); } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/DefaultEntityRepository.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/DefaultEntityRepository.java index 1d0d398474..26ec328fb9 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/DefaultEntityRepository.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/DefaultEntityRepository.java @@ -22,13 +22,11 @@ /** * Default Eagle framework entity repository class. - * - * */ public class DefaultEntityRepository extends EntityRepository { - public DefaultEntityRepository() { - entitySet.add(MetricMetadataEntity.class); - entitySet.add(TestLogAPIEntity.class); - entitySet.add(TestTimeSeriesAPIEntity.class); - } + public DefaultEntityRepository() { + entitySet.add(MetricMetadataEntity.class); + entitySet.add(TestLogAPIEntity.class); + entitySet.add(TestTimeSeriesAPIEntity.class); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepository.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepository.java index 0c24d30ac0..37ee2f6c7a 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepository.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepository.java @@ -16,41 +16,34 @@ */ package org.apache.eagle.log.entity.repo; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import 
org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.meta.EntitySerDeser; +import java.util.*; + /** * Entity repository is used to store entity definition class. Each domain should define its own entities. Eagle entity * framework will scan all derived class implementations to get all entity definitions, then register them to EntityDefinitionManager. - * - * */ public abstract class EntityRepository { - protected Set> entitySet = new HashSet>(); - protected Map, EntitySerDeser> serDeserMap = new HashMap, EntitySerDeser>(); - - public synchronized Collection> getEntitySet() { - return new ArrayList>(entitySet); - } - - public synchronized Map, EntitySerDeser> getSerDeserMap() { - return new HashMap, EntitySerDeser>(serDeserMap); - } - - public synchronized void registerEntity(Class clazz) { - entitySet.add(clazz); - } - - public synchronized void registerSerDeser(Class clazz, EntitySerDeser serDeser) { - serDeserMap.put(clazz, serDeser); - } - + protected Set> entitySet = new HashSet>(); + protected Map, EntitySerDeser> serDeserMap = new HashMap, EntitySerDeser>(); + + public synchronized Collection> getEntitySet() { + return new ArrayList>(entitySet); + } + + public synchronized Map, EntitySerDeser> getSerDeserMap() { + return new HashMap, EntitySerDeser>(serDeserMap); + } + + public synchronized void registerEntity(Class clazz) { + entitySet.add(clazz); + } + + public synchronized void registerSerDeser(Class clazz, EntitySerDeser serDeser) { + serDeserMap.put(clazz, serDeser); + } + } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepositoryScanner.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepositoryScanner.java index 7065cbef91..109354a824 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepositoryScanner.java +++ 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepositoryScanner.java @@ -16,55 +16,54 @@ */ package org.apache.eagle.log.entity.repo; -import java.util.Collection; -import java.util.Map; -import java.util.Set; - -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.log.entity.meta.EntitySerDeser; import net.sf.extcos.ComponentQuery; import net.sf.extcos.ComponentScanner; - +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; +import org.apache.eagle.log.entity.meta.EntitySerDeser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Collection; +import java.util.Map; +import java.util.Set; + public final class EntityRepositoryScanner { - private static final Logger LOG = LoggerFactory.getLogger(EntityRepositoryScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(EntityRepositoryScanner.class); - public static void scan() throws InstantiationException, IllegalAccessException { - // TODO currently extcos 0.3b doesn't support to search packages like "com.*.eagle.*", "org.*.eagle.*". However 0.4b depends on asm-all version 4.0, which is - // conflicted with jersey server 1.8. We should fix it later - LOG.info("Scanning all entity repositories with pattern \"org.apache.eagle.*\""); - final ComponentScanner scanner = new ComponentScanner(); - final Set> classes = scanner.getClasses(new EntityRepoScanQuery() ); - for (Class entityClass : classes) { - LOG.info("Processing entity repository: " + entityClass.getName()); - if (EntityRepository.class.isAssignableFrom(entityClass)) { - EntityRepository repo = (EntityRepository)entityClass.newInstance(); - addRepo(repo); - } - } - } + public static void scan() throws InstantiationException, IllegalAccessException { + // TODO currently extcos 0.3b doesn't support to search packages like "com.*.eagle.*", "org.*.eagle.*". 
However 0.4b depends on asm-all version 4.0, which is + // conflicted with jersey server 1.8. We should fix it later + LOG.info("Scanning all entity repositories with pattern \"org.apache.eagle.*\""); + final ComponentScanner scanner = new ComponentScanner(); + final Set> classes = scanner.getClasses(new EntityRepoScanQuery()); + for (Class entityClass : classes) { + LOG.info("Processing entity repository: " + entityClass.getName()); + if (EntityRepository.class.isAssignableFrom(entityClass)) { + EntityRepository repo = (EntityRepository) entityClass.newInstance(); + addRepo(repo); + } + } + } - private static void addRepo(EntityRepository repo) { - final Map, EntitySerDeser> serDeserMap = repo.getSerDeserMap(); - for (Map.Entry, EntitySerDeser> entry : serDeserMap.entrySet()) { - EntityDefinitionManager.registerSerDeser(entry.getKey(), entry.getValue()); - } - final Collection> entityClasses = repo.getEntitySet(); - for (Class clazz : entityClasses) { - EntityDefinitionManager.registerEntity(clazz); - } - } + private static void addRepo(EntityRepository repo) { + final Map, EntitySerDeser> serDeserMap = repo.getSerDeserMap(); + for (Map.Entry, EntitySerDeser> entry : serDeserMap.entrySet()) { + EntityDefinitionManager.registerSerDeser(entry.getKey(), entry.getValue()); + } + final Collection> entityClasses = repo.getEntitySet(); + for (Class clazz : entityClasses) { + EntityDefinitionManager.registerEntity(clazz); + } + } - public static class EntityRepoScanQuery extends ComponentQuery { + public static class EntityRepoScanQuery extends ComponentQuery { - @Override - protected void query() { - select().from("org.apache.eagle").returning( - allExtending(EntityRepository.class)); - } - } + @Override + protected void query() { + select().from("org.apache.eagle").returning( + allExtending(EntityRepository.class)); + } + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestEntity.java 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestEntity.java index af635ecce9..ceac2e212f 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestEntity.java @@ -17,14 +17,9 @@ package org.apache.eagle.log.entity.test; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.log.entity.meta.Column; -import org.apache.eagle.log.entity.meta.ColumnFamily; -import org.apache.eagle.log.entity.meta.TimeSeries; +import org.apache.eagle.log.entity.meta.*; import org.codehaus.jackson.map.annotate.JsonSerialize; -import org.apache.eagle.log.entity.meta.Prefix; -import org.apache.eagle.log.entity.meta.Table; - /** * this class is written by customer, but it has some contracts * 0. This class should conform to java bean conventions @@ -33,75 +28,75 @@ * 3. Annotate those qualifier fields with column name * 4. 
Fire property change event for all fields' setter method, where field name is mandatory parameter */ -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) @Table("alertdetail") @ColumnFamily("f") @Prefix("hadoop") @TimeSeries(true) public class TestEntity extends TaggedLogAPIEntity { - @Column("remediationID") - private String remediationID; - @Column("remediationStatus") - private String remediationStatus; - @Column("c") - private long count; - @Column("d") - private int numHosts; - @Column("e") - private Long numClusters; + @Column("remediationID") + private String remediationID; + @Column("remediationStatus") + private String remediationStatus; + @Column("c") + private long count; + @Column("d") + private int numHosts; + @Column("e") + private Long numClusters; + + public Long getNumClusters() { + return numClusters; + } - public Long getNumClusters() { - return numClusters; - } + public void setNumClusters(Long numClusters) { + this.numClusters = numClusters; + pcs.firePropertyChange("numClusters", null, null); + } - public void setNumClusters(Long numClusters) { - this.numClusters = numClusters; - pcs.firePropertyChange("numClusters", null, null); - } + public int getNumHosts() { + return numHosts; + } - public int getNumHosts() { - return numHosts; - } + public void setNumHosts(int numHosts) { + this.numHosts = numHosts; + pcs.firePropertyChange("numHosts", null, null); + } - public void setNumHosts(int numHosts) { - this.numHosts = numHosts; - pcs.firePropertyChange("numHosts", null, null); - } + public long getCount() { + return count; + } - public long getCount() { - return count; - } + public void setCount(long count) { + this.count = count; + pcs.firePropertyChange("count", null, null); + } - public void setCount(long count) { - this.count = count; - pcs.firePropertyChange("count", null, null); - } + public String getRemediationID() { + return remediationID; + } - public String 
getRemediationID() { - return remediationID; - } + public void setRemediationID(String remediationID) { + this.remediationID = remediationID; + pcs.firePropertyChange("remediationID", null, null); + } - public void setRemediationID(String remediationID) { - this.remediationID = remediationID; - pcs.firePropertyChange("remediationID", null, null); - } + public String getRemediationStatus() { + return remediationStatus; + } - public String getRemediationStatus() { - return remediationStatus; - } + public void setRemediationStatus(String remediationStatus) { + this.remediationStatus = remediationStatus; + pcs.firePropertyChange("remediationStatus", null, null); + } - public void setRemediationStatus(String remediationStatus) { - this.remediationStatus = remediationStatus; - pcs.firePropertyChange("remediationStatus", null, null); - } - - public String toString(){ - StringBuffer sb = new StringBuffer(); - sb.append(super.toString()); - sb.append(", remediationID:"); - sb.append(remediationID); - sb.append(", remediationStatus:"); - sb.append(remediationStatus); - return sb.toString(); - } + public String toString() { + StringBuffer sb = new StringBuffer(); + sb.append(super.toString()); + sb.append(", remediationID:"); + sb.append(remediationID); + sb.append(", remediationStatus:"); + sb.append(remediationStatus); + return sb.toString(); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestLogAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestLogAPIEntity.java index d126421b2d..90bd21c11c 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestLogAPIEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestLogAPIEntity.java @@ -20,80 +20,93 @@ import org.apache.eagle.log.entity.meta.*; import 
org.codehaus.jackson.map.annotate.JsonSerialize; -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) @Table("unittest") @ColumnFamily("f") @Prefix("entityut") @Service("TestLogAPIEntity") @TimeSeries(false) -@Indexes({ - @Index(name="jobIdIndex1", columns = { "jobID" }, unique = true), - @Index(name="jobIdNonIndex2", columns = { "hostname" }, unique = false) - }) +@Indexes( { + @Index(name = "jobIdIndex1", columns = {"jobID"}, unique = true), + @Index(name = "jobIdNonIndex2", columns = {"hostname"}, unique = false) + }) public class TestLogAPIEntity extends TaggedLogAPIEntity { - @Column("a") - private int field1; - @Column("b") - private Integer field2; - @Column("c") - private long field3; - @Column("d") - private Long field4; - @Column("e") - private double field5; - @Column("f") - private Double field6; - @Column("g") - private String field7; - - public int getField1() { - return field1; - } - public void setField1(int field1) { - this.field1 = field1; - pcs.firePropertyChange("field1", null, null); - } - public Integer getField2() { - return field2; - } - public void setField2(Integer field2) { - this.field2 = field2; - pcs.firePropertyChange("field2", null, null); - } - public long getField3() { - return field3; - } - public void setField3(long field3) { - this.field3 = field3; - pcs.firePropertyChange("field3", null, null); - } - public Long getField4() { - return field4; - } - public void setField4(Long field4) { - this.field4 = field4; - pcs.firePropertyChange("field4", null, null); - } - public double getField5() { - return field5; - } - public void setField5(double field5) { - this.field5 = field5; - pcs.firePropertyChange("field5", null, null); - } - public Double getField6() { - return field6; - } - public void setField6(Double field6) { - this.field6 = field6; - pcs.firePropertyChange("field6", null, null); - } - public String getField7() { - return field7; - } - public void 
setField7(String field7) { - this.field7 = field7; - pcs.firePropertyChange("field7", null, null); - } + @Column("a") + private int field1; + @Column("b") + private Integer field2; + @Column("c") + private long field3; + @Column("d") + private Long field4; + @Column("e") + private double field5; + @Column("f") + private Double field6; + @Column("g") + private String field7; + + public int getField1() { + return field1; + } + + public void setField1(int field1) { + this.field1 = field1; + pcs.firePropertyChange("field1", null, null); + } + + public Integer getField2() { + return field2; + } + + public void setField2(Integer field2) { + this.field2 = field2; + pcs.firePropertyChange("field2", null, null); + } + + public long getField3() { + return field3; + } + + public void setField3(long field3) { + this.field3 = field3; + pcs.firePropertyChange("field3", null, null); + } + + public Long getField4() { + return field4; + } + + public void setField4(Long field4) { + this.field4 = field4; + pcs.firePropertyChange("field4", null, null); + } + + public double getField5() { + return field5; + } + + public void setField5(double field5) { + this.field5 = field5; + pcs.firePropertyChange("field5", null, null); + } + + public Double getField6() { + return field6; + } + + public void setField6(Double field6) { + this.field6 = field6; + pcs.firePropertyChange("field6", null, null); + } + + public String getField7() { + return field7; + } + + public void setField7(String field7) { + this.field7 = field7; + pcs.firePropertyChange("field7", null, null); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestTimeSeriesAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestTimeSeriesAPIEntity.java index eaa9fd22f9..481f078669 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestTimeSeriesAPIEntity.java +++ 
b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestTimeSeriesAPIEntity.java @@ -16,82 +16,94 @@ */ package org.apache.eagle.log.entity.test; +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.meta.*; import org.codehaus.jackson.map.annotate.JsonSerialize; -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; - -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) @Table("unittest") @ColumnFamily("f") @Prefix("testTSEntity") @Service("TestTimeSeriesAPIEntity") @TimeSeries(true) -@Tags({"cluster","datacenter","random"}) +@Tags( {"cluster", "datacenter", "random"}) public class TestTimeSeriesAPIEntity extends TaggedLogAPIEntity { - @Column("a") - private int field1; - @Column("b") - private Integer field2; - @Column("c") - private long field3; - @Column("d") - private Long field4; - @Column("e") - private double field5; - @Column("f") - private Double field6; - @Column("g") - private String field7; - - public int getField1() { - return field1; - } - public void setField1(int field1) { - this.field1 = field1; - pcs.firePropertyChange("field1", null, null); - } - public Integer getField2() { - return field2; - } - public void setField2(Integer field2) { - this.field2 = field2; - pcs.firePropertyChange("field2", null, null); - } - public long getField3() { - return field3; - } - public void setField3(long field3) { - this.field3 = field3; - pcs.firePropertyChange("field3", null, null); - } - public Long getField4() { - return field4; - } - public void setField4(Long field4) { - this.field4 = field4; - pcs.firePropertyChange("field4", null, null); - } - public double getField5() { - return field5; - } - public void setField5(double field5) { - this.field5 = field5; - pcs.firePropertyChange("field5", null, null); - } - public Double getField6() { - return field6; - } - public void setField6(Double field6) { - 
this.field6 = field6; - pcs.firePropertyChange("field6", null, null); - } - public String getField7() { - return field7; - } - public void setField7(String field7) { - this.field7 = field7; - pcs.firePropertyChange("field7", null, null); - } + @Column("a") + private int field1; + @Column("b") + private Integer field2; + @Column("c") + private long field3; + @Column("d") + private Long field4; + @Column("e") + private double field5; + @Column("f") + private Double field6; + @Column("g") + private String field7; + + public int getField1() { + return field1; + } + + public void setField1(int field1) { + this.field1 = field1; + pcs.firePropertyChange("field1", null, null); + } + + public Integer getField2() { + return field2; + } + + public void setField2(Integer field2) { + this.field2 = field2; + pcs.firePropertyChange("field2", null, null); + } + + public long getField3() { + return field3; + } + + public void setField3(long field3) { + this.field3 = field3; + pcs.firePropertyChange("field3", null, null); + } + + public Long getField4() { + return field4; + } + + public void setField4(Long field4) { + this.field4 = field4; + pcs.firePropertyChange("field4", null, null); + } + + public double getField5() { + return field5; + } + + public void setField5(double field5) { + this.field5 = field5; + pcs.firePropertyChange("field5", null, null); + } + + public Double getField6() { + return field6; + } + + public void setField6(Double field6) { + this.field6 = field6; + pcs.firePropertyChange("field6", null, null); + } + + public String getField7() { + return field7; + } + + public void setField7(String field7) { + this.field7 = field7; + pcs.firePropertyChange("field7", null, null); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ExpressionParser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ExpressionParser.java index 4f7471506d..9c71781c51 100755 --- 
a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ExpressionParser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ExpressionParser.java @@ -15,7 +15,7 @@ * limitations under the License. */ /** - * + * */ package org.apache.eagle.log.expression; @@ -38,170 +38,169 @@ /** *

    Expression Evaluation

    - * * Given expression in string and set context variables, return value in double - * - *
    *
    * For example: * EXP{(max(a, b)* min(a, b)) / abs(a-b+c-d)} => 600.0 - * - *
    *
    * NOTE: Expression variable must be in format: fieldName instead of @fieldName - * - *
    *
    *

    Dependencies:

    *
      - *
    • - * scireum/parsii - * Super fast and simple evaluator for mathematical expressions written in Java - *
    • + *
    • + * scireum/parsii + * Super fast and simple evaluator for mathematical expressions written in Java + *
    • *
    - * */ -public class ExpressionParser{ - private final static Logger LOG = LoggerFactory.getLogger(ExpressionParser.class); - - private String exprStr; - private Expression expression; - private Scope scope; - - @SuppressWarnings("unused") - public Scope getScope() { - return scope; - } - - private List dependentFields; - - /** - * @param exprStr expression string in format like: (max(a, b)* min(a, b)) / abs(a-b+c-d) - * - * @throws ParseException - * @throws ParsiiInvalidException - */ - public ExpressionParser(String exprStr) throws ParseException, ParsiiInvalidException{ - this.exprStr = exprStr; - scope = Scope.create(); - expression = Parser.parse(this.exprStr,scope); - } - - @SuppressWarnings("unused") - public ExpressionParser(String exprStr, Map context) throws ParsiiInvalidException, ParseException, ParsiiUnknowVariableException { - this(exprStr); - setVariables(context); - } - - public ExpressionParser setVariables(Map tuple) throws ParsiiUnknowVariableException{ -// for(String valName : tuple.keySet()) { -// Double value = tuple.get(valName); - for(Map.Entry entry : tuple.entrySet()) { +public class ExpressionParser { + private static final Logger LOG = LoggerFactory.getLogger(ExpressionParser.class); + + private String exprStr; + private Expression expression; + private Scope scope; + + @SuppressWarnings("unused") + public Scope getScope() { + return scope; + } + + private List dependentFields; + + /** + * Constructor with expr. 
+ * + * @param exprStr expression string in format like: (max(a, b)* min(a, b)) / abs(a-b+c-d) + * @throws ParseException + * @throws ParsiiInvalidException + */ + public ExpressionParser(String exprStr) throws ParseException, ParsiiInvalidException { + this.exprStr = exprStr; + scope = Scope.create(); + expression = Parser.parse(this.exprStr, scope); + } + + @SuppressWarnings("unused") + public ExpressionParser(String exprStr, Map context) throws ParsiiInvalidException, ParseException, ParsiiUnknowVariableException { + this(exprStr); + setVariables(context); + } + + public ExpressionParser setVariables(Map tuple) throws ParsiiUnknowVariableException { + // for(String valName : tuple.keySet()) { + // Double value = tuple.get(valName); + for (Map.Entry entry : tuple.entrySet()) { String valName = entry.getKey(); Double value = entry.getValue(); - Variable variable = scope.getVariable(valName); - if(variable!=null && value !=null) { - variable.setValue(value); - }else{ - if(LOG.isDebugEnabled()) LOG.warn("Variable for "+valName+" is null in scope of expression: "+this.exprStr); - } - } - return this; - } - - @SuppressWarnings("unused") - public ExpressionParser setVariable(Entry tuple) throws ParsiiUnknowVariableException{ - if (getDependentFields().contains(tuple.getKey())) { - scope.getVariable(tuple.getKey()).setValue(tuple.getValue()); - } - else { - throw new ParsiiUnknowVariableException("unknown variable: " + tuple.getKey()); - } - return this; - } - - public ExpressionParser setVariable(String key, Double value) throws ParsiiUnknowVariableException{ - scope.getVariable(key).setValue(value); - return this; - } - - public double eval() throws Exception{ - return expression.evaluate(); - } - - /** - * Thread safe - * - * @param tuple - * @return - * @throws ParsiiUnknowVariableException - */ - public double eval(Map tuple) throws Exception { - synchronized (this){ - this.setVariables(tuple); - return this.eval(); - } - } - - public List getDependentFields() { - 
if (dependentFields == null) { - dependentFields = new ArrayList(); - for (String variable : scope.getNames()) { - if (!variable.equals("pi") && !variable.equals("E") && !variable.equals("euler")) - dependentFields.add(variable); - } - } - return dependentFields; - } - - private final static Map _exprParserCache = new HashMap(); - - /** - * Thread safe - * - * @param expr - * @return - * @throws ParsiiInvalidException - * @throws ParseException - */ - public static ExpressionParser parse(String expr) throws ParsiiInvalidException, ParseException { - if(expr == null) throw new IllegalStateException("Expression to parse is null"); - synchronized (_exprParserCache) { - ExpressionParser parser = _exprParserCache.get(expr); - if (parser == null) { - parser = new ExpressionParser(expr); - _exprParserCache.put(expr, parser); - } - return parser; - } - } - public static double eval(String expression,Map context) throws Exception { - ExpressionParser parser = parse(expression); - return parser.eval(context); - } - - private static final Map _entityMethodCache = new HashMap(); - public static double eval(String expression,TaggedLogAPIEntity entity) throws Exception { - ExpressionParser parser = parse(expression); - List dependencies = parser.getDependentFields(); - Map context = new HashMap(); - for(String field:dependencies){ - String methodName = "get"+field.substring(0, 1).toUpperCase() + field.substring(1); - String methodUID = entity.getClass().getName()+"."+methodName; - - Method m; - synchronized (_entityMethodCache) { - m = _entityMethodCache.get(methodUID); - if (m == null) { - m = entity.getClass().getMethod(methodName); - _entityMethodCache.put(methodUID, m); - } - } - Object obj = m.invoke(entity); - Double doubleValue = EntityQualifierUtils.convertObjToDouble(obj); - // if(doubleValue == Double.NaN) throw new IllegalArgumentException("Field "+field+": "+obj+" in expression "+expression+" is not number"); - context.put(field,doubleValue); - } - return 
parser.eval(context); - } + Variable variable = scope.getVariable(valName); + if (variable != null && value != null) { + variable.setValue(value); + } else { + if (LOG.isDebugEnabled()) { + LOG.warn("Variable for " + valName + " is null in scope of expression: " + this.exprStr); + } + } + } + return this; + } + + @SuppressWarnings("unused") + public ExpressionParser setVariable(Entry tuple) throws ParsiiUnknowVariableException { + if (getDependentFields().contains(tuple.getKey())) { + scope.getVariable(tuple.getKey()).setValue(tuple.getValue()); + } else { + throw new ParsiiUnknowVariableException("unknown variable: " + tuple.getKey()); + } + return this; + } + + public ExpressionParser setVariable(String key, Double value) throws ParsiiUnknowVariableException { + scope.getVariable(key).setValue(value); + return this; + } + + public List getDependentFields() { + if (dependentFields == null) { + dependentFields = new ArrayList(); + for (String variable : scope.getNames()) { + if (!variable.equals("pi") && !variable.equals("E") && !variable.equals("euler")) { + dependentFields.add(variable); + } + } + } + return dependentFields; + } + + public double eval() throws Exception { + return expression.evaluate(); + } + + /** + * Thread safe. 
+ * + * @param tuple + * @return + * @throws ParsiiUnknowVariableException + */ + public double eval(Map tuple) throws Exception { + synchronized (this) { + this.setVariables(tuple); + return this.eval(); + } + } + + public static double eval(String expression, Map context) throws Exception { + ExpressionParser parser = parse(expression); + return parser.eval(context); + } + + private static final Map _entityMethodCache = new HashMap(); + + public static double eval(String expression, TaggedLogAPIEntity entity) throws Exception { + ExpressionParser parser = parse(expression); + List dependencies = parser.getDependentFields(); + Map context = new HashMap(); + for (String field : dependencies) { + String methodName = "get" + field.substring(0, 1).toUpperCase() + field.substring(1); + String methodUID = entity.getClass().getName() + "." + methodName; + + Method m; + synchronized (_entityMethodCache) { + m = _entityMethodCache.get(methodUID); + if (m == null) { + m = entity.getClass().getMethod(methodName); + _entityMethodCache.put(methodUID, m); + } + } + Object obj = m.invoke(entity); + Double doubleValue = EntityQualifierUtils.convertObjToDouble(obj); + // if(doubleValue == Double.NaN) throw new IllegalArgumentException("Field "+field+": "+obj+" in expression "+expression+" is not number"); + context.put(field, doubleValue); + } + return parser.eval(context); + } + + private static final Map _exprParserCache = new HashMap(); + + /** + * Thread safe. 
+ * + * @param expr + * @return + * @throws ParsiiInvalidException + * @throws ParseException + */ + public static ExpressionParser parse(String expr) throws ParsiiInvalidException, ParseException { + if (expr == null) { + throw new IllegalStateException("Expression to parse is null"); + } + synchronized (_exprParserCache) { + ExpressionParser parser = _exprParserCache.get(expr); + if (parser == null) { + parser = new ExpressionParser(expr); + _exprParserCache.put(expr, parser); + } + return parser; + } + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiInvalidException.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiInvalidException.java index 22301f8de0..0eb8223161 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiInvalidException.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiInvalidException.java @@ -15,43 +15,43 @@ * limitations under the License. */ /** - * + * */ package org.apache.eagle.log.expression; -/** - * @since Nov 7, 2014 - */ -public class ParsiiInvalidException extends Exception{ - - private static final long serialVersionUID = 1L; +public class ParsiiInvalidException extends Exception { + + private static final long serialVersionUID = 1L; - /** - * Default constructor - */ - public ParsiiInvalidException() { - super(); - } + /** + * Default constructor. + */ + public ParsiiInvalidException() { + super(); + } - /** - * @param message - * @param cause - */ - public ParsiiInvalidException(String message, Throwable cause) { - super(message, cause); - } + /** + * constructor with message and cause. 
+ * @param message + * @param cause + */ + public ParsiiInvalidException(String message, Throwable cause) { + super(message, cause); + } - /** - * @param message - */ - public ParsiiInvalidException(String message) { - super(message); - } + /** + * constructor with message. + * @param message + */ + public ParsiiInvalidException(String message) { + super(message); + } - /** - * @param cause - */ - public ParsiiInvalidException(Throwable cause) { - super(cause); - } + /** + * constructor with cause. + * @param cause + */ + public ParsiiInvalidException(Throwable cause) { + super(cause); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiUnknowVariableException.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiUnknowVariableException.java index 1573a08541..8a05f3bc37 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiUnknowVariableException.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiUnknowVariableException.java @@ -15,43 +15,44 @@ * limitations under the License. */ /** - * + * */ package org.apache.eagle.log.expression; -/** - * @since Nov 7, 2014 - */ -public class ParsiiUnknowVariableException extends Exception{ - - private static final long serialVersionUID = 1L; - /** - * Default constructor - */ - public ParsiiUnknowVariableException() { - super(); - } +public class ParsiiUnknowVariableException extends Exception { + + private static final long serialVersionUID = 1L; + + /** + * Default constructor. + */ + public ParsiiUnknowVariableException() { + super(); + } - /** - * @param message - * @param cause - */ - public ParsiiUnknowVariableException(String message, Throwable cause) { - super(message, cause); - } + /** + * constructor with message and cause. 
+ * @param message + * @param cause + */ + public ParsiiUnknowVariableException(String message, Throwable cause) { + super(message, cause); + } - /** - * @param message - */ - public ParsiiUnknowVariableException(String message) { - super(message); - } + /** + * constructor with message. + * @param message + */ + public ParsiiUnknowVariableException(String message) { + super(message); + } - /** - * @param cause - */ - public ParsiiUnknowVariableException(Throwable cause) { - super(cause); - } + /** + * constructor with cause. + * @param cause + */ + public ParsiiUnknowVariableException(Throwable cause) { + super(cause); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/TestGenericServiceAPIResponseEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/TestGenericServiceAPIResponseEntity.java index 092ef75cc0..cf609be085 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/TestGenericServiceAPIResponseEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/TestGenericServiceAPIResponseEntity.java @@ -34,34 +34,40 @@ * @since 3/18/15 */ public class TestGenericServiceAPIResponseEntity { - final static Logger LOG = LoggerFactory.getLogger(TestGenericServiceAPIResponseEntity.class); + static final Logger LOG = LoggerFactory.getLogger(TestGenericServiceAPIResponseEntity.class); ObjectMapper objectMapper; @Before - public void setUp(){ + public void setUp() { objectMapper = new ObjectMapper(); } @JsonSerialize - public static class Item{ - public Item(){} - public Item(String name,Double value){ + public static class Item { + public Item() { + } + + public Item(String name, Double value) { this.name = name; this.value = value; } + private String name; private Double value; public String getName() { return name; } + public void setName(String name) { this.name = name; } + public Double getValue() { return value; } + 
public void setValue(Double value) { this.value = value; } @@ -71,10 +77,10 @@ public void setValue(Double value) { public void testSerDeserialize() throws IOException { // mock up service side to serialize GenericServiceAPIResponseEntity entity = new GenericServiceAPIResponseEntity(Item.class); - entity.setObj(Arrays.asList(new Item("a",1.2),new Item("b",1.3),new Item("c",1.4))); - entity.setMeta(new HashMap(){{ - put("tag1","val1"); - put("tag2","val2"); + entity.setObj(Arrays.asList(new Item("a", 1.2), new Item("b", 1.3), new Item("c", 1.4))); + entity.setMeta(new HashMap() {{ + put("tag1", "val1"); + put("tag2", "val2"); }}); // entity.setTypeByObj(); @@ -83,10 +89,10 @@ public void testSerDeserialize() throws IOException { LOG.info(json); // mock up client side to deserialize - GenericServiceAPIResponseEntity deserEntity = objectMapper.readValue(json,GenericServiceAPIResponseEntity.class); - Assert.assertEquals(json,objectMapper.writeValueAsString(deserEntity)); + GenericServiceAPIResponseEntity deserEntity = objectMapper.readValue(json, GenericServiceAPIResponseEntity.class); + Assert.assertEquals(json, objectMapper.writeValueAsString(deserEntity)); Assert.assertEquals(3, deserEntity.getObj().size()); - Assert.assertEquals(LinkedList.class,deserEntity.getObj().getClass()); - Assert.assertEquals(Item.class,deserEntity.getObj().get(0).getClass()); + Assert.assertEquals(LinkedList.class, deserEntity.getObj().getClass()); + Assert.assertEquals(Item.class, deserEntity.getObj().get(0).getClass()); } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDouble2DArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDouble2DArraySerDeser.java index 12fba59e17..3d5572d527 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDouble2DArraySerDeser.java +++ 
b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDouble2DArraySerDeser.java @@ -26,19 +26,19 @@ public class TestDouble2DArraySerDeser { private Double2DArraySerDeser double2DArraySerDeser = new Double2DArraySerDeser(); @Test - public void testSerDeser(){ - double[][] data = new double[][]{ - {0,1,2,4}, - {4,2,1,0}, - {4}, - null, - {} + public void testSerDeser() { + double[][] data = new double[][] { + {0, 1, 2, 4}, + {4, 2, 1, 0}, + {4}, + null, + {} }; byte[] bytes = double2DArraySerDeser.serialize(data); double[][] data2 = double2DArraySerDeser.deserialize(bytes); - assert data.length == data2.length; + assert data.length == data2.length; assert data[0].length == data2[0].length; assert data[1].length == data2[1].length; assert data[2].length == data2[2].length; diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDoubleSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDoubleSerDeser.java index 2432e47883..87306d86ec 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDoubleSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDoubleSerDeser.java @@ -16,43 +16,46 @@ */ package org.apache.eagle.log.entity; -import org.apache.eagle.log.entity.meta.DoubleSerDeser; import org.apache.eagle.common.ByteUtil; -import org.junit.Assert; +import org.apache.eagle.log.entity.meta.DoubleSerDeser; import org.apache.hadoop.hbase.filter.BinaryComparator; +import org.junit.Assert; import org.junit.Test; public class TestDoubleSerDeser { - @Test - public void test() { - DoubleSerDeser dsd = new DoubleSerDeser(); - //byte[] t = {'N', 'a', 'N'}; - byte [] t = dsd.serialize(Double.NaN); - - Double d = dsd.deserialize(t); - System.out.println(d); - //Double d = dsd.deserialize(t); - } + @Test + public void test() { + DoubleSerDeser dsd = new 
DoubleSerDeser(); + //byte[] t = {'N', 'a', 'N'}; + byte[] t = dsd.serialize(Double.NaN); + + Double d = dsd.deserialize(t); + System.out.println(d); + //Double d = dsd.deserialize(t); + } - /** - * @link http://en.wikipedia.org/wiki/Double-precision_floating-point_format - */ - @Test - public void testIEEE754_Binary64_DoublePrecisionFloatingPointFormat(){ - for(Double last = null,i=Math.pow(-2.0,33);i< Math.pow(2.0,33);i+=Math.pow(2.0,10)){ - if(last != null){ - Assert.assertTrue(i > last); - if(last < 0 && i <0){ - Assert.assertTrue("Negative double value and its serialization Binary array have negative correlation", new BinaryComparator(ByteUtil.doubleToBytes(i)).compareTo(ByteUtil.doubleToBytes(last)) < 0); - }else if(last < 0 && i >=0){ - Assert.assertTrue("Binary array for negative double is always greater than any positive doubles' ",new BinaryComparator(ByteUtil.doubleToBytes(i)).compareTo(ByteUtil.doubleToBytes(last)) < 0); - }else if(last >= 0){ - Assert.assertTrue("Positive double value and its serialization Binary array have positive correlation",new BinaryComparator(ByteUtil.doubleToBytes(i)).compareTo(ByteUtil.doubleToBytes(last)) > 0); - } - } - last = i; - } - Assert.assertTrue("Binary array for negative double is always greater than any positive doubles'",new BinaryComparator(ByteUtil.doubleToBytes(-1.0)).compareTo(ByteUtil.doubleToBytes(Math.pow(2.0,32)))>0) ; - } + /** + * @link http://en.wikipedia.org/wiki/Double-precision_floating-point_format + */ + @Test + public void testIEEE754_Binary64_DoublePrecisionFloatingPointFormat() { + for (Double last = null, i = Math.pow(-2.0, 33); i < Math.pow(2.0, 33); i += Math.pow(2.0, 10)) { + if (last != null) { + Assert.assertTrue(i > last); + if (last < 0 && i < 0) { + Assert.assertTrue("Negative double value and its serialization Binary array have negative correlation", new BinaryComparator(ByteUtil.doubleToBytes(i)).compareTo(ByteUtil + .doubleToBytes(last)) < 0); + } else if (last < 0 && i >= 0) { + 
Assert.assertTrue("Binary array for negative double is always greater than any positive doubles' ", new BinaryComparator(ByteUtil.doubleToBytes(i)).compareTo(ByteUtil + .doubleToBytes(last)) < 0); + } else if (last >= 0) { + Assert.assertTrue("Positive double value and its serialization Binary array have positive correlation", new BinaryComparator(ByteUtil.doubleToBytes(i)).compareTo(ByteUtil + .doubleToBytes(last)) > 0); + } + } + last = i; + } + Assert.assertTrue("Binary array for negative double is always greater than any positive doubles'", new BinaryComparator(ByteUtil.doubleToBytes(-1.0)).compareTo(ByteUtil.doubleToBytes(Math.pow(2.0, 32))) > 0); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestGenericEntityIndexStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestGenericEntityIndexStreamReader.java index 1e9e6cb297..dc0f5404c9 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestGenericEntityIndexStreamReader.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestGenericEntityIndexStreamReader.java @@ -41,7 +41,7 @@ public void testUniqueIndexRead() throws Exception { EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); final EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - + final List list = new ArrayList(); TestLogAPIEntity e = new TestLogAPIEntity(); e.setField1(1); @@ -59,7 +59,7 @@ public void testUniqueIndexRead() throws Exception { GenericEntityWriter writer = new GenericEntityWriter(ed.getService()); List result = writer.write(list); Assert.assertNotNull(result); - + final IndexDefinition indexDef = ed.getIndexes()[0]; SearchCondition condition = new SearchCondition(); condition.setOutputFields(new ArrayList()); @@ -77,7 +77,7 @@ public void testUniqueIndexRead() throws Exception { 
UniqueIndexStreamReader indexReader = new UniqueIndexStreamReader(indexDef, condition); GenericEntityBatchReader batchReader = new GenericEntityBatchReader(indexReader); - List entities = batchReader.read(); + List entities = batchReader.read(); Assert.assertNotNull(entities); Assert.assertTrue(entities.size() >= 1); TestLogAPIEntity e1 = entities.get(0); @@ -88,13 +88,13 @@ public void testUniqueIndexRead() throws Exception { Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); Assert.assertEquals(e.getField6(), e1.getField6()); Assert.assertEquals(e.getField7(), e1.getField7()); - + GenericDeleter deleter = new GenericDeleter(ed.getTable(), ed.getColumnFamily()); deleter.delete(list); - + indexReader = new UniqueIndexStreamReader(indexDef, condition); batchReader = new GenericEntityBatchReader(indexReader); - entities = batchReader.read(); + entities = batchReader.read(); hbase.deleteTable(entityDefinition.getTable()); Assert.assertNotNull(entities); Assert.assertTrue(entities.isEmpty()); @@ -107,7 +107,7 @@ public void testNonClusterIndexRead() throws Exception { EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); final EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - + final List list = new ArrayList(); TestLogAPIEntity e = new TestLogAPIEntity(); e.setField1(1); @@ -125,7 +125,7 @@ public void testNonClusterIndexRead() throws Exception { GenericEntityWriter writer = new GenericEntityWriter(ed.getService()); List result = writer.write(list); Assert.assertNotNull(result); - + final IndexDefinition indexDef = ed.getIndexes()[1]; SearchCondition condition = new SearchCondition(); condition.setOutputFields(new ArrayList()); @@ -143,7 +143,7 @@ public void testNonClusterIndexRead() throws Exception { NonClusteredIndexStreamReader indexReader = new NonClusteredIndexStreamReader(indexDef, condition); GenericEntityBatchReader batchReader = new GenericEntityBatchReader(indexReader); - List 
entities = batchReader.read(); + List entities = batchReader.read(); Assert.assertNotNull(entities); Assert.assertTrue(entities.size() >= 1); TestLogAPIEntity e1 = entities.get(0); @@ -158,10 +158,10 @@ public void testNonClusterIndexRead() throws Exception { GenericDeleter deleter = new GenericDeleter(ed.getTable(), ed.getColumnFamily()); deleter.delete(list); - + indexReader = new NonClusteredIndexStreamReader(indexDef, condition); batchReader = new GenericEntityBatchReader(indexReader); - entities = batchReader.read(); + entities = batchReader.read(); hbase.deleteTable(entityDefinition.getTable()); Assert.assertNotNull(entities); Assert.assertTrue(entities.isEmpty()); diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseIntegerLogHelper.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseIntegerLogHelper.java index a755668e0d..bd9127d490 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseIntegerLogHelper.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseIntegerLogHelper.java @@ -16,11 +16,11 @@ */ package org.apache.eagle.log.entity; +import org.apache.eagle.common.ByteUtil; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; import org.apache.eagle.log.entity.test.TestTimeSeriesAPIEntity; -import org.apache.eagle.common.ByteUtil; import org.junit.Assert; import org.junit.Test; @@ -31,37 +31,37 @@ * @since : 11/10/14,2014 */ public class TestHBaseIntegerLogHelper { - @Test - public void testTimeSeriesAPIEntity(){ - InternalLog internalLog = new InternalLog(); - Map map = new HashMap(); - TestTimeSeriesAPIEntity apiEntity = new TestTimeSeriesAPIEntity(); - EntityDefinition ed = null; - try { - ed = 
EntityDefinitionManager.getEntityByServiceName("TestTimeSeriesAPIEntity"); - } catch (InstantiationException e) { - e.printStackTrace(); - } catch (IllegalAccessException e) { - e.printStackTrace(); - } - map.put("a", ByteUtil.intToBytes(12)); - map.put("c", ByteUtil.longToBytes(123432432l)); - map.put("cluster", new String("cluster4ut").getBytes()); - map.put("datacenter", new String("datacenter4ut").getBytes()); + @Test + public void testTimeSeriesAPIEntity() { + InternalLog internalLog = new InternalLog(); + Map map = new HashMap(); + TestTimeSeriesAPIEntity apiEntity = new TestTimeSeriesAPIEntity(); + EntityDefinition ed = null; + try { + ed = EntityDefinitionManager.getEntityByServiceName("TestTimeSeriesAPIEntity"); + } catch (InstantiationException e) { + e.printStackTrace(); + } catch (IllegalAccessException e) { + e.printStackTrace(); + } + map.put("a", ByteUtil.intToBytes(12)); + map.put("c", ByteUtil.longToBytes(123432432l)); + map.put("cluster", new String("cluster4ut").getBytes()); + map.put("datacenter", new String("datacenter4ut").getBytes()); - internalLog.setQualifierValues(map); - internalLog.setTimestamp(System.currentTimeMillis()); + internalLog.setQualifierValues(map); + internalLog.setTimestamp(System.currentTimeMillis()); - try { - TaggedLogAPIEntity entity = HBaseInternalLogHelper.buildEntity(internalLog, ed); - Assert.assertTrue(entity instanceof TestTimeSeriesAPIEntity); - TestTimeSeriesAPIEntity tsentity = (TestTimeSeriesAPIEntity) entity; - Assert.assertEquals("cluster4ut",tsentity.getTags().get("cluster")); - Assert.assertEquals("datacenter4ut",tsentity.getTags().get("datacenter")); - Assert.assertEquals(12,tsentity.getField1()); - Assert.assertEquals(123432432l,tsentity.getField3()); - } catch (Exception e) { - e.printStackTrace(); - } - } + try { + TaggedLogAPIEntity entity = HBaseInternalLogHelper.buildEntity(internalLog, ed); + Assert.assertTrue(entity instanceof TestTimeSeriesAPIEntity); + TestTimeSeriesAPIEntity tsentity = 
(TestTimeSeriesAPIEntity) entity; + Assert.assertEquals("cluster4ut", tsentity.getTags().get("cluster")); + Assert.assertEquals("datacenter4ut", tsentity.getTags().get("datacenter")); + Assert.assertEquals(12, tsentity.getField1()); + Assert.assertEquals(123432432l, tsentity.getField3()); + } catch (Exception e) { + e.printStackTrace(); + } + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWriteEntitiesPerformance.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWriteEntitiesPerformance.java index 4324b7310b..29d4ad2cef 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWriteEntitiesPerformance.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWriteEntitiesPerformance.java @@ -16,12 +16,12 @@ */ package org.apache.eagle.log.entity; +import org.apache.commons.lang.time.StopWatch; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; import org.apache.eagle.log.entity.test.TestLogAPIEntity; import org.apache.eagle.service.hbase.TestHBaseBase; import org.junit.*; -import org.apache.commons.lang.time.StopWatch; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -70,7 +70,7 @@ private List writeEntities(int count) { int wroteCount = 0; List rowkeys = new ArrayList(); List list = new ArrayList(); - for (int i = 0 ; i <= count;i++) { + for (int i = 0; i <= count; i++) { TestLogAPIEntity e = new TestLogAPIEntity(); e.setTimestamp(new Date().getTime()); e.setField1(i); @@ -87,14 +87,16 @@ private List writeEntities(int count) { e.getTags().put("class", e.toString()); list.add(e); - if ( list.size() >= 1000) { + if (list.size() >= 1000) { try { StopWatch watch = new StopWatch(); watch.start(); rowkeys.addAll(writer.write(list)); watch.stop(); wroteCount += list.size(); - if 
(LOG.isDebugEnabled()) LOG.debug("Wrote "+wroteCount+" / "+count+" entities"+" in "+watch.getTime()+" ms"); + if (LOG.isDebugEnabled()) { + LOG.debug("Wrote " + wroteCount + " / " + count + " entities" + " in " + watch.getTime() + " ms"); + } list.clear(); } catch (Exception e1) { Assert.fail(e1.getMessage()); @@ -105,11 +107,15 @@ private List writeEntities(int count) { try { rowkeys.addAll(writer.write(list)); wroteCount += list.size(); - if (LOG.isDebugEnabled()) LOG.debug("wrote "+wroteCount+" / "+count+" entities"); + if (LOG.isDebugEnabled()) { + LOG.debug("wrote " + wroteCount + " / " + count + " entities"); + } } catch (Exception e) { Assert.fail(e.getMessage()); } - if (LOG.isDebugEnabled()) LOG.debug("done "+count+" entities"); + if (LOG.isDebugEnabled()) { + LOG.debug("done " + count + " entities"); + } return rowkeys; } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHbaseWritePerformance.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHbaseWritePerformance.java index 81f1dce472..1cd1b27f48 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHbaseWritePerformance.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHbaseWritePerformance.java @@ -16,85 +16,85 @@ */ package org.apache.eagle.log.entity; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - import org.apache.eagle.common.config.EagleConfigFactory; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.junit.Test; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + public class TestHbaseWritePerformance { - public static void main(String[] args) throws IOException { - - HTableInterface tbl = EagleConfigFactory.load().getHTable("unittest"); - - int putSize = 1000; - List list = new 
ArrayList(putSize); - for (int i = 0; i < putSize; ++i) { - byte[] v = Integer.toString(i).getBytes(); - Put p = new Put(v); - p.add("f".getBytes(), "a".getBytes(), 100, v); - list.add(p); - } - - // Case 1 - System.out.println("Case 1: autoflush = true, individual put"); - tbl.setAutoFlush(true); - long startTime = System.currentTimeMillis(); - for (int i = 0; i < 1; ++i) { - for (Put p : list) { - tbl.put(p); - } - tbl.flushCommits(); - } - long endTime = System.currentTimeMillis(); - System.out.println("Case 1: " + (endTime - startTime) + " ms"); - - - // Case 2 - System.out.println("Case 2: autoflush = true, multi-put"); - tbl.setAutoFlush(true); - startTime = System.currentTimeMillis(); - for (int i = 0; i < 1; ++i) { - tbl.put(list); - tbl.flushCommits(); - } - endTime = System.currentTimeMillis(); - System.out.println("Case 2: " + (endTime - startTime) + " ms"); - - - // Case 3 - System.out.println("Case 3: autoflush = false, multi-put"); - tbl.setAutoFlush(false); - startTime = System.currentTimeMillis(); - for (int i = 0; i < 1; ++i) { - tbl.put(list); - tbl.flushCommits(); - } - endTime = System.currentTimeMillis(); - System.out.println("Case 3: " + (endTime - startTime) + " ms"); - - - // Case 4 - System.out.println("Case 4: autoflush = false, individual put"); - tbl.setAutoFlush(true); - startTime = System.currentTimeMillis(); - for (int i = 0; i < 1; ++i) { - for (Put p : list) { - tbl.put(p); - } - tbl.flushCommits(); - } - endTime = System.currentTimeMillis(); - System.out.println("Case 4: " + (endTime - startTime) + " ms"); - - } - - @Test - public void test() { - - } + public static void main(String[] args) throws IOException { + + HTableInterface tbl = EagleConfigFactory.load().getHTable("unittest"); + + int putSize = 1000; + List list = new ArrayList(putSize); + for (int i = 0; i < putSize; ++i) { + byte[] v = Integer.toString(i).getBytes(); + Put p = new Put(v); + p.add("f".getBytes(), "a".getBytes(), 100, v); + list.add(p); + } + + // Case 1 + 
System.out.println("Case 1: autoflush = true, individual put"); + tbl.setAutoFlush(true); + long startTime = System.currentTimeMillis(); + for (int i = 0; i < 1; ++i) { + for (Put p : list) { + tbl.put(p); + } + tbl.flushCommits(); + } + long endTime = System.currentTimeMillis(); + System.out.println("Case 1: " + (endTime - startTime) + " ms"); + + + // Case 2 + System.out.println("Case 2: autoflush = true, multi-put"); + tbl.setAutoFlush(true); + startTime = System.currentTimeMillis(); + for (int i = 0; i < 1; ++i) { + tbl.put(list); + tbl.flushCommits(); + } + endTime = System.currentTimeMillis(); + System.out.println("Case 2: " + (endTime - startTime) + " ms"); + + + // Case 3 + System.out.println("Case 3: autoflush = false, multi-put"); + tbl.setAutoFlush(false); + startTime = System.currentTimeMillis(); + for (int i = 0; i < 1; ++i) { + tbl.put(list); + tbl.flushCommits(); + } + endTime = System.currentTimeMillis(); + System.out.println("Case 3: " + (endTime - startTime) + " ms"); + + + // Case 4 + System.out.println("Case 4: autoflush = false, individual put"); + tbl.setAutoFlush(true); + startTime = System.currentTimeMillis(); + for (int i = 0; i < 1; ++i) { + for (Put p : list) { + tbl.put(p); + } + tbl.flushCommits(); + } + endTime = System.currentTimeMillis(); + System.out.println("Case 4: " + (endTime - startTime) + " ms"); + + } + + @Test + public void test() { + + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestTestLogAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestTestLogAPIEntity.java index 1839a99bec..ba7a299493 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestTestLogAPIEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestTestLogAPIEntity.java @@ -35,371 +35,371 @@ public class TestTestLogAPIEntity extends TestHBaseBase { - @Test - public 
void testGetValue() throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException { - EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - if (ed == null) { - EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); - ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - } - - Assert.assertNotNull(ed); - Assert.assertNotNull(ed.getQualifierGetterMap()); - TestLogAPIEntity e = new TestLogAPIEntity(); - e.setField1(1); - e.setField2(2); - e.setField3(3); - e.setField4(4L); - e.setField5(5.0); - e.setField6(6.0); - e.setField7("7"); - e.setTags(new HashMap()); - e.getTags().put("tag1", "value1"); - - Assert.assertNotNull(ed.getQualifierGetterMap().get("field1")); - Assert.assertEquals(1, ed.getValue(e, "field1")); - Assert.assertEquals(2, ed.getValue(e, "field2")); - Assert.assertEquals(3L, ed.getValue(e, "field3")); - Assert.assertEquals(4L, ed.getValue(e, "field4")); - Assert.assertEquals(5.0, ed.getValue(e, "field5")); - Assert.assertEquals(6.0, ed.getValue(e, "field6")); - Assert.assertEquals("7", ed.getValue(e, "field7")); - Assert.assertEquals("value1", ed.getValue(e, "tag1")); - } - - @Test - public void testIndexDefinition() throws InstantiationException, IllegalAccessException { - - EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - if (ed == null) { - EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); - ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - } - Assert.assertNotNull(ed); - IndexDefinition[] indexDefinitions = ed.getIndexes(); - Assert.assertNotNull(indexDefinitions); - Assert.assertEquals(2, indexDefinitions.length); - for (IndexDefinition def : indexDefinitions) { - Assert.assertNotNull(def.getIndexName()); - Assert.assertNotNull(def.getIndexColumns()); - Assert.assertEquals(1, 
def.getIndexColumns().length); - } - } - - @Test - public void testWriteEmptyIndexFieldAndDeleteWithoutPartition() throws Exception { - EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily()); - - EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - if (ed == null) { - EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); - ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - } - String[] partitions = ed.getPartitions(); - ed.setPartitions(null); - - try { - List list = new ArrayList(); - TestLogAPIEntity e = new TestLogAPIEntity(); - e.setField1(1); - e.setField2(2); - e.setField3(3); - e.setField4(4L); - e.setField5(5.0); - e.setField6(5.0); - e.setField7("7"); - e.setTags(new HashMap()); + @Test + public void testGetValue() throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException { + EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + if (ed == null) { + EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); + ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + } + + Assert.assertNotNull(ed); + Assert.assertNotNull(ed.getQualifierGetterMap()); + TestLogAPIEntity e = new TestLogAPIEntity(); + e.setField1(1); + e.setField2(2); + e.setField3(3); + e.setField4(4L); + e.setField5(5.0); + e.setField6(6.0); + e.setField7("7"); + e.setTags(new HashMap()); + e.getTags().put("tag1", "value1"); + + Assert.assertNotNull(ed.getQualifierGetterMap().get("field1")); + Assert.assertEquals(1, ed.getValue(e, "field1")); + Assert.assertEquals(2, ed.getValue(e, "field2")); + Assert.assertEquals(3L, ed.getValue(e, "field3")); + Assert.assertEquals(4L, ed.getValue(e, "field4")); + 
Assert.assertEquals(5.0, ed.getValue(e, "field5")); + Assert.assertEquals(6.0, ed.getValue(e, "field6")); + Assert.assertEquals("7", ed.getValue(e, "field7")); + Assert.assertEquals("value1", ed.getValue(e, "tag1")); + } + + @Test + public void testIndexDefinition() throws InstantiationException, IllegalAccessException { + + EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + if (ed == null) { + EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); + ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + } + Assert.assertNotNull(ed); + IndexDefinition[] indexDefinitions = ed.getIndexes(); + Assert.assertNotNull(indexDefinitions); + Assert.assertEquals(2, indexDefinitions.length); + for (IndexDefinition def : indexDefinitions) { + Assert.assertNotNull(def.getIndexName()); + Assert.assertNotNull(def.getIndexColumns()); + Assert.assertEquals(1, def.getIndexColumns().length); + } + } + + @Test + public void testWriteEmptyIndexFieldAndDeleteWithoutPartition() throws Exception { + EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily()); + + EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + if (ed == null) { + EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); + ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + } + String[] partitions = ed.getPartitions(); + ed.setPartitions(null); + + try { + List list = new ArrayList(); + TestLogAPIEntity e = new TestLogAPIEntity(); + e.setField1(1); + e.setField2(2); + e.setField3(3); + e.setField4(4L); + e.setField5(5.0); + e.setField6(5.0); + e.setField7("7"); + e.setTags(new HashMap()); e.getTags().put("tag1", "value1"); - list.add(e); - - GenericEntityWriter writer = new 
GenericEntityWriter(ed.getService()); - List result = writer.write(list); - Assert.assertNotNull(result); - - List indexRowkeys = new ArrayList(); - IndexDefinition[] indexDefs = ed.getIndexes(); - for (IndexDefinition index : indexDefs) { - byte[] indexRowkey = index.generateIndexRowkey(e); - indexRowkeys.add(indexRowkey); - } - byte[][] qualifiers = new byte[7][]; - qualifiers[0] = "a".getBytes(); - qualifiers[1] = "b".getBytes(); - qualifiers[2] = "c".getBytes(); - qualifiers[3] = "d".getBytes(); - qualifiers[4] = "e".getBytes(); - qualifiers[5] = "f".getBytes(); - qualifiers[6] = "g".getBytes(); - - UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); - reader.open(); - InternalLog log = reader.read(); - Assert.assertNotNull(log); - - TaggedLogAPIEntity newEntity = HBaseInternalLogHelper.buildEntity(log, ed); - Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); - TestLogAPIEntity e1 = (TestLogAPIEntity)newEntity; - Assert.assertEquals(e.getField1(), e1.getField1()); - Assert.assertEquals(e.getField2(), e1.getField2()); - Assert.assertEquals(e.getField3(), e1.getField3()); - Assert.assertEquals(e.getField4(), e1.getField4()); - Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); - Assert.assertEquals(e.getField6(), e1.getField6()); - Assert.assertEquals(e.getField7(), e1.getField7()); - - log = reader.read(); - Assert.assertNotNull(log); - newEntity = HBaseInternalLogHelper.buildEntity(log, ed); - Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); - e1 = (TestLogAPIEntity)newEntity; - Assert.assertEquals(e.getField1(), e1.getField1()); - Assert.assertEquals(e.getField2(), e1.getField2()); - Assert.assertEquals(e.getField3(), e1.getField3()); - Assert.assertEquals(e.getField4(), e1.getField4()); - Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); - Assert.assertEquals(e.getField6(), e1.getField6()); - Assert.assertEquals(e.getField7(), e1.getField7()); - - log = 
reader.read(); - Assert.assertNull(log); - reader.close(); - - GenericDeleter deleter = new GenericDeleter(ed.getTable(), ed.getColumnFamily()); - deleter.delete(list); - - reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); - reader.open(); - log = reader.read(); - Assert.assertNull(log); - reader.close(); - } finally { - ed.setPartitions(partitions); - } - hbase.deleteTable(entityDefinition.getTable()); - } - - - /* - * testWriteEmptyIndexFieldAndDeleteWithPartition(eagle.log.entity.TestTestLogAPIEntity): expected:<86400000> but was:<0> - */ - //@Test - public void testWriteEmptyIndexFieldAndDeleteWithPartition() throws Exception { + list.add(e); + + GenericEntityWriter writer = new GenericEntityWriter(ed.getService()); + List result = writer.write(list); + Assert.assertNotNull(result); + + List indexRowkeys = new ArrayList(); + IndexDefinition[] indexDefs = ed.getIndexes(); + for (IndexDefinition index : indexDefs) { + byte[] indexRowkey = index.generateIndexRowkey(e); + indexRowkeys.add(indexRowkey); + } + byte[][] qualifiers = new byte[7][]; + qualifiers[0] = "a".getBytes(); + qualifiers[1] = "b".getBytes(); + qualifiers[2] = "c".getBytes(); + qualifiers[3] = "d".getBytes(); + qualifiers[4] = "e".getBytes(); + qualifiers[5] = "f".getBytes(); + qualifiers[6] = "g".getBytes(); + + UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); + reader.open(); + InternalLog log = reader.read(); + Assert.assertNotNull(log); + + TaggedLogAPIEntity newEntity = HBaseInternalLogHelper.buildEntity(log, ed); + Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); + TestLogAPIEntity e1 = (TestLogAPIEntity) newEntity; + Assert.assertEquals(e.getField1(), e1.getField1()); + Assert.assertEquals(e.getField2(), e1.getField2()); + Assert.assertEquals(e.getField3(), e1.getField3()); + Assert.assertEquals(e.getField4(), e1.getField4()); + Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); + 
Assert.assertEquals(e.getField6(), e1.getField6()); + Assert.assertEquals(e.getField7(), e1.getField7()); + + log = reader.read(); + Assert.assertNotNull(log); + newEntity = HBaseInternalLogHelper.buildEntity(log, ed); + Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); + e1 = (TestLogAPIEntity) newEntity; + Assert.assertEquals(e.getField1(), e1.getField1()); + Assert.assertEquals(e.getField2(), e1.getField2()); + Assert.assertEquals(e.getField3(), e1.getField3()); + Assert.assertEquals(e.getField4(), e1.getField4()); + Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); + Assert.assertEquals(e.getField6(), e1.getField6()); + Assert.assertEquals(e.getField7(), e1.getField7()); + + log = reader.read(); + Assert.assertNull(log); + reader.close(); + + GenericDeleter deleter = new GenericDeleter(ed.getTable(), ed.getColumnFamily()); + deleter.delete(list); + + reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); + reader.open(); + log = reader.read(); + Assert.assertNull(log); + reader.close(); + } finally { + ed.setPartitions(partitions); + } + hbase.deleteTable(entityDefinition.getTable()); + } + + + /* + * testWriteEmptyIndexFieldAndDeleteWithPartition(eagle.log.entity.TestTestLogAPIEntity): expected:<86400000> but was:<0> + */ + //@Test + public void testWriteEmptyIndexFieldAndDeleteWithPartition() throws Exception { EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily()); - EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - if (ed == null) { - EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); - ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - } - String[] partitions = ed.getPartitions(); - String[] newPart = new String[2]; - newPart[0] = "cluster"; - newPart[1] = 
"datacenter"; - ed.setPartitions(newPart); - - try { - List list = new ArrayList(); - TestLogAPIEntity e = new TestLogAPIEntity(); - e.setField1(1); - e.setField2(2); - e.setField3(3); - e.setField4(4L); - e.setField5(5.0); - e.setField6(5.0); - e.setField7("7"); - e.setTags(new HashMap()); - e.getTags().put("cluster", "test4UT"); - e.getTags().put("datacenter", "dc1"); - list.add(e); - - GenericEntityWriter writer = new GenericEntityWriter(ed.getService()); - List result = writer.write(list); - Assert.assertNotNull(result); - - List indexRowkeys = new ArrayList(); - IndexDefinition[] indexDefs = ed.getIndexes(); - for (IndexDefinition index : indexDefs) { - byte[] indexRowkey = index.generateIndexRowkey(e); - indexRowkeys.add(indexRowkey); - } - byte[][] qualifiers = new byte[9][]; - qualifiers[0] = "a".getBytes(); - qualifiers[1] = "b".getBytes(); - qualifiers[2] = "c".getBytes(); - qualifiers[3] = "d".getBytes(); - qualifiers[4] = "e".getBytes(); - qualifiers[5] = "f".getBytes(); - qualifiers[6] = "g".getBytes(); - qualifiers[7] = "cluster".getBytes(); - qualifiers[8] = "datacenter".getBytes(); - - UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); - reader.open(); - InternalLog log = reader.read(); - Assert.assertNotNull(log); - - TaggedLogAPIEntity newEntity = HBaseInternalLogHelper.buildEntity(log, ed); - Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); - TestLogAPIEntity e1 = (TestLogAPIEntity)newEntity; - Assert.assertEquals(e.getField1(), e1.getField1()); - Assert.assertEquals(e.getField2(), e1.getField2()); - Assert.assertEquals(e.getField3(), e1.getField3()); - Assert.assertEquals(e.getField4(), e1.getField4()); - Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); - Assert.assertEquals(e.getField6(), e1.getField6()); - Assert.assertEquals(e.getField7(), e1.getField7()); - Assert.assertEquals("test4UT", e1.getTags().get("cluster")); - Assert.assertEquals("dc1", 
e1.getTags().get("datacenter")); - Assert.assertEquals(EntityConstants.FIXED_WRITE_TIMESTAMP, e1.getTimestamp()); - - log = reader.read(); - Assert.assertNotNull(log); - newEntity = HBaseInternalLogHelper.buildEntity(log, ed); - Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); - e1 = (TestLogAPIEntity)newEntity; - Assert.assertEquals(e.getField1(), e1.getField1()); - Assert.assertEquals(e.getField2(), e1.getField2()); - Assert.assertEquals(e.getField3(), e1.getField3()); - Assert.assertEquals(e.getField4(), e1.getField4()); - Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); - Assert.assertEquals(e.getField6(), e1.getField6()); - Assert.assertEquals(e.getField7(), e1.getField7()); - Assert.assertEquals("test4UT", e1.getTags().get("cluster")); - Assert.assertEquals("dc1", e1.getTags().get("datacenter")); - Assert.assertEquals(EntityConstants.FIXED_WRITE_TIMESTAMP, e1.getTimestamp()); - - log = reader.read(); - Assert.assertNull(log); - reader.close(); - - GenericDeleter deleter = new GenericDeleter(ed.getTable(), ed.getColumnFamily()); - deleter.delete(list); - - reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); - reader.open(); - log = reader.read(); - Assert.assertNull(log); - reader.close(); - } finally { - ed.setPartitions(partitions); - } - hbase.deleteTable(entityDefinition.getTable()); - } - - /** - * testWriteEmptyIndexFieldAndDeleteWithPartitionAndTimeSeries(eagle.log.entity.TestTestLogAPIEntity): expected:<1434809555569> but was:<0> - */ - - //@Test - public void testWriteEmptyIndexFieldAndDeleteWithPartitionAndTimeSeries() throws Exception { + EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + if (ed == null) { + EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); + ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + } + String[] partitions = ed.getPartitions(); + String[] newPart = new 
String[2]; + newPart[0] = "cluster"; + newPart[1] = "datacenter"; + ed.setPartitions(newPart); + + try { + List list = new ArrayList(); + TestLogAPIEntity e = new TestLogAPIEntity(); + e.setField1(1); + e.setField2(2); + e.setField3(3); + e.setField4(4L); + e.setField5(5.0); + e.setField6(5.0); + e.setField7("7"); + e.setTags(new HashMap()); + e.getTags().put("cluster", "test4UT"); + e.getTags().put("datacenter", "dc1"); + list.add(e); + + GenericEntityWriter writer = new GenericEntityWriter(ed.getService()); + List result = writer.write(list); + Assert.assertNotNull(result); + + List indexRowkeys = new ArrayList(); + IndexDefinition[] indexDefs = ed.getIndexes(); + for (IndexDefinition index : indexDefs) { + byte[] indexRowkey = index.generateIndexRowkey(e); + indexRowkeys.add(indexRowkey); + } + byte[][] qualifiers = new byte[9][]; + qualifiers[0] = "a".getBytes(); + qualifiers[1] = "b".getBytes(); + qualifiers[2] = "c".getBytes(); + qualifiers[3] = "d".getBytes(); + qualifiers[4] = "e".getBytes(); + qualifiers[5] = "f".getBytes(); + qualifiers[6] = "g".getBytes(); + qualifiers[7] = "cluster".getBytes(); + qualifiers[8] = "datacenter".getBytes(); + + UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); + reader.open(); + InternalLog log = reader.read(); + Assert.assertNotNull(log); + + TaggedLogAPIEntity newEntity = HBaseInternalLogHelper.buildEntity(log, ed); + Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); + TestLogAPIEntity e1 = (TestLogAPIEntity) newEntity; + Assert.assertEquals(e.getField1(), e1.getField1()); + Assert.assertEquals(e.getField2(), e1.getField2()); + Assert.assertEquals(e.getField3(), e1.getField3()); + Assert.assertEquals(e.getField4(), e1.getField4()); + Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); + Assert.assertEquals(e.getField6(), e1.getField6()); + Assert.assertEquals(e.getField7(), e1.getField7()); + Assert.assertEquals("test4UT", 
e1.getTags().get("cluster")); + Assert.assertEquals("dc1", e1.getTags().get("datacenter")); + Assert.assertEquals(EntityConstants.FIXED_WRITE_TIMESTAMP, e1.getTimestamp()); + + log = reader.read(); + Assert.assertNotNull(log); + newEntity = HBaseInternalLogHelper.buildEntity(log, ed); + Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); + e1 = (TestLogAPIEntity) newEntity; + Assert.assertEquals(e.getField1(), e1.getField1()); + Assert.assertEquals(e.getField2(), e1.getField2()); + Assert.assertEquals(e.getField3(), e1.getField3()); + Assert.assertEquals(e.getField4(), e1.getField4()); + Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); + Assert.assertEquals(e.getField6(), e1.getField6()); + Assert.assertEquals(e.getField7(), e1.getField7()); + Assert.assertEquals("test4UT", e1.getTags().get("cluster")); + Assert.assertEquals("dc1", e1.getTags().get("datacenter")); + Assert.assertEquals(EntityConstants.FIXED_WRITE_TIMESTAMP, e1.getTimestamp()); + + log = reader.read(); + Assert.assertNull(log); + reader.close(); + + GenericDeleter deleter = new GenericDeleter(ed.getTable(), ed.getColumnFamily()); + deleter.delete(list); + + reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); + reader.open(); + log = reader.read(); + Assert.assertNull(log); + reader.close(); + } finally { + ed.setPartitions(partitions); + } + hbase.deleteTable(entityDefinition.getTable()); + } + + /** + * testWriteEmptyIndexFieldAndDeleteWithPartitionAndTimeSeries(eagle.log.entity.TestTestLogAPIEntity): expected:<1434809555569> but was:<0> + */ + + //@Test + public void testWriteEmptyIndexFieldAndDeleteWithPartitionAndTimeSeries() throws Exception { EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily()); - EntityDefinition ed = 
EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - if (ed == null) { - EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); - ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - } - String[] partitions = ed.getPartitions(); - String[] newPart = new String[2]; - newPart[0] = "cluster"; - newPart[1] = "datacenter"; - ed.setPartitions(newPart); - boolean isTimeSeries = ed.isTimeSeries(); - ed.setTimeSeries(true); - long now = System.currentTimeMillis(); - - try { - List list = new ArrayList(); - TestLogAPIEntity e = new TestLogAPIEntity(); - e.setField1(1); - e.setField2(2); - e.setField3(3); - e.setField4(4L); - e.setField5(5.0); - e.setField6(5.0); - e.setField7("7"); - e.setTags(new HashMap()); - e.getTags().put("cluster", "test4UT"); - e.getTags().put("datacenter", "dc1"); - e.setTimestamp(now); - list.add(e); - - GenericEntityWriter writer = new GenericEntityWriter(ed.getService()); - List result = writer.write(list); - Assert.assertNotNull(result); - - List indexRowkeys = new ArrayList(); - IndexDefinition[] indexDefs = ed.getIndexes(); - for (IndexDefinition index : indexDefs) { - byte[] indexRowkey = index.generateIndexRowkey(e); - indexRowkeys.add(indexRowkey); - } - byte[][] qualifiers = new byte[9][]; - qualifiers[0] = "a".getBytes(); - qualifiers[1] = "b".getBytes(); - qualifiers[2] = "c".getBytes(); - qualifiers[3] = "d".getBytes(); - qualifiers[4] = "e".getBytes(); - qualifiers[5] = "f".getBytes(); - qualifiers[6] = "g".getBytes(); - qualifiers[7] = "cluster".getBytes(); - qualifiers[8] = "datacenter".getBytes(); - - UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); - reader.open(); - InternalLog log = reader.read(); - Assert.assertNotNull(log); - - TaggedLogAPIEntity newEntity = HBaseInternalLogHelper.buildEntity(log, ed); - Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); - TestLogAPIEntity e1 = 
(TestLogAPIEntity)newEntity; - Assert.assertEquals(e.getField1(), e1.getField1()); - Assert.assertEquals(e.getField2(), e1.getField2()); - Assert.assertEquals(e.getField3(), e1.getField3()); - Assert.assertEquals(e.getField4(), e1.getField4()); - Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); - Assert.assertEquals(e.getField6(), e1.getField6()); - Assert.assertEquals(e.getField7(), e1.getField7()); - Assert.assertEquals("test4UT", e1.getTags().get("cluster")); - Assert.assertEquals("dc1", e1.getTags().get("datacenter")); - Assert.assertEquals(now, e1.getTimestamp()); - - log = reader.read(); - Assert.assertNotNull(log); - newEntity = HBaseInternalLogHelper.buildEntity(log, ed); - Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); - e1 = (TestLogAPIEntity)newEntity; - Assert.assertEquals(e.getField1(), e1.getField1()); - Assert.assertEquals(e.getField2(), e1.getField2()); - Assert.assertEquals(e.getField3(), e1.getField3()); - Assert.assertEquals(e.getField4(), e1.getField4()); - Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); - Assert.assertEquals(e.getField6(), e1.getField6()); - Assert.assertEquals(e.getField7(), e1.getField7()); - Assert.assertEquals("test4UT", e1.getTags().get("cluster")); - Assert.assertEquals("dc1", e1.getTags().get("datacenter")); - Assert.assertEquals(now, e1.getTimestamp()); - - log = reader.read(); - Assert.assertNull(log); - reader.close(); - - GenericDeleter deleter = new GenericDeleter(ed.getTable(), ed.getColumnFamily()); - deleter.delete(list); - - reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); - reader.open(); - log = reader.read(); - Assert.assertNull(log); - reader.close(); - } finally { - ed.setPartitions(partitions); - ed.setTimeSeries(isTimeSeries); - } - hbase.deleteTable(entityDefinition.getTable()); - } + EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + if (ed == null) { + 
EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); + ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + } + String[] partitions = ed.getPartitions(); + String[] newPart = new String[2]; + newPart[0] = "cluster"; + newPart[1] = "datacenter"; + ed.setPartitions(newPart); + boolean isTimeSeries = ed.isTimeSeries(); + ed.setTimeSeries(true); + long now = System.currentTimeMillis(); + + try { + List list = new ArrayList(); + TestLogAPIEntity e = new TestLogAPIEntity(); + e.setField1(1); + e.setField2(2); + e.setField3(3); + e.setField4(4L); + e.setField5(5.0); + e.setField6(5.0); + e.setField7("7"); + e.setTags(new HashMap()); + e.getTags().put("cluster", "test4UT"); + e.getTags().put("datacenter", "dc1"); + e.setTimestamp(now); + list.add(e); + + GenericEntityWriter writer = new GenericEntityWriter(ed.getService()); + List result = writer.write(list); + Assert.assertNotNull(result); + + List indexRowkeys = new ArrayList(); + IndexDefinition[] indexDefs = ed.getIndexes(); + for (IndexDefinition index : indexDefs) { + byte[] indexRowkey = index.generateIndexRowkey(e); + indexRowkeys.add(indexRowkey); + } + byte[][] qualifiers = new byte[9][]; + qualifiers[0] = "a".getBytes(); + qualifiers[1] = "b".getBytes(); + qualifiers[2] = "c".getBytes(); + qualifiers[3] = "d".getBytes(); + qualifiers[4] = "e".getBytes(); + qualifiers[5] = "f".getBytes(); + qualifiers[6] = "g".getBytes(); + qualifiers[7] = "cluster".getBytes(); + qualifiers[8] = "datacenter".getBytes(); + + UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); + reader.open(); + InternalLog log = reader.read(); + Assert.assertNotNull(log); + + TaggedLogAPIEntity newEntity = HBaseInternalLogHelper.buildEntity(log, ed); + Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); + TestLogAPIEntity e1 = (TestLogAPIEntity) newEntity; + Assert.assertEquals(e.getField1(), e1.getField1()); + 
Assert.assertEquals(e.getField2(), e1.getField2()); + Assert.assertEquals(e.getField3(), e1.getField3()); + Assert.assertEquals(e.getField4(), e1.getField4()); + Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); + Assert.assertEquals(e.getField6(), e1.getField6()); + Assert.assertEquals(e.getField7(), e1.getField7()); + Assert.assertEquals("test4UT", e1.getTags().get("cluster")); + Assert.assertEquals("dc1", e1.getTags().get("datacenter")); + Assert.assertEquals(now, e1.getTimestamp()); + + log = reader.read(); + Assert.assertNotNull(log); + newEntity = HBaseInternalLogHelper.buildEntity(log, ed); + Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass()); + e1 = (TestLogAPIEntity) newEntity; + Assert.assertEquals(e.getField1(), e1.getField1()); + Assert.assertEquals(e.getField2(), e1.getField2()); + Assert.assertEquals(e.getField3(), e1.getField3()); + Assert.assertEquals(e.getField4(), e1.getField4()); + Assert.assertEquals(e.getField5(), e1.getField5(), 0.001); + Assert.assertEquals(e.getField6(), e1.getField6()); + Assert.assertEquals(e.getField7(), e1.getField7()); + Assert.assertEquals("test4UT", e1.getTags().get("cluster")); + Assert.assertEquals("dc1", e1.getTags().get("datacenter")); + Assert.assertEquals(now, e1.getTimestamp()); + + log = reader.read(); + Assert.assertNull(log); + reader.close(); + + GenericDeleter deleter = new GenericDeleter(ed.getTable(), ed.getColumnFamily()); + deleter.delete(list); + + reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null); + reader.open(); + log = reader.read(); + Assert.assertNull(log); + reader.close(); + } finally { + ed.setPartitions(partitions); + ed.setTimeSeries(isTimeSeries); + } + hbase.deleteTable(entityDefinition.getTable()); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/base/taggedlog/TestTaggedLogAPIEntity.java 
b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/base/taggedlog/TestTaggedLogAPIEntity.java index d6db6a24ee..bfa79f5bb2 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/base/taggedlog/TestTaggedLogAPIEntity.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/base/taggedlog/TestTaggedLogAPIEntity.java @@ -16,35 +16,33 @@ */ package org.apache.eagle.log.entity.base.taggedlog; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; import org.apache.eagle.log.base.taggedlog.EntityJsonModule; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.meta.Column; -import org.junit.Assert; - import org.codehaus.jackson.map.annotate.JsonSerialize; +import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + public class TestTaggedLogAPIEntity { private static ObjectMapper objectMapper; @BeforeClass - public static void setUp(){ + public static void setUp() { objectMapper = new ObjectMapper(); objectMapper.setFilters(TaggedLogAPIEntity.getFilterProvider()); objectMapper.registerModule(new EntityJsonModule()); } - @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) + @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) private class MockSubTaggedLogAPIEntity extends TaggedLogAPIEntity { public double getField1() { return field1; @@ -72,7 +70,7 @@ public void setField2(String field2) { } @SuppressWarnings("unchecked") - @Test + @Test public void testJsonSerializeFilter() throws IOException { 
MockSubTaggedLogAPIEntity mock = new MockSubTaggedLogAPIEntity(); Assert.assertTrue(mock instanceof TaggedLogAPIEntity); @@ -91,7 +89,7 @@ public void testJsonSerializeFilter() throws IOException { mock.setPrefix("mock"); mock.setField2("ok"); mock.setField1(12.345); - mock.setTags(new HashMap(){{ + mock.setTags(new HashMap() {{ put("tagName", "tagValue"); }}); mock.setExp(new HashMap() {{ @@ -105,9 +103,9 @@ public void testJsonSerializeFilter() throws IOException { @Test public void testJsonSerializeMap() throws JsonProcessingException { - Map,List> entries = new HashMap,List>(){ + Map, List> entries = new HashMap, List>() { { - put(Arrays.asList("a","b"),Arrays.asList(1,2,3)); + put(Arrays.asList("a", "b"), Arrays.asList(1, 2, 3)); } }; String json = objectMapper.writeValueAsString(entries.entrySet()); diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestEntityQualifierHelper.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestEntityQualifierHelper.java index e22c699a56..532d1a8e39 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestEntityQualifierHelper.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestEntityQualifierHelper.java @@ -28,152 +28,153 @@ import java.util.List; /** -* @since : 10/15/14 2014 -*/ + * @since : 10/15/14 2014 + */ public class TestEntityQualifierHelper { - private EntityDefinition ed; - @Before - public void setUp(){ - try { - if(EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity") == null){ - EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); - } - ed = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity"); - } catch (InstantiationException e) { - e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. 
- } catch (IllegalAccessException e) { - e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. - } - } - - @Test - public void testEd(){ - Assert.assertNotNull(ed); - Assert.assertNotNull(ed.getQualifierNameMap().get("a")); - Assert.assertNull(ed.getQualifierNameMap().get("notexist")); - } - - @Test - public void testIntEntityQualifierHelper(){ - byte[] value = EntityQualifierUtils.toBytes(ed, "field1", "2"); - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(1)) > 0); - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(2)) == 0); - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(3)) < 0); - } - - @Test - public void testStringEntityQualifierHelper(){ - byte[] value = EntityQualifierUtils.toBytes(ed, "field7", "xyz"); - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xyy")) > 0); - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xyz")) == 0); - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xzz")) < 0); - - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xy")) > 0); - } - - @Test - public void testDoubleEntityQualifierHelper(){ - byte[] value = EntityQualifierUtils.toBytes(ed, "field5", "1.0"); - Assert.assertTrue(Bytes.compareTo(value,Bytes.toBytes(0.5)) > 0); - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(1.0)) == 0); - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(2.2)) < 0); + private EntityDefinition ed; + + @Before + public void setUp() { + try { + if (EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity") == null) { + EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); + } + ed = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity"); + } catch (InstantiationException e) { + e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. + } catch (IllegalAccessException e) { + e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. 
+ } + } + + @Test + public void testEd() { + Assert.assertNotNull(ed); + Assert.assertNotNull(ed.getQualifierNameMap().get("a")); + Assert.assertNull(ed.getQualifierNameMap().get("notexist")); + } + + @Test + public void testIntEntityQualifierHelper() { + byte[] value = EntityQualifierUtils.toBytes(ed, "field1", "2"); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(1)) > 0); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(2)) == 0); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(3)) < 0); + } + + @Test + public void testStringEntityQualifierHelper() { + byte[] value = EntityQualifierUtils.toBytes(ed, "field7", "xyz"); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xyy")) > 0); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xyz")) == 0); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xzz")) < 0); + + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xy")) > 0); + } + + @Test + public void testDoubleEntityQualifierHelper() { + byte[] value = EntityQualifierUtils.toBytes(ed, "field5", "1.0"); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(0.5)) > 0); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(1.0)) == 0); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(2.2)) < 0); // TODO There is problem with negative double // Assert.assertTrue(Bytes.compareTo(Bytes.toBytes(-0.6),Bytes.toBytes(-0.5)) < 0); - } - - @Test - public void testLongEntityQualifierHelper(){ - byte[] value = EntityQualifierUtils.toBytes(ed, "field4", "100000"); - Assert.assertTrue(Bytes.compareTo(value,Bytes.toBytes(100000l-1l )) > 0); - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(100000l)) == 0); - Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(100000l + 1l)) < 0); - } - - @Test - public void testNegativeLongEntityQualifierHelper(){ - Exception ex = null; - try{ - byte[] value = EntityQualifierUtils.toBytes(ed, "field4", "-100000"); - }catch (IllegalArgumentException e){ - ex = e; - } - 
Assert.assertNull(ex); - } - - @Test - public void testParseAsList(){ - List set = EntityQualifierUtils.parseList("(\"abc1\",\"abc2\")"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("abc1",set.toArray()[0]); - Assert.assertEquals("abc2",set.toArray()[1]); - - set = EntityQualifierUtils.parseList("(1,\"abc2\")"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("1",set.toArray()[0]); - Assert.assertEquals("abc2",set.toArray()[1]); - - set = EntityQualifierUtils.parseList("(-1.5,\"abc2\")"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("-1.5",set.toArray()[0]); - Assert.assertEquals("abc2",set.toArray()[1]); - - set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,abc\")"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("-1.5",set.toArray()[0]); - Assert.assertEquals("-1.5,abc",set.toArray()[1]); - - set = EntityQualifierUtils.parseList("(-1.5,\"\\\"abc\\\"\")"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("-1.5",set.toArray()[0]); - Assert.assertEquals("\"abc\"",set.toArray()[1]); - - set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,\\\"abc\")"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("-1.5",set.toArray()[0]); - Assert.assertEquals("-1.5,\"abc",set.toArray()[1]); - - set = EntityQualifierUtils.parseList("(\"\\\"-1.5\\\",abc1\",\"-1.5,\\\"abc2\")"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("\"-1.5\",abc1",set.toArray()[0]); - Assert.assertEquals("-1.5,\"abc2",set.toArray()[1]); - - set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,\"abc\")"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("-1.5",set.toArray()[0]); - Assert.assertEquals("-1.5,\"abc",set.toArray()[1]); - - set = EntityQualifierUtils.parseList("(\"\\\"value1,part1\\\",\\\"value1,part2\\\"\",\"value2\")"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("\"value1,part1\",\"value1,part2\"",set.toArray()[0]); - Assert.assertEquals("value2",set.toArray()[1]); - - 
//////////////////////////////// - // Bad Format - //////////////////////////////// - set = EntityQualifierUtils.parseList("(\"a,b)"); - Assert.assertEquals(1,set.size()); - Assert.assertEquals("a,b",set.toArray()[0]); - - set = EntityQualifierUtils.parseList("(a,b\")"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("a",set.toArray()[0]); - Assert.assertEquals("b",set.toArray()[1]); - - set = EntityQualifierUtils.parseList("(a\",b)"); - Assert.assertEquals(1,set.size()); - Assert.assertEquals("a\",b",set.toArray()[0]); - - set = EntityQualifierUtils.parseList("(abc,def)"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("abc",set.toArray()[0]); - Assert.assertEquals("def",set.toArray()[1]); - - set = EntityQualifierUtils.parseList("(1.5,def)"); - Assert.assertEquals(2,set.size()); - Assert.assertEquals("1.5",set.toArray()[0]); - Assert.assertEquals("def",set.toArray()[1]); - } + } + + @Test + public void testLongEntityQualifierHelper() { + byte[] value = EntityQualifierUtils.toBytes(ed, "field4", "100000"); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(100000l - 1l)) > 0); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(100000l)) == 0); + Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(100000l + 1l)) < 0); + } + + @Test + public void testNegativeLongEntityQualifierHelper() { + Exception ex = null; + try { + byte[] value = EntityQualifierUtils.toBytes(ed, "field4", "-100000"); + } catch (IllegalArgumentException e) { + ex = e; + } + Assert.assertNull(ex); + } + + @Test + public void testParseAsList() { + List set = EntityQualifierUtils.parseList("(\"abc1\",\"abc2\")"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("abc1", set.toArray()[0]); + Assert.assertEquals("abc2", set.toArray()[1]); + + set = EntityQualifierUtils.parseList("(1,\"abc2\")"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("1", set.toArray()[0]); + Assert.assertEquals("abc2", set.toArray()[1]); + + set = 
EntityQualifierUtils.parseList("(-1.5,\"abc2\")"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("-1.5", set.toArray()[0]); + Assert.assertEquals("abc2", set.toArray()[1]); + + set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,abc\")"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("-1.5", set.toArray()[0]); + Assert.assertEquals("-1.5,abc", set.toArray()[1]); + + set = EntityQualifierUtils.parseList("(-1.5,\"\\\"abc\\\"\")"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("-1.5", set.toArray()[0]); + Assert.assertEquals("\"abc\"", set.toArray()[1]); + + set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,\\\"abc\")"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("-1.5", set.toArray()[0]); + Assert.assertEquals("-1.5,\"abc", set.toArray()[1]); + + set = EntityQualifierUtils.parseList("(\"\\\"-1.5\\\",abc1\",\"-1.5,\\\"abc2\")"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("\"-1.5\",abc1", set.toArray()[0]); + Assert.assertEquals("-1.5,\"abc2", set.toArray()[1]); + + set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,\"abc\")"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("-1.5", set.toArray()[0]); + Assert.assertEquals("-1.5,\"abc", set.toArray()[1]); + + set = EntityQualifierUtils.parseList("(\"\\\"value1,part1\\\",\\\"value1,part2\\\"\",\"value2\")"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("\"value1,part1\",\"value1,part2\"", set.toArray()[0]); + Assert.assertEquals("value2", set.toArray()[1]); + + //////////////////////////////// + // Bad Format + //////////////////////////////// + set = EntityQualifierUtils.parseList("(\"a,b)"); + Assert.assertEquals(1, set.size()); + Assert.assertEquals("a,b", set.toArray()[0]); + + set = EntityQualifierUtils.parseList("(a,b\")"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("a", set.toArray()[0]); + Assert.assertEquals("b", set.toArray()[1]); + + set = EntityQualifierUtils.parseList("(a\",b)"); + 
Assert.assertEquals(1, set.size()); + Assert.assertEquals("a\",b", set.toArray()[0]); + + set = EntityQualifierUtils.parseList("(abc,def)"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("abc", set.toArray()[0]); + Assert.assertEquals("def", set.toArray()[1]); + + set = EntityQualifierUtils.parseList("(1.5,def)"); + Assert.assertEquals(2, set.size()); + Assert.assertEquals("1.5", set.toArray()[0]); + Assert.assertEquals("def", set.toArray()[1]); + } // @Test // public void testEscapeRegExp(){ diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestExpressionComparator.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestExpressionComparator.java index 37adb556e1..f554c22956 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestExpressionComparator.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestExpressionComparator.java @@ -27,7 +27,7 @@ public class TestExpressionComparator { @Test - public void testCompareToForEval(){ + public void testCompareToForEval() { QualifierFilterEntity entity = new QualifierFilterEntity(); // a+b >= a+100.0 entity.setKey("a/b"); @@ -36,36 +36,36 @@ public void testCompareToForEval(){ entity.setValueType(TokenType.EXP); entity.setOp(ComparisonOperator.GREATER_OR_EQUAL); EntityDefinition qualifierDisplayNameMap = null; - BooleanExpressionComparator comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap); + BooleanExpressionComparator comparator = new BooleanExpressionComparator(entity, qualifierDisplayNameMap); - Map context = new HashMap(); - Assert.assertEquals("Should return 0 because not given enough variable",0,comparator.compareTo(context)); + Map context = new HashMap(); + Assert.assertEquals("Should return 0 because not given enough variable", 0, comparator.compareTo(context)); 
context.put("a", 80.0); - context.put("b",20.0); - context.put("c",3.0); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("b", 20.0); + context.put("c", 3.0); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",80.0); - context.put("b",20.0); - context.put("c",4.0); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", 80.0); + context.put("b", 20.0); + context.put("c", 4.0); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",80.0); - context.put("b",20.0); - context.put("c",5.0); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", 80.0); + context.put("b", 20.0); + context.put("c", 5.0); + Assert.assertEquals(0, comparator.compareTo(context)); // Return false once any Double.isInfinite ( 80.0 / 0.0 ) - Assert.assertTrue(Double.isInfinite( 80.0 / 0.0 )); - context.put("a",80.0); - context.put("b",0.0); + Assert.assertTrue(Double.isInfinite(80.0 / 0.0)); + context.put("a", 80.0); + context.put("b", 0.0); context.put("c", 5.0); - Assert.assertEquals(0,comparator.compareTo(context)); + Assert.assertEquals(0, comparator.compareTo(context)); } @Test - public void testCompareToForOp(){ + public void testCompareToForOp() { QualifierFilterEntity entity = new QualifierFilterEntity(); // a+b >= a+100.0 @@ -74,122 +74,122 @@ public void testCompareToForOp(){ entity.setOp(ComparisonOperator.GREATER_OR_EQUAL); EntityDefinition qualifierDisplayNameMap = new EntityDefinition(); - BooleanExpressionComparator comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap); + BooleanExpressionComparator comparator = new BooleanExpressionComparator(entity, qualifierDisplayNameMap); - Map context = new HashMap(); - context.put("a",100.1); - context.put("b",100.1); - Assert.assertEquals(1,comparator.compareTo(context)); + Map context = new HashMap(); + context.put("a", 100.1); + context.put("b", 100.1); + Assert.assertEquals(1, comparator.compareTo(context)); - 
context.put("a",100.1); - context.put("b",100.0); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", 100.1); + context.put("b", 100.0); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",100.0); - context.put("b",99.9); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", 100.0); + context.put("b", 99.9); + Assert.assertEquals(0, comparator.compareTo(context)); - context.put("a",-200.0); - context.put("b",100.0); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", -200.0); + context.put("b", 100.0); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",-200.0); - context.put("b",-100.0); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", -200.0); + context.put("b", -100.0); + Assert.assertEquals(0, comparator.compareTo(context)); // a+b = a+100.0 entity.setOp(ComparisonOperator.GREATER); - comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap); + comparator = new BooleanExpressionComparator(entity, qualifierDisplayNameMap); - context.put("a",100.1); - context.put("b",100.1); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", 100.1); + context.put("b", 100.1); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",100.1); - context.put("b",100.0); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", 100.1); + context.put("b", 100.0); + Assert.assertEquals(0, comparator.compareTo(context)); - context.put("a",100.0); - context.put("b",99.9); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", 100.0); + context.put("b", 99.9); + Assert.assertEquals(0, comparator.compareTo(context)); - context.put("a",-200.0); - context.put("b",100.0); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", -200.0); + context.put("b", 100.0); + Assert.assertEquals(0, comparator.compareTo(context)); - 
context.put("a",-200.0); - context.put("b",-100.0); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", -200.0); + context.put("b", -100.0); + Assert.assertEquals(0, comparator.compareTo(context)); // a+b = a+100.0 entity.setOp(ComparisonOperator.LESS); - comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap); + comparator = new BooleanExpressionComparator(entity, qualifierDisplayNameMap); - context.put("a",100.1); - context.put("b",100.1); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", 100.1); + context.put("b", 100.1); + Assert.assertEquals(0, comparator.compareTo(context)); - context.put("a",100.1); - context.put("b",100.0); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", 100.1); + context.put("b", 100.0); + Assert.assertEquals(0, comparator.compareTo(context)); - context.put("a",100.0); - context.put("b",99.9); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", 100.0); + context.put("b", 99.9); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",-200.0); - context.put("b",100.0); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", -200.0); + context.put("b", 100.0); + Assert.assertEquals(0, comparator.compareTo(context)); - context.put("a",-200.0); - context.put("b",-100.0); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", -200.0); + context.put("b", -100.0); + Assert.assertEquals(1, comparator.compareTo(context)); // a+b <= a+100.0 entity.setOp(ComparisonOperator.LESS_OR_EQUAL); - comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap); + comparator = new BooleanExpressionComparator(entity, qualifierDisplayNameMap); - context.put("a",100.1); - context.put("b",100.1); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", 100.1); + context.put("b", 100.1); + Assert.assertEquals(0, comparator.compareTo(context)); - 
context.put("a",100.1); - context.put("b",100.0); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", 100.1); + context.put("b", 100.0); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",100.0); - context.put("b",99.9); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", 100.0); + context.put("b", 99.9); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",-200.0); - context.put("b",100.0); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", -200.0); + context.put("b", 100.0); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",-200.0); - context.put("b",-100.0); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", -200.0); + context.put("b", -100.0); + Assert.assertEquals(1, comparator.compareTo(context)); entity.setOp(ComparisonOperator.NOT_EQUAL); - comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap); + comparator = new BooleanExpressionComparator(entity, qualifierDisplayNameMap); - context.put("a",100.1); - context.put("b",100.1); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", 100.1); + context.put("b", 100.1); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",100.1); - context.put("b",100.0); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", 100.1); + context.put("b", 100.0); + Assert.assertEquals(0, comparator.compareTo(context)); - context.put("a",100.0); - context.put("b",99.9); - Assert.assertEquals(1,comparator.compareTo(context)); + context.put("a", 100.0); + context.put("b", 99.9); + Assert.assertEquals(1, comparator.compareTo(context)); - context.put("a",-200.0); - context.put("b",100.0); - Assert.assertEquals(0,comparator.compareTo(context)); + context.put("a", -200.0); + context.put("b", 100.0); + Assert.assertEquals(0, comparator.compareTo(context)); - context.put("a",-200.0); + 
context.put("a", -200.0); context.put("b", -100.0); - Assert.assertEquals(1,comparator.compareTo(context)); + Assert.assertEquals(1, comparator.compareTo(context)); } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestHBaseFilterBuilder.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestHBaseFilterBuilder.java index 30e452396e..b96b378a87 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestHBaseFilterBuilder.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestHBaseFilterBuilder.java @@ -32,251 +32,251 @@ import org.slf4j.LoggerFactory; public class TestHBaseFilterBuilder { - private final static Logger LOG = LoggerFactory.getLogger(TestHBaseFilterBuilder.class); - private EntityDefinition ed; + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseFilterBuilder.class); + private EntityDefinition ed; - private Filter buildFilter(String query) throws EagleQueryParseException { - ORExpression expression = new EagleQueryParser(query).parse(); - HBaseFilterBuilder builder = new HBaseFilterBuilder(ed,expression); - Filter filterList = builder.buildFilters(); - LOG.info("\n" + expression + " \n=> " + filterList); - return filterList; - } + private Filter buildFilter(String query) throws EagleQueryParseException { + ORExpression expression = new EagleQueryParser(query).parse(); + HBaseFilterBuilder builder = new HBaseFilterBuilder(ed, expression); + Filter filterList = builder.buildFilters(); + LOG.info("\n" + expression + " \n=> " + filterList); + return filterList; + } - @Before - public void setUp(){ - try { - ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - if(ed == null){ - EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); - ed = 
EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); - } - } catch (InstantiationException e) { - Assert.fail(e.getMessage()); - } catch (IllegalAccessException e) { - Assert.fail(e.getMessage()); - } - } + @Before + public void setUp() { + try { + ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + if (ed == null) { + EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); + ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class); + } + } catch (InstantiationException e) { + Assert.fail(e.getMessage()); + } catch (IllegalAccessException e) { + Assert.fail(e.getMessage()); + } + } - /** - * Should success without exception - */ - @Test - public void testQueryParseAndBuildFilterSuccess(){ - String[] queries = new String[]{ - "@cluster = \"cluster1\" and @datacenter = \"dc1\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"job_1234\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"PigLatin: \\\"quoted_pig_job_name_value\\\"\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job_1234\",\"job_4567\")", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (1234,\"job_4567\")", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (1234,\"sample job name: \\\"quoted_job_name_value\\\"\")", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID CONTAINS \"job_1234\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID CONTAINS job_1234", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID NOT CONTAINS \"job_456\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is \"job_789\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not \"job_789\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is null", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not null", - "@cluster = 
\"cluster1\" and @datacenter = \"dc1\" and @jobID is NULL", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not NULL", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = NULL", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID != null", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \".*job_1234.*\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID !=~ \".*job_1234.*\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID !=~ \"\\\\|_\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 ", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field3 = 100000", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 = 1.56", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 > 1.56", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 >= 1.56", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 < 1.56", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 <= 1.56", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < 100000)\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 in (\"100000\",\"1\"))\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 in (\"100000\",\"1\"))\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field7 in (\"\\\"value1-part1,value1-part2\\\"\",\"value2\"))\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 not in (\"100000\",\"1\"))\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 NOT IN (\"100000\",\"1\"))\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field7 NOT IN 
(\"\\\"value1-part1,value1-part2\\\"\",\"value2\"))\"", - // expression filter - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > 12", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field5 > EXP{field3/field7 - field2}", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > EXP{field1 * field2}", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > EXP{field1 * field2}", - }; - for(String query: queries){ - try { - Filter filter = buildFilter(query); - Assert.assertNotNull(filter); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } catch (Exception ex){ - Assert.fail(ex.getMessage()); - } - } - } + /** + * Should success without exception + */ + @Test + public void testQueryParseAndBuildFilterSuccess() { + String[] queries = new String[] { + "@cluster = \"cluster1\" and @datacenter = \"dc1\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"job_1234\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"PigLatin: \\\"quoted_pig_job_name_value\\\"\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job_1234\",\"job_4567\")", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (1234,\"job_4567\")", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (1234,\"sample job name: \\\"quoted_job_name_value\\\"\")", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID CONTAINS \"job_1234\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID CONTAINS job_1234", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID NOT CONTAINS \"job_456\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is \"job_789\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not \"job_789\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is null", + "@cluster = \"cluster1\" and @datacenter 
= \"dc1\" and @jobID is not null", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is NULL", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not NULL", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = NULL", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID != null", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \".*job_1234.*\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID !=~ \".*job_1234.*\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID !=~ \"\\\\|_\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 ", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field3 = 100000", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 = 1.56", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 > 1.56", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 >= 1.56", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 < 1.56", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 <= 1.56", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < 100000)\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 in (\"100000\",\"1\"))\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 in (\"100000\",\"1\"))\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field7 in (\"\\\"value1-part1,value1-part2\\\"\",\"value2\"))\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 not in (\"100000\",\"1\"))\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 NOT IN (\"100000\",\"1\"))\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or 
@field7 NOT IN (\"\\\"value1-part1,value1-part2\\\"\",\"value2\"))\"", + // expression filter + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > 12", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field5 > EXP{field3/field7 - field2}", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > EXP{field1 * field2}", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > EXP{field1 * field2}", + }; + for (String query : queries) { + try { + Filter filter = buildFilter(query); + Assert.assertNotNull(filter); + } catch (EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } catch (Exception ex) { + Assert.fail(ex.getMessage()); + } + } + } - /** - * Should throw exception - */ - @Test - public void testNegativeQueryParseSuccessfullyButBuildFilterFailed(){ - String[] queries = new String[]{ - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag < \"job_1234\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag <= \"job_1234\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag >= \"job_1234\"", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 < null", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 <= null", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 > NULL", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 >= NULL", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 =~ NULL", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 !=~ NULL", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 contains NULL", - "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 not contains NULL" - }; - for(String query: queries){ - try { - @SuppressWarnings("unused") - Filter filter = buildFilter(query); - Assert.fail("Should throw exception: "+query); - } catch (IllegalArgumentException e) { - LOG.info("Expect exception: 
" + e.getMessage()); - } catch (EagleQueryParseException e) { - Assert.fail("Should parse successfully: "+query); - } - } - } + /** + * Should throw exception + */ + @Test + public void testNegativeQueryParseSuccessfullyButBuildFilterFailed() { + String[] queries = new String[] { + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag < \"job_1234\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag <= \"job_1234\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag >= \"job_1234\"", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 < null", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 <= null", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 > NULL", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 >= NULL", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 =~ NULL", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 !=~ NULL", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 contains NULL", + "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 not contains NULL" + }; + for (String query : queries) { + try { + @SuppressWarnings("unused") + Filter filter = buildFilter(query); + Assert.fail("Should throw exception: " + query); + } catch (IllegalArgumentException e) { + LOG.info("Expect exception: " + e.getMessage()); + } catch (EagleQueryParseException e) { + Assert.fail("Should parse successfully: " + query); + } + } + } - @Test - public void testParsedFilter(){ - String q1 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field3 = 100000"; - try { - FilterList filterList = (FilterList) buildFilter(q1); - Assert.assertEquals(FilterList.Operator.MUST_PASS_ONE,filterList.getOperator()); - Assert.assertEquals(1,filterList.getFilters().size()); - Assert.assertEquals(2,((FilterList) filterList.getFilters().get(0)).getFilters().size()); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } 
+ @Test + public void testParsedFilter() { + String q1 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field3 = 100000"; + try { + FilterList filterList = (FilterList) buildFilter(q1); + Assert.assertEquals(FilterList.Operator.MUST_PASS_ONE, filterList.getOperator()); + Assert.assertEquals(1, filterList.getFilters().size()); + Assert.assertEquals(2, ((FilterList) filterList.getFilters().get(0)).getFilters().size()); + } catch (EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } - String q2 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < 100000)"; - try { - FilterList filterList = (FilterList) buildFilter(q2); - Assert.assertEquals(FilterList.Operator.MUST_PASS_ONE,filterList.getOperator()); - Assert.assertEquals(2,filterList.getFilters().size()); - Assert.assertEquals(2,((FilterList) filterList.getFilters().get(0)).getFilters().size()); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } + String q2 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < 100000)"; + try { + FilterList filterList = (FilterList) buildFilter(q2); + Assert.assertEquals(FilterList.Operator.MUST_PASS_ONE, filterList.getOperator()); + Assert.assertEquals(2, filterList.getFilters().size()); + Assert.assertEquals(2, ((FilterList) filterList.getFilters().get(0)).getFilters().size()); + } catch (EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } - // Test parse success but bad type of value - String q3 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < \"bad_int_100000\")"; - boolean q3Ex = false; - try { - Assert.assertNull(buildFilter(q3)); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } catch (IllegalArgumentException e){ - LOG.debug("Expect: ", e); - Assert.assertTrue(e.getCause() instanceof NumberFormatException); - q3Ex = true; - } - Assert.assertTrue(q3Ex); - } + // Test parse success but 
bad type of value + String q3 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < \"bad_int_100000\")"; + boolean q3Ex = false; + try { + Assert.assertNull(buildFilter(q3)); + } catch (EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } catch (IllegalArgumentException e) { + LOG.debug("Expect: ", e); + Assert.assertTrue(e.getCause() instanceof NumberFormatException); + q3Ex = true; + } + Assert.assertTrue(q3Ex); + } - @Test - public void testWithUnescapedString(){ - /////////////////////////////////// - // Tag filter with IN or EQUAL - // Should use RowKeyFilter only - /////////////////////////////////// - String query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"job.1234\""; - try { - FilterList filter = (FilterList) buildFilter(query); - Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); - Assert.assertFalse("Should use rowkey filter only",filter.toString().matches(".*job.1234.*")); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } catch (Exception ex){ - Assert.fail(ex.getMessage()); - } + @Test + public void testWithUnescapedString() { + /////////////////////////////////// + // Tag filter with IN or EQUAL + // Should use RowKeyFilter only + /////////////////////////////////// + String query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"job.1234\""; + try { + FilterList filter = (FilterList) buildFilter(query); + Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); + Assert.assertFalse("Should use rowkey filter only", filter.toString().matches(".*job.1234.*")); + } catch (EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } catch (Exception ex) { + Assert.fail(ex.getMessage()); + } - query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job_1234\")"; - try { - FilterList filter = (FilterList) 
buildFilter(query); - Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); - Assert.assertFalse("Should use rowkey filter only",filter.toString().matches(".*job_1234.*")); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } catch (Exception ex){ - Assert.fail(ex.getMessage()); - } + query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job_1234\")"; + try { + FilterList filter = (FilterList) buildFilter(query); + Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); + Assert.assertFalse("Should use rowkey filter only", filter.toString().matches(".*job_1234.*")); + } catch (EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } catch (Exception ex) { + Assert.fail(ex.getMessage()); + } - query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job.1234\")"; - try { - FilterList filter = (FilterList) buildFilter(query); - Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); - Assert.assertFalse("Should use rowkey filter only",filter.toString().matches(".*job.*1234.*")); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } catch (Exception ex){ - Assert.fail(ex.getMessage()); - } + query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job.1234\")"; + try { + FilterList filter = (FilterList) buildFilter(query); + Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); + Assert.assertFalse("Should use rowkey filter only", filter.toString().matches(".*job.*1234.*")); + } catch (EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } catch (Exception ex) { + Assert.fail(ex.getMessage()); + } - /////////////////////////////// - // Tag with other operators - /////////////////////////////// - query = "@cluster = 
\"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \"job_1234\""; + /////////////////////////////// + // Tag with other operators + /////////////////////////////// + query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \"job_1234\""; - try { - FilterList filter = (FilterList) buildFilter(query); - Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); - Assert.assertTrue(filter.toString().matches(".*job_1234.*")); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } catch (Exception ex){ - Assert.fail(ex.getMessage()); - } + try { + FilterList filter = (FilterList) buildFilter(query); + Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); + Assert.assertTrue(filter.toString().matches(".*job_1234.*")); + } catch (EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } catch (Exception ex) { + Assert.fail(ex.getMessage()); + } - query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \"job.1234\""; + query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \"job.1234\""; - try { - FilterList filter = (FilterList) buildFilter(query); - Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); - Assert.assertTrue(filter.toString().matches(".*job.1234.*")); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } catch (Exception ex){ - Assert.fail(ex.getMessage()); - } + try { + FilterList filter = (FilterList) buildFilter(query); + Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); + Assert.assertTrue(filter.toString().matches(".*job.1234.*")); + } catch (EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } catch (Exception ex) { + Assert.fail(ex.getMessage()); + } - /////////////////////////////// - // Tag with IN - // 
Should escape regexp chars - /////////////////////////////// - query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field7 = \"job_1234\""; + /////////////////////////////// + // Tag with IN + // Should escape regexp chars + /////////////////////////////// + query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field7 = \"job_1234\""; - try { - FilterList filter = (FilterList) buildFilter(query); - Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); - Assert.assertTrue(filter.toString().matches(".*job_1234.*")); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } catch (Exception ex){ - ex.printStackTrace(); - Assert.fail(ex.getMessage()); - } + try { + FilterList filter = (FilterList) buildFilter(query); + Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); + Assert.assertTrue(filter.toString().matches(".*job_1234.*")); + } catch (EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } catch (Exception ex) { + ex.printStackTrace(); + Assert.fail(ex.getMessage()); + } - query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field7 in (\"job.1234\",\"others\")"; + query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field7 in (\"job.1234\",\"others\")"; - try { - FilterList filter = (FilterList) buildFilter(query); - Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); - Assert.assertTrue(filter.toString().matches(".*job\\.1234.*")); - } catch (EagleQueryParseException e) { - Assert.fail(e.getMessage()); - } catch (Exception ex){ - Assert.fail(ex.getMessage()); - } - } + try { + FilterList filter = (FilterList) buildFilter(query); + Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass()); + Assert.assertTrue(filter.toString().matches(".*job\\.1234.*")); + } catch 
(EagleQueryParseException e) { + Assert.fail(e.getMessage()); + } catch (Exception ex) { + Assert.fail(ex.getMessage()); + } + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestTypedByteArrayComparator.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestTypedByteArrayComparator.java index 1c0f4165fb..fa07c88c61 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestTypedByteArrayComparator.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestTypedByteArrayComparator.java @@ -28,23 +28,23 @@ */ public class TestTypedByteArrayComparator { @Test - public void testCompare(){ + public void testCompare() { EntitySerDeser serDeser = new DoubleSerDeser(); - TypedByteArrayComparator comparator = new TypedByteArrayComparator(serDeser.serialize(0.9),serDeser.type()); + TypedByteArrayComparator comparator = new TypedByteArrayComparator(serDeser.serialize(0.9), serDeser.type()); Assert.assertTrue(comparator.compareTo(serDeser.serialize(0.8)) > 0); Assert.assertTrue(comparator.compareTo(serDeser.serialize(1.1)) < 0); Assert.assertTrue(comparator.compareTo(serDeser.serialize(0.9)) == 0); - Assert.assertTrue(comparator.compareTo(serDeser.serialize(- 0.9)) > 0); + Assert.assertTrue(comparator.compareTo(serDeser.serialize(-0.9)) > 0); serDeser = new IntSerDeser(); - comparator = new TypedByteArrayComparator(serDeser.serialize(9),serDeser.type()); + comparator = new TypedByteArrayComparator(serDeser.serialize(9), serDeser.type()); Assert.assertTrue(comparator.compareTo(serDeser.serialize(8)) > 0); Assert.assertTrue(comparator.compareTo(serDeser.serialize(11)) < 0); Assert.assertTrue(comparator.compareTo(serDeser.serialize(9)) == 0); Assert.assertTrue(comparator.compareTo(serDeser.serialize(-9)) > 0); serDeser = new LongSerDeser(); - comparator = new 
TypedByteArrayComparator(serDeser.serialize(9l),serDeser.type()); + comparator = new TypedByteArrayComparator(serDeser.serialize(9l), serDeser.type()); Assert.assertTrue(comparator.compareTo(serDeser.serialize(8l)) > 0); Assert.assertTrue(comparator.compareTo(serDeser.serialize(11l)) < 0); Assert.assertTrue(comparator.compareTo(serDeser.serialize(9l)) == 0); @@ -52,10 +52,10 @@ public void testCompare(){ } @Test - public void testClassName(){ - Assert.assertEquals("long",long.class.getName()); + public void testClassName() { + Assert.assertEquals("long", long.class.getName()); Assert.assertEquals("java.lang.Long", Long.class.getName()); - Assert.assertEquals("long",long.class.toString()); + Assert.assertEquals("long", long.class.toString()); Assert.assertEquals("class java.lang.Long", Long.class.toString()); } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestArraySerDeser.java index 98db12f022..dae2bd0e6a 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestArraySerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestArraySerDeser.java @@ -17,48 +17,47 @@ package org.apache.eagle.log.entity.meta; import org.junit.Assert; - import org.junit.Test; public class TestArraySerDeser { - - @Test - public void testIntArraySerDeser(){ - IntArraySerDeser serDeser = new IntArraySerDeser(); - int[] ints = new int[] {1, 34, 21, 82}; - byte[] bytes = serDeser.serialize(ints); - Assert.assertEquals((ints.length+1)*4, bytes.length); - int[] targets = serDeser.deserialize(bytes); - Assert.assertEquals(ints.length, targets.length); - for(int i=0; i sources = new ArrayList(); + sources.add("value1"); + sources.add("value2"); + sources.add("value3"); -public class TestListSerDeser { + byte[] bytes = 
serDeser.serialize(sources); + Assert.assertEquals(4 + sources.size() * 8 + 18, bytes.length); + List targets = serDeser.deserialize(bytes); + Assert.assertEquals(sources.size(), targets.size()); + + Assert.assertTrue(targets.contains("value1")); + Assert.assertTrue(targets.contains("value2")); + Assert.assertTrue(targets.contains("value3")); + } + + + @SuppressWarnings("rawtypes") + @Test + public void testIntegerMapSerDeser() { + ListSerDeser serDeser = new ListSerDeser(); + List sources = new ArrayList(); + sources.add(1); + sources.add(2); + sources.add(3); + + byte[] bytes = serDeser.serialize(sources); + Assert.assertEquals(4 + sources.size() * 8 + 12, bytes.length); + List targets = serDeser.deserialize(bytes); + Assert.assertEquals(sources.size(), targets.size()); + + Assert.assertTrue(targets.contains(1)); + Assert.assertTrue(targets.contains(2)); + Assert.assertTrue(targets.contains(3)); + } + + + @SuppressWarnings( {"rawtypes", "unchecked"}) + @Test + public void testListListSerDeser() { + ListSerDeser serDeser = new ListSerDeser(); + List> sources = new ArrayList>(); + List list1 = new ArrayList(); + list1.add("value1"); + list1.add("value2"); + list1.add("value3"); + sources.add(list1); + + List list2 = new ArrayList(); + list2.add("value4"); + list2.add("value5"); + sources.add(list2); + + byte[] bytes = serDeser.serialize(sources); + List targets = serDeser.deserialize(bytes); + Assert.assertEquals(sources.size(), targets.size()); + + list1 = (List) targets.get(0); + Assert.assertNotNull(list1); + Assert.assertEquals(3, list1.size()); + Assert.assertTrue(list1.contains("value1")); + Assert.assertTrue(list1.contains("value2")); + Assert.assertTrue(list1.contains("value3")); - @SuppressWarnings("rawtypes") - @Test - public void testStringListSerDeser() { - ListSerDeser serDeser = new ListSerDeser(); - List sources = new ArrayList(); - sources.add("value1"); - sources.add("value2"); - sources.add("value3"); - - byte[] bytes = 
serDeser.serialize(sources); - Assert.assertEquals(4 + sources.size() * 8 + 18, bytes.length); - List targets = serDeser.deserialize(bytes); - Assert.assertEquals(sources.size(), targets.size()); - - Assert.assertTrue(targets.contains("value1")); - Assert.assertTrue(targets.contains("value2")); - Assert.assertTrue(targets.contains("value3")); - } - - - @SuppressWarnings("rawtypes") - @Test - public void testIntegerMapSerDeser() { - ListSerDeser serDeser = new ListSerDeser(); - List sources = new ArrayList(); - sources.add(1); - sources.add(2); - sources.add(3); - - byte[] bytes = serDeser.serialize(sources); - Assert.assertEquals(4 + sources.size() * 8 + 12, bytes.length); - List targets = serDeser.deserialize(bytes); - Assert.assertEquals(sources.size(), targets.size()); - - Assert.assertTrue(targets.contains(1)); - Assert.assertTrue(targets.contains(2)); - Assert.assertTrue(targets.contains(3)); - } - - - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Test - public void testListListSerDeser() { - ListSerDeser serDeser = new ListSerDeser(); - List> sources = new ArrayList>(); - List list1 = new ArrayList(); - list1.add("value1"); - list1.add("value2"); - list1.add("value3"); - sources.add(list1); - - List list2 = new ArrayList(); - list2.add("value4"); - list2.add("value5"); - sources.add(list2); - - byte[] bytes = serDeser.serialize(sources); - List targets = serDeser.deserialize(bytes); - Assert.assertEquals(sources.size(), targets.size()); - - list1 = (List)targets.get(0); - Assert.assertNotNull(list1); - Assert.assertEquals(3, list1.size()); - Assert.assertTrue(list1.contains("value1")); - Assert.assertTrue(list1.contains("value2")); - Assert.assertTrue(list1.contains("value3")); - - list2 = (List)targets.get(1); - Assert.assertNotNull(list2); - Assert.assertEquals(2, list2.size()); - Assert.assertTrue(list2.contains("value4")); - Assert.assertTrue(list2.contains("value5")); - } + list2 = (List) targets.get(1); + Assert.assertNotNull(list2); + 
Assert.assertEquals(2, list2.size()); + Assert.assertTrue(list2.contains("value4")); + Assert.assertTrue(list2.contains("value5")); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestMapSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestMapSerDeser.java index 96051f4c3d..dc963cf1ce 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestMapSerDeser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestMapSerDeser.java @@ -16,91 +16,90 @@ */ package org.apache.eagle.log.entity.meta; +import org.junit.Assert; +import org.junit.Test; + import java.util.HashMap; import java.util.Map; -import org.junit.Assert; +public class TestMapSerDeser { -import org.junit.Test; + @SuppressWarnings("rawtypes") + @Test + public void testStringToStringMapSerDeser() { + MapSerDeser serDeser = new MapSerDeser(); + Map sources = new HashMap(); + sources.put("test1", "value1"); + sources.put("test2", null); + sources.put("test3", "value3"); -public class TestMapSerDeser { + byte[] bytes = serDeser.serialize(sources); + Assert.assertEquals(4 + sources.size() * 16 + 27, bytes.length); + Map targets = serDeser.deserialize(bytes); + Assert.assertEquals(sources.size(), targets.size()); + + Assert.assertEquals("value1", targets.get("test1")); + Assert.assertNull(targets.get("test2")); + Assert.assertEquals("value3", targets.get("test3")); + } + + + @SuppressWarnings("rawtypes") + @Test + public void testStringToIntegerMapSerDeser() { + MapSerDeser serDeser = new MapSerDeser(); + Map sources = new HashMap(); + sources.put("test1", 1); + sources.put("test2", null); + sources.put("test3", 3); + + byte[] bytes = serDeser.serialize(sources); + Assert.assertEquals(4 + sources.size() * 16 + 23, bytes.length); + Map targets = serDeser.deserialize(bytes); + Assert.assertEquals(sources.size(), 
targets.size()); + + Assert.assertEquals(1, targets.get("test1")); + Assert.assertNull(targets.get("test2")); + Assert.assertEquals(3, targets.get("test3")); + } + + + @SuppressWarnings( {"rawtypes", "unchecked"}) + @Test + public void testStringToMapMapSerDeser() { + MapSerDeser serDeser = new MapSerDeser(); + Map> sources = new HashMap>(); + Map map1 = new HashMap(); + map1.put("key11", "value11"); + map1.put("key12", null); + map1.put("key13", "value13"); + sources.put("test1", map1); + sources.put("test2", null); + Map map3 = new HashMap(); + map3.put("key31", "value31"); + map3.put("key32", null); + map3.put("key33", "value33"); + sources.put("test3", map3); + + byte[] bytes = serDeser.serialize(sources); + Map targets = serDeser.deserialize(bytes); + Assert.assertEquals(sources.size(), targets.size()); + + map1 = (Map) targets.get("test1"); + Assert.assertNotNull(map1); + Assert.assertEquals(3, map1.size()); + Assert.assertEquals("value11", map1.get("key11")); + Assert.assertNull(map1.get("key12")); + Assert.assertEquals("value13", map1.get("key13")); + + Assert.assertNull(targets.get("test2")); - @SuppressWarnings("rawtypes") - @Test - public void testStringToStringMapSerDeser() { - MapSerDeser serDeser = new MapSerDeser(); - Map sources = new HashMap(); - sources.put("test1", "value1"); - sources.put("test2", null); - sources.put("test3", "value3"); - - byte[] bytes = serDeser.serialize(sources); - Assert.assertEquals(4 + sources.size() * 16 + 27, bytes.length); - Map targets = serDeser.deserialize(bytes); - Assert.assertEquals(sources.size(), targets.size()); - - Assert.assertEquals("value1", targets.get("test1")); - Assert.assertNull(targets.get("test2")); - Assert.assertEquals("value3", targets.get("test3")); - } - - - @SuppressWarnings("rawtypes") - @Test - public void testStringToIntegerMapSerDeser() { - MapSerDeser serDeser = new MapSerDeser(); - Map sources = new HashMap(); - sources.put("test1", 1); - sources.put("test2", null); - 
sources.put("test3", 3); - - byte[] bytes = serDeser.serialize(sources); - Assert.assertEquals(4 + sources.size() * 16 + 23, bytes.length); - Map targets = serDeser.deserialize(bytes); - Assert.assertEquals(sources.size(), targets.size()); - - Assert.assertEquals(1, targets.get("test1")); - Assert.assertNull(targets.get("test2")); - Assert.assertEquals(3, targets.get("test3")); - } - - - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Test - public void testStringToMapMapSerDeser() { - MapSerDeser serDeser = new MapSerDeser(); - Map> sources = new HashMap>(); - Map map1 = new HashMap(); - map1.put("key11", "value11"); - map1.put("key12", null); - map1.put("key13", "value13"); - sources.put("test1", map1); - sources.put("test2", null); - Map map3 = new HashMap(); - map3.put("key31", "value31"); - map3.put("key32", null); - map3.put("key33", "value33"); - sources.put("test3", map3); - - byte[] bytes = serDeser.serialize(sources); - Map targets = serDeser.deserialize(bytes); - Assert.assertEquals(sources.size(), targets.size()); - - map1 = (Map)targets.get("test1"); - Assert.assertNotNull(map1); - Assert.assertEquals(3, map1.size()); - Assert.assertEquals("value11", map1.get("key11")); - Assert.assertNull(map1.get("key12")); - Assert.assertEquals("value13", map1.get("key13")); - - Assert.assertNull(targets.get("test2")); - - map3 = (Map)targets.get("test3"); - Assert.assertNotNull(map3); - Assert.assertEquals(3, map3.size()); - Assert.assertEquals("value31", map3.get("key31")); - Assert.assertNull(map3.get("key32")); - Assert.assertEquals("value33", map3.get("key33")); - } + map3 = (Map) targets.get("test3"); + Assert.assertNotNull(map3); + Assert.assertEquals(3, map3.size()); + Assert.assertEquals("value31", map3.get("key31")); + Assert.assertNull(map3.get("key32")); + Assert.assertEquals("value33", map3.get("key33")); + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/repo/TestEntityRepositoryScanner.java 
b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/repo/TestEntityRepositoryScanner.java index 1b64b20ef4..e165375ce4 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/repo/TestEntityRepositoryScanner.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/repo/TestEntityRepositoryScanner.java @@ -22,9 +22,9 @@ public class TestEntityRepositoryScanner { - @Test - public void testScan() throws InstantiationException, IllegalAccessException { - EntityRepositoryScanner.scan(); - Assert.assertNotNull(EntityDefinitionManager.getEntityByServiceName("MetricMetadataService")); - } + @Test + public void testScan() throws InstantiationException, IllegalAccessException { + EntityRepositoryScanner.scan(); + Assert.assertNotNull(EntityDefinitionManager.getEntityByServiceName("MetricMetadataService")); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionParser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionParser.java index ea548b8655..18ed7a606a 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionParser.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionParser.java @@ -16,7 +16,7 @@ */ package org.apache.eagle.log.expression; /** - * + * */ import org.junit.Assert; @@ -28,229 +28,229 @@ * @since Nov 10, 2014 */ public class TestExpressionParser { - - @Test - public void testSingleVariable() throws Exception{ - String exprStr = "mapProgress"; - ExpressionParser parser = new ExpressionParser(exprStr); - Double value = parser.setVariable("mapProgress", 100.0) - .eval(); - Assert.assertEquals(value, 100.0, 0.001); - List dependentFields = parser.getDependentFields(); - 
Assert.assertEquals(dependentFields.size(), 1); - Assert.assertEquals(dependentFields.get(0), "mapProgress"); - } - - @Test - public void testgetDependency() throws Exception{ - /** NOTICE: expression should be enclosure with "EXP{}" , This is for making antlr easy to parse - * variable name cannot be "pi" OR "E", there are parssi builtin constants */ - String exprStr = "min(mAx, Max) / abs(MAX)"; - ExpressionParser parser = new ExpressionParser(exprStr); - List variables = parser.getDependentFields(); - Assert.assertEquals(variables.size(), 3); - Assert.assertTrue(variables.contains("mAx")); - Assert.assertTrue(variables.contains("Max")); - Assert.assertTrue(variables.contains("MAX")); - } - @Test - public void testFunction() throws Exception{ - String exprStr = "min(mapProgress, reduceProgress) / abs(endTime - startTime)"; - ExpressionParser parser = new ExpressionParser(exprStr); - Double value = parser.setVariable("mapProgress", 100.0) - .setVariable("reduceProgress", 20.0) - .setVariable("endTime", 1415590100000.0) - .setVariable("startTime", 1415590000000.0) - .eval(); - Assert.assertEquals(value, 0.0002, 0.001); - } - - @Test - public void testOperator() throws Exception{ - String exprStr = "(a+b*c) / (2*(d-e))"; - ExpressionParser parser = new ExpressionParser(exprStr); - Double value = parser.setVariable("a", 200.0) - .setVariable("b", 400.0) - .setVariable("c", 3.0) - .setVariable("d", 225.0) - .setVariable("e", -125.0) - .eval(); - Assert.assertEquals(value, 2.0, 0.001); - } - - @Test - public void testOperatorWithFunction() throws Exception{ - String exprStr = "(max(a, b)* min(a, b)) / abs(a-b+c-d)"; - ExpressionParser parser = new ExpressionParser(exprStr); - Double value = parser.setVariable("a", 300.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertEquals(value, 600.0, 0.001); - } + @Test + public void testSingleVariable() throws Exception { + String exprStr = "mapProgress"; + 
ExpressionParser parser = new ExpressionParser(exprStr); + Double value = parser.setVariable("mapProgress", 100.0) + .eval(); + Assert.assertEquals(value, 100.0, 0.001); + List dependentFields = parser.getDependentFields(); + Assert.assertEquals(dependentFields.size(), 1); + Assert.assertEquals(dependentFields.get(0), "mapProgress"); + } + + @Test + public void testgetDependency() throws Exception { + /** NOTICE: expression should be enclosure with "EXP{}" , This is for making antlr easy to parse + * variable name cannot be "pi" OR "E", there are parssi builtin constants */ + String exprStr = "min(mAx, Max) / abs(MAX)"; + ExpressionParser parser = new ExpressionParser(exprStr); + List variables = parser.getDependentFields(); + Assert.assertEquals(variables.size(), 3); + Assert.assertTrue(variables.contains("mAx")); + Assert.assertTrue(variables.contains("Max")); + Assert.assertTrue(variables.contains("MAX")); + } + + @Test + public void testFunction() throws Exception { + String exprStr = "min(mapProgress, reduceProgress) / abs(endTime - startTime)"; + ExpressionParser parser = new ExpressionParser(exprStr); + Double value = parser.setVariable("mapProgress", 100.0) + .setVariable("reduceProgress", 20.0) + .setVariable("endTime", 1415590100000.0) + .setVariable("startTime", 1415590000000.0) + .eval(); + Assert.assertEquals(value, 0.0002, 0.001); + } + + @Test + public void testOperator() throws Exception { + String exprStr = "(a+b*c) / (2*(d-e))"; + ExpressionParser parser = new ExpressionParser(exprStr); + Double value = parser.setVariable("a", 200.0) + .setVariable("b", 400.0) + .setVariable("c", 3.0) + .setVariable("d", 225.0) + .setVariable("e", -125.0) + .eval(); + Assert.assertEquals(value, 2.0, 0.001); + } + + @Test + public void testOperatorWithFunction() throws Exception { + String exprStr = "(max(a, b)* min(a, b)) / abs(a-b+c-d)"; + ExpressionParser parser = new ExpressionParser(exprStr); + Double value = parser.setVariable("a", 300.0) + .setVariable("b", 
200.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertEquals(value, 600.0, 0.001); + } - @Test - public void testWithAtFieldName() throws Exception{ - String exprStr = "(max(a, b)* min(a, b)) / abs(a-b+c-d)"; - ExpressionParser parser = new ExpressionParser(exprStr); - Double value = parser.setVariable("a", 300.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertEquals(value, 600.0, 0.001); - } + @Test + public void testWithAtFieldName() throws Exception { + String exprStr = "(max(a, b)* min(a, b)) / abs(a-b+c-d)"; + ExpressionParser parser = new ExpressionParser(exprStr); + Double value = parser.setVariable("a", 300.0) + .setVariable("b", 200.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertEquals(value, 600.0, 0.001); + } - @Test - public void testConstant() throws Exception { - String exprStr = "a"; - ExpressionParser parser = new ExpressionParser(exprStr); - Double value = parser.setVariable("a", 300.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertEquals(value, 300.0, 0.001); + @Test + public void testConstant() throws Exception { + String exprStr = "a"; + ExpressionParser parser = new ExpressionParser(exprStr); + Double value = parser.setVariable("a", 300.0) + .setVariable("b", 200.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertEquals(value, 300.0, 0.001); - value = parser.setVariable("a", 200.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertEquals(value, 200.0, 0.001); - } + value = parser.setVariable("a", 200.0) + .setVariable("b", 200.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertEquals(value, 200.0, 0.001); + } - @Test - public void testBooleanExpression() throws Exception { - String exprStr = "a > b"; - ExpressionParser parser 
= new ExpressionParser(exprStr); - Double value = parser.setVariable("a", 300.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertEquals(value, 1.0, 0.001); + @Test + public void testBooleanExpression() throws Exception { + String exprStr = "a > b"; + ExpressionParser parser = new ExpressionParser(exprStr); + Double value = parser.setVariable("a", 300.0) + .setVariable("b", 200.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertEquals(value, 1.0, 0.001); - value = parser.setVariable("a", 100.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertEquals(value, 0.0, 0.001); + value = parser.setVariable("a", 100.0) + .setVariable("b", 200.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertEquals(value, 0.0, 0.001); - exprStr = "a < b"; - parser = new ExpressionParser(exprStr); - value = parser.setVariable("a", 300.0) - .setVariable("b", 300.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 0.0); + exprStr = "a < b"; + parser = new ExpressionParser(exprStr); + value = parser.setVariable("a", 300.0) + .setVariable("b", 300.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 0.0); - value = parser.setVariable("a", 400.0) - .setVariable("b", 300.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 0.0); + value = parser.setVariable("a", 400.0) + .setVariable("b", 300.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 0.0); - value = parser.setVariable("a", 100.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 1.0); + value = parser.setVariable("a", 100.0) + .setVariable("b", 200.0) + .setVariable("c", -300.0) + .setVariable("d", 
-300.0) + .eval(); + Assert.assertTrue(value == 1.0); - // !!! Not support well >= - exprStr = "a >= b"; - parser = new ExpressionParser(exprStr); - value = parser.setVariable("a", 300.0) - .setVariable("b", 300.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 0.0); // expect 1.0 + // !!! Not support well >= + exprStr = "a >= b"; + parser = new ExpressionParser(exprStr); + value = parser.setVariable("a", 300.0) + .setVariable("b", 300.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 0.0); // expect 1.0 - value = parser.setVariable("a", 400.0) - .setVariable("b", 300.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 1.0); // expect 1.0 + value = parser.setVariable("a", 400.0) + .setVariable("b", 300.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 1.0); // expect 1.0 - value = parser.setVariable("a", 100.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 1.0); // expect 0.0 + value = parser.setVariable("a", 100.0) + .setVariable("b", 200.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 1.0); // expect 0.0 - exprStr = "a <= b"; - parser = new ExpressionParser(exprStr); - value = parser.setVariable("a", 300.0) - .setVariable("b", 300.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 1.0); + exprStr = "a <= b"; + parser = new ExpressionParser(exprStr); + value = parser.setVariable("a", 300.0) + .setVariable("b", 300.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 1.0); - value = parser.setVariable("a", 400.0) - .setVariable("b", 300.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 0.0); + value = 
parser.setVariable("a", 400.0) + .setVariable("b", 300.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 0.0); - value = parser.setVariable("a", 100.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 1.0); + value = parser.setVariable("a", 100.0) + .setVariable("b", 200.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 1.0); - exprStr = "a = b"; - parser = new ExpressionParser(exprStr); - value = parser.setVariable("a", 300.0) - .setVariable("b", 300.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertEquals(value, 1.0, 0.001); + exprStr = "a = b"; + parser = new ExpressionParser(exprStr); + value = parser.setVariable("a", 300.0) + .setVariable("b", 300.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertEquals(value, 1.0, 0.001); - value = parser.setVariable("a", 100.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertEquals(value, 0.0, 0.001); - } + value = parser.setVariable("a", 100.0) + .setVariable("b", 200.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertEquals(value, 0.0, 0.001); + } - @Test - public void testParsiiBug() throws Exception { - // !!! Not support >= - String exprStr = "a >= b"; - ExpressionParser parser = new ExpressionParser(exprStr); - Double value = parser.setVariable("a", 300.0) - .setVariable("b", 300.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 0.0); // expect 1.0 + @Test + public void testParsiiBug() throws Exception { + // !!! 
Not support >= + String exprStr = "a >= b"; + ExpressionParser parser = new ExpressionParser(exprStr); + Double value = parser.setVariable("a", 300.0) + .setVariable("b", 300.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 0.0); // expect 1.0 - value = parser.setVariable("a", 400.0) - .setVariable("b", 300.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 1.0); // expect 1.0 + value = parser.setVariable("a", 400.0) + .setVariable("b", 300.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 1.0); // expect 1.0 - value = parser.setVariable("a", 100.0) - .setVariable("b", 200.0) - .setVariable("c", -300.0) - .setVariable("d", -300.0) - .eval(); - Assert.assertTrue(value == 1.0); // expect 0.0 - } + value = parser.setVariable("a", 100.0) + .setVariable("b", 200.0) + .setVariable("c", -300.0) + .setVariable("d", -300.0) + .eval(); + Assert.assertTrue(value == 1.0); // expect 0.0 + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionPerformance.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionPerformance.java index 0a2b729524..ea758395d0 100755 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionPerformance.java +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionPerformance.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ /** - * + * */ package org.apache.eagle.log.expression; @@ -38,83 +38,83 @@ public class TestExpressionPerformance { - public interface ExpressionParser { - double parse(String exprStr, Map tuple) throws Exception; - } - - public class ParsiiParser implements ExpressionParser{ - public Expression expression; - - public double parse(String exprStr, Map tuple) throws Exception{ - Scope scope = Scope.create(); - if (expression == null) { - expression = Parser.parse(exprStr, scope); - } - for(String valName : tuple.keySet()) { - Object value = tuple.get(valName); - if(value instanceof Number) { - scope.getVariable(valName).setValue(((Number)value).doubleValue()); - } - } - return expression.evaluate(); - } - } - - public long doParse(ExpressionParser parser, String exprStr, List parameters) throws Exception{ - long startTime = System.currentTimeMillis(); - int parNum = parameters.size(); - Map tuple = new HashMap(); - for (int i = 1; i < 100000; i++) { - for (int j = 0; j < parNum; j++) { - tuple.put(parameters.get(j), (double) (i * 3 + j)); - } - parser.parse(exprStr, tuple); - } - long endTime = System.currentTimeMillis(); - return endTime - startTime; - } - - @Test - public void TestPerformance() throws Exception{ - List parsers = new ArrayList(); - parsers.add(new ParsiiParser()); + public interface ExpressionParser { + double parse(String exprStr, Map tuple) throws Exception; + } + + public class ParsiiParser implements ExpressionParser { + public Expression expression; + + public double parse(String exprStr, Map tuple) throws Exception { + Scope scope = Scope.create(); + if (expression == null) { + expression = Parser.parse(exprStr, scope); + } + for (String valName : tuple.keySet()) { + Object value = tuple.get(valName); + if (value instanceof Number) { + scope.getVariable(valName).setValue(((Number) value).doubleValue()); + } + } + return expression.evaluate(); + } + } + + public long doParse(ExpressionParser parser, String exprStr, List parameters) throws 
Exception { + long startTime = System.currentTimeMillis(); + int parNum = parameters.size(); + Map tuple = new HashMap(); + for (int i = 1; i < 100000; i++) { + for (int j = 0; j < parNum; j++) { + tuple.put(parameters.get(j), (double) (i * 3 + j)); + } + parser.parse(exprStr, tuple); + } + long endTime = System.currentTimeMillis(); + return endTime - startTime; + } + + @Test + public void TestPerformance() throws Exception { + List parsers = new ArrayList(); + parsers.add(new ParsiiParser()); + + String exprStr = "a + b / c * 2"; + List parameters = new ArrayList(); + parameters.add("a"); + parameters.add("b"); + parameters.add("c"); + + Map timeComsued = new HashMap(); - String exprStr = "a + b / c * 2"; - List parameters = new ArrayList(); - parameters.add("a"); - parameters.add("b"); - parameters.add("c"); - - Map timeComsued = new HashMap(); - - for (int i = 0; i < 10; i++) { - for (ExpressionParser parser : parsers) { - String name = parser.getClass().getName(); - if (timeComsued.get(name) == null) { - timeComsued.put(name, 0L); - } - timeComsued.put(name, timeComsued.get(name) + doParse(parser, exprStr, parameters)); - } - } - for (Entry time : timeComsued.entrySet()) { - System.out.println("time consumed of " + time.getKey() + ": " + time.getValue() +"ms"); - } - } + for (int i = 0; i < 10; i++) { + for (ExpressionParser parser : parsers) { + String name = parser.getClass().getName(); + if (timeComsued.get(name) == null) { + timeComsued.put(name, 0L); + } + timeComsued.put(name, timeComsued.get(name) + doParse(parser, exprStr, parameters)); + } + } + for (Entry time : timeComsued.entrySet()) { + System.out.println("time consumed of " + time.getKey() + ": " + time.getValue() + "ms"); + } + } - @Test - public void TestEvaluatoinValid() throws Exception{ - List parsers = new ArrayList(); - parsers.add(new ParsiiParser()); + @Test + public void TestEvaluatoinValid() throws Exception { + List parsers = new ArrayList(); + parsers.add(new ParsiiParser()); - String 
exprStr = "max(a, 3 * b) + min(b, 10000) / abs(c * 2)"; - Map tuples = new HashMap(); - tuples.put("a", 20.5); - tuples.put("b", 123.7); - tuples.put("c", 97.57); - DecimalFormat df = new DecimalFormat("#.00"); - for (ExpressionParser parser : parsers) { - System.out.println(parser.getClass().getName() + " : " + parser.parse(exprStr, tuples)); - Assert.assertEquals(df.format(parser.parse(exprStr, tuples)), "371.73"); - } - } + String exprStr = "max(a, 3 * b) + min(b, 10000) / abs(c * 2)"; + Map tuples = new HashMap(); + tuples.put("a", 20.5); + tuples.put("b", 123.7); + tuples.put("c", 97.57); + DecimalFormat df = new DecimalFormat("#.00"); + for (ExpressionParser parser : parsers) { + System.out.println(parser.getClass().getName() + " : " + parser.parse(exprStr, tuples)); + Assert.assertEquals(df.format(parser.parse(exprStr, tuples)), "371.73"); + } + } } diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/resources/log4j.properties b/eagle-core/eagle-query/eagle-entity-base/src/test/resources/log4j.properties index dccbc706a4..4b40fd4b73 100644 --- a/eagle-core/eagle-query/eagle-entity-base/src/test/resources/log4j.properties +++ b/eagle-core/eagle-query/eagle-entity-base/src/test/resources/log4j.properties @@ -12,16 +12,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- log4j.rootLogger=INFO, stdout #eagle.log.dir=./logs #eagle.log.file=eagle.log - # standard output log4j.appender.stdout=org.apache.log4j.ConsoleAppender log4j.appender.stdout.layout=org.apache.log4j.PatternLayout log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %p [%t] %c{2}[%L]: %m%n - ## Daily Rolling File Appender #log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender #log4j.appender.DRFA.File=${eagle.log.dir}/${eagle.log.file} diff --git a/eagle-core/eagle-query/eagle-query-base/pom.xml b/eagle-core/eagle-query/eagle-query-base/pom.xml index f830c272d0..ee543714a7 100644 --- a/eagle-core/eagle-query/eagle-query-base/pom.xml +++ b/eagle-core/eagle-query/eagle-query-base/pom.xml @@ -58,4 +58,16 @@ test + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + true + true + + + + diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/GenericEntityQuery.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/GenericEntityQuery.java index 1319f4366e..c47c8fdf3b 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/GenericEntityQuery.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/GenericEntityQuery.java @@ -24,51 +24,54 @@ import java.util.ArrayList; import java.util.List; -/** - * @since : 10/30/14,2014 - */ -public class GenericEntityQuery implements GenericQuery,EntityCreationListener { - private static final Logger LOG = LoggerFactory.getLogger(GenericEntityQuery.class); +public class GenericEntityQuery implements GenericQuery, EntityCreationListener { + private static final Logger LOG = LoggerFactory.getLogger(GenericEntityQuery.class); - private List entities = new ArrayList(); - private StreamReader reader; + private List entities = new ArrayList(); + private StreamReader reader; - public GenericEntityQuery(String serviceName, SearchCondition condition, String metricName) throws 
IllegalAccessException, InstantiationException { - if(serviceName.equals(GenericMetricEntity.GENERIC_METRIC_SERVICE)){ - if(LOG.isDebugEnabled()) LOG.debug("List metric query"); - if(metricName == null || metricName.isEmpty()){ - throw new IllegalArgumentException("metricName should not be empty for metric list query"); - } - if(!condition.getOutputFields().contains(GenericMetricEntity.VALUE_FIELD)){ - condition.getOutputFields().add(GenericMetricEntity.VALUE_FIELD); - } - reader = new GenericEntityStreamReader(serviceName, condition,metricName); - }else{ - if(LOG.isDebugEnabled()) LOG.debug("List entity query"); - reader = new GenericEntityStreamReader(serviceName, condition); - } - reader.register(this); - } + public GenericEntityQuery(String serviceName, SearchCondition condition, String metricName) throws IllegalAccessException, InstantiationException { + if (serviceName.equals(GenericMetricEntity.GENERIC_METRIC_SERVICE)) { + if (LOG.isDebugEnabled()) { + LOG.debug("List metric query"); + } + if (metricName == null || metricName.isEmpty()) { + throw new IllegalArgumentException("metricName should not be empty for metric list query"); + } + if (!condition.getOutputFields().contains(GenericMetricEntity.VALUE_FIELD)) { + condition.getOutputFields().add(GenericMetricEntity.VALUE_FIELD); + } + reader = new GenericEntityStreamReader(serviceName, condition, metricName); + } else { + if (LOG.isDebugEnabled()) { + LOG.debug("List entity query"); + } + reader = new GenericEntityStreamReader(serviceName, condition); + } + reader.register(this); + } - @Override - public long getLastTimestamp() { - return reader.getLastTimestamp(); - } + @Override + public long getLastTimestamp() { + return reader.getLastTimestamp(); + } - @Override - public void entityCreated(TaggedLogAPIEntity entity){ - entities.add(entity); - } + @Override + public void entityCreated(TaggedLogAPIEntity entity) { + entities.add(entity); + } - @Override - public List result() throws Exception{ - 
if(LOG.isDebugEnabled()) LOG.debug("Start reading as batch mode"); - reader.readAsStream(); - return entities; - } + @Override + public List result() throws Exception { + if (LOG.isDebugEnabled()) { + LOG.debug("Start reading as batch mode"); + } + reader.readAsStream(); + return entities; + } - @Override - public long getFirstTimeStamp() { - return reader.getFirstTimestamp(); - } + @Override + public long getFirstTimeStamp() { + return reader.getFirstTimestamp(); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/GenericQuery.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/GenericQuery.java index d3af151499..215d1311c1 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/GenericQuery.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/GenericQuery.java @@ -19,28 +19,28 @@ import java.util.List; /** + * GenericQuery interfoce. * @since : 10/30/14,2014 */ public interface GenericQuery { - /** - * Throw all exceptions to http server - * + /** + * Throw all exceptions to http server. + * * @param result entity type - * @return result entities list - * + * @return result entities list * @throws Exception - */ - List result() throws Exception; + */ + List result() throws Exception; - /** - * Get last/largest timestamp on all rows - * - * @return last timestamp - */ - long getLastTimestamp(); + /** + * Get last/largest timestamp on all rows. + * + * @return last timestamp + */ + long getLastTimestamp(); - /** - * Get first timestamp on all rows - */ - long getFirstTimeStamp(); + /** + * Get first timestamp on all rows. 
+ */ + long getFirstTimeStamp(); } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/ListQueryCompiler.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/ListQueryCompiler.java index ab010646f8..948cd0649f 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/ListQueryCompiler.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/ListQueryCompiler.java @@ -20,14 +20,14 @@ import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; import org.apache.eagle.log.expression.ExpressionParser; +import org.apache.eagle.query.aggregate.AggregateFunctionType; +import org.apache.eagle.query.aggregate.AggregateFunctionTypeMatcher; import org.apache.eagle.query.aggregate.timeseries.SortOption; import org.apache.eagle.query.aggregate.timeseries.SortOptionsParser; import org.apache.eagle.query.parser.EagleQueryParseException; import org.apache.eagle.query.parser.EagleQueryParser; import org.apache.eagle.query.parser.ORExpression; import org.apache.eagle.query.parser.TokenConstant; -import org.apache.eagle.query.aggregate.AggregateFunctionType; -import org.apache.eagle.query.aggregate.AggregateFunctionTypeMatcher; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterList; import org.slf4j.Logger; @@ -38,343 +38,367 @@ import java.util.regex.Pattern; public class ListQueryCompiler { - private final static Logger LOG = LoggerFactory.getLogger(ListQueryCompiler.class); - /** - * syntax is []{} - */ - private final static String listRegex = "^([^\\[]+)\\[(.*)\\]\\{(.+)\\}$"; - private final static Pattern _listPattern = Pattern.compile(listRegex); - - /** - * syntax is @ - */ - private final static String _fnAnyPattern = "*"; - private final static Pattern _fnPattern = TokenConstant.ID_PATTERN; - - /** - * syntax is @ 
- */ - private final static String expRegex = "^(EXP\\{.*\\})(\\s+AS)?(\\s+.*)?$"; - private final static Pattern _expPattern = Pattern.compile(expRegex,Pattern.CASE_INSENSITIVE); - - /** - * syntax is []{} - */ - - /** The regular expression before add EXP{} in query **/ - private final static String aggRegex = "^([^\\[]+)\\[(.*)\\]<([^>]*)>\\{(.+)\\}$"; - private final static Pattern _aggPattern = Pattern.compile(aggRegex); - - private final static String sortRegex = "^([^\\[]+)\\[(.*)\\]<([^>]*)>\\{(.+)\\}\\.\\{(.+)\\}$"; - private final static Pattern _sortPattern = Pattern.compile(sortRegex); - - private String _serviceName; - private Filter _filter; - private List _outputFields; - private List _groupbyFields; - private List _aggregateFunctionTypes; - private List _aggregateFields; - private List _sortFunctionTypes; - private List _sortFields; - private Map _outputAlias; - - /** - * Filed that must be required in filter - * - * @return - */ - public Set getFilterFields() { - return _filterFields; - } - - private Set _filterFields; - private List _sortOptions; - private boolean _hasAgg; - private List _partitionValues; - private boolean _filterIfMissing; - private ORExpression _queryExpression; - private boolean _outputAll = false; - - public ListQueryCompiler(String query) throws Exception { - this(query, false); - } - - public ListQueryCompiler(String query, boolean filterIfMissing) throws Exception{ - this._filterIfMissing = filterIfMissing; - Matcher m = _listPattern.matcher(query); - if(m.find()){ - if(m.groupCount() != 3) - throw new IllegalArgumentException("List query syntax is []{}"); - compileCollectionQuery(m); - _hasAgg = false; - partitionConstraintValidate(query); - return; - } - - /** match sort pattern fist, otherwise some sort query will be mismatch as agg pattern */ - m = _sortPattern.matcher(query); - if(m.find()){ - if(m.groupCount() != 5) - throw new IllegalArgumentException("Aggregate query syntax is []{}.{}"); - compileAggregateQuery(m); 
- _hasAgg = true; - partitionConstraintValidate(query); - return; - } - - m = _aggPattern.matcher(query); - if(m.find()){ - if(m.groupCount() != 4) - //if(m.groupCount() < 4 || m.groupCount() > 5) - throw new IllegalArgumentException("Aggregate query syntax is []{}.{}"); - compileAggregateQuery(m); - _hasAgg = true; - partitionConstraintValidate(query); - return; - } - - throw new IllegalArgumentException("List query syntax is []{} \n Aggregate query syntax is []{}.{}"); - } - - /** - * TODO: For now we don't support one query to query multiple partitions. In future if partition is defined - * for the entity, internally We need to spawn multiple queries and send one query for each search condition - * for each partition - * - * @param query input query to compile - */ - private void partitionConstraintValidate(String query) { - if (_partitionValues != null && _partitionValues.size() > 1) { - final String[] values = _partitionValues.get(0); - for (int i = 1; i < _partitionValues.size(); ++i) { - final String[] tmpValues = _partitionValues.get(i); - for (int j = 0; j < values.length; ++j) { - if (values[j] == null || (!values[j].equals(tmpValues[j]))) { - final String errMsg = "One query for multiple partitions is NOT allowed for now! 
Query: " + query; - LOG.error(errMsg); - throw new IllegalArgumentException(errMsg); - } - } - } - } - } - - public boolean hasAgg(){ - return _hasAgg; - } - - public List getQueryPartitionValues() { - return _partitionValues; - } - - public ORExpression getQueryExpression() { - return _queryExpression; - } - - private void checkEntityExistence(String entityName) throws EagleQueryParseException { - try { - if(EntityDefinitionManager.getEntityByServiceName(entityName) == null) - throw new EagleQueryParseException(entityName + " entity does not exist!"); - } catch (InstantiationException e) { - final String errMsg = "Got an InstantiationException: " + e.getMessage(); - throw new EagleQueryParseException(entityName + " entity does not exist! " + errMsg); - } catch (IllegalAccessException e) { - final String errMsg = "Got an IllegalAccessException: " + e.getMessage(); - throw new EagleQueryParseException(entityName + " entity does not exist! " + errMsg); - } - } - - public String deleteAtSign(String expression) { - return expression.replace("@", ""); - } - - private void compileCollectionQuery(Matcher m) throws EagleQueryParseException{ - _serviceName = m.group(1); - checkEntityExistence(_serviceName); - if(_outputFields==null) _outputFields = new ArrayList(); - String qy = m.group(2); - _filter = compileQy(qy); - String prjFields = m.group(3); - String[] tmp = prjFields.split(","); - for(String str : tmp){ - str = str.trim(); - Matcher fnMatcher = _fnPattern.matcher(str); - Matcher expMatcher = _expPattern.matcher(str); - if(fnMatcher.find()) { - if (fnMatcher.groupCount() == 1) - _outputFields.add(fnMatcher.group(1)); - }else if(_fnAnyPattern.equals(str)){ - if(LOG.isDebugEnabled()) LOG.debug("Output all fields"); - // _outputFields.add(_fnAnyPattern); - this._outputAll = true; - }else if (expMatcher.find()) { - String expr = deleteAtSign(expMatcher.group(1)); - String alias = expMatcher.group(3); - try { - String exprContent = 
TokenConstant.parseExpressionContent(expr); - _outputFields.addAll(ExpressionParser.parse(exprContent).getDependentFields()); - if(alias!=null) { - if(_outputAlias == null) _outputAlias = new HashMap(); - _outputAlias.put(exprContent,alias.trim()); - } - } catch (Exception ex){ - LOG.error("Failed to parse expression: " + expr + ", exception: " + ex.getMessage(), ex); - } finally { - _outputFields.add(expr); - } - } else { - throw new IllegalArgumentException("Field name syntax must be @ or * or Expression in syntax EXP{}"); - } - } - } - - private void compileAggregateQuery(Matcher m) throws EagleQueryParseException{ - _serviceName = m.group(1); - checkEntityExistence(_serviceName); - String qy = m.group(2); - _filter = compileQy(qy); - String groupbyFields = m.group(3); - // groupbyFields could be empty - List groupbyFieldList = null; - _groupbyFields = new ArrayList(); - if(!groupbyFields.isEmpty()){ - groupbyFieldList = Arrays.asList(groupbyFields.split(",")); - for(String str : groupbyFieldList){ - Matcher fnMatcher = _fnPattern.matcher(str.trim()); - if(!fnMatcher.find() || fnMatcher.groupCount() != 1) - throw new IllegalArgumentException("Field name syntax must be @"); - _groupbyFields.add(fnMatcher.group(1)); - } - } - String functions = m.group(4); - // functions - List functionList = Arrays.asList(functions.split(",")); - _aggregateFunctionTypes = new ArrayList(); - _aggregateFields = new ArrayList(); - for(String function : functionList){ - AggregateFunctionTypeMatcher matcher = AggregateFunctionType.matchAll(function.trim()); - if(!matcher.find()){ - throw new IllegalArgumentException("Aggregate function must have format of count|sum|avg|max|min()"); - } - _aggregateFunctionTypes.add(matcher.type()); - String aggField = deleteAtSign(matcher.field().trim()); - try { - if(_outputFields == null) _outputFields = new ArrayList(); - if(TokenConstant.isExpression(aggField)) { - 
_outputFields.addAll(ExpressionParser.parse(TokenConstant.parseExpressionContent(aggField)).getDependentFields()); - }else{ - _outputFields.add(aggField); - } - } catch (Exception ex){ - LOG.error("Failed to parse expression: " + aggField + ", exception: " + ex.getMessage(), ex); - } finally { - _aggregateFields.add(aggField); - } - } - - // sort options - if(m.groupCount() < 5 || m.group(5) == null) // no sort options - return; - String sortOptions = m.group(5); - if(sortOptions != null){ - LOG.info("SortOptions: " + sortOptions); - List sortOptionList = Arrays.asList(sortOptions.split(",")); - List rawSortFields = new ArrayList(); - this._sortOptions = SortOptionsParser.parse(groupbyFieldList, functionList, sortOptionList, rawSortFields); - this._sortFunctionTypes = new ArrayList<>(); - this._sortFields = new ArrayList<>(); - for (String sortField : rawSortFields) { - AggregateFunctionTypeMatcher matcher = AggregateFunctionType.matchAll(sortField); - if(matcher.find()) { - _sortFunctionTypes.add(matcher.type()); - _sortFields.add(deleteAtSign(matcher.field().trim())); - } - } - } - } - - /** - * 1. syntax level - use antlr to pass the queries - * 2. semantics level - can't distinguish tag or qualifier - * @param qy - * @return - */ - private Filter compileQy(String qy) throws EagleQueryParseException{ - try { - EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(_serviceName); - if(qy == null || qy.isEmpty()){ - if (ed.getPartitions() == null) { - if(LOG.isDebugEnabled()) LOG.warn("Query string is empty, full table scan query: " + qy); - // For hbase 0.98+, empty FilterList() will filter all rows, so we need return null instead + private static final Logger LOG = LoggerFactory.getLogger(ListQueryCompiler.class); + /** + * syntax is EntityName[Filter]{Projection}. 
+ */ + private static final String listRegex = "^([^\\[]+)\\[(.*)\\]\\{(.+)\\}$"; + private static final Pattern _listPattern = Pattern.compile(listRegex); + + /** + * syntax is @fieldname. + */ + private static final String _fnAnyPattern = "*"; + private static final Pattern _fnPattern = TokenConstant.ID_PATTERN; + + /** + * syntax is @expression. + */ + private static final String expRegex = "^(EXP\\{.*\\})(\\s+AS)?(\\s+.*)?$"; + private static final Pattern _expPattern = Pattern.compile(expRegex, Pattern.CASE_INSENSITIVE); + + /** + * syntax is EntityName[Filter]GroupbyFields{AggregateFunctions}. + */ + + /** + * The regular expression before add EXP{Expression} in query. + **/ + private static final String aggRegex = "^([^\\[]+)\\[(.*)\\]<([^>]*)>\\{(.+)\\}$"; + private static final Pattern _aggPattern = Pattern.compile(aggRegex); + + private static final String sortRegex = "^([^\\[]+)\\[(.*)\\]<([^>]*)>\\{(.+)\\}\\.\\{(.+)\\}$"; + private static final Pattern _sortPattern = Pattern.compile(sortRegex); + + private String serviceName; + private Filter filter; + private List outputFields; + private List groupbyFields; + private List aggregateFunctionTypes; + private List aggregateFields; + private List sortFunctionTypes; + private List sortFields; + private Map outputAlias; + + /** + * Filed that must be required in filter. 
+ * + * @return + */ + public Set getFilterFields() { + return filterFields; + } + + private Set filterFields; + private List sortOptions; + private boolean hasAgg; + private List partitionValues; + private boolean filterIfMissing; + private ORExpression queryExpression; + private boolean outputAll = false; + + public ListQueryCompiler(String query) throws Exception { + this(query, false); + } + + public ListQueryCompiler(String query, boolean filterIfMissing) throws Exception { + this.filterIfMissing = filterIfMissing; + Matcher m = _listPattern.matcher(query); + if (m.find()) { + if (m.groupCount() != 3) { + throw new IllegalArgumentException("List query syntax is []{}"); + } + compileCollectionQuery(m); + hasAgg = false; + partitionConstraintValidate(query); + return; + } + + /** match sort pattern fist, otherwise some sort query will be mismatch as agg pattern */ + m = _sortPattern.matcher(query); + if (m.find()) { + if (m.groupCount() != 5) { + throw new IllegalArgumentException("Aggregate query syntax is []{}.{}"); + } + compileAggregateQuery(m); + hasAgg = true; + partitionConstraintValidate(query); + return; + } + + m = _aggPattern.matcher(query); + if (m.find()) { + //if(m.groupCount() < 4 || m.groupCount() > 5) + if (m.groupCount() != 4) { + throw new IllegalArgumentException("Aggregate query syntax is []{}.{}"); + } + compileAggregateQuery(m); + hasAgg = true; + partitionConstraintValidate(query); + return; + } + + throw new IllegalArgumentException("List query syntax is []{} \n Aggregate query syntax is []{}" + + ".{}"); + } + + /** + * TODO: For now we don't support one query to query multiple partitions. In future if partition is defined + * for the entity, internally We need to spawn multiple queries and send one query for each search condition + * for each partition. 
+ * + * @param query input query to compile + */ + private void partitionConstraintValidate(String query) { + if (partitionValues != null && partitionValues.size() > 1) { + final String[] values = partitionValues.get(0); + for (int i = 1; i < partitionValues.size(); ++i) { + final String[] tmpValues = partitionValues.get(i); + for (int j = 0; j < values.length; ++j) { + if (values[j] == null || (!values[j].equals(tmpValues[j]))) { + final String errMsg = "One query for multiple partitions is NOT allowed for now! Query: " + query; + LOG.error(errMsg); + throw new IllegalArgumentException(errMsg); + } + } + } + } + } + + public boolean hasAgg() { + return hasAgg; + } + + public List getQueryPartitionValues() { + return partitionValues; + } + + public ORExpression getQueryExpression() { + return queryExpression; + } + + private void checkEntityExistence(String entityName) throws EagleQueryParseException { + try { + if (EntityDefinitionManager.getEntityByServiceName(entityName) == null) { + throw new EagleQueryParseException(entityName + " entity does not exist!"); + } + } catch (InstantiationException e) { + final String errMsg = "Got an InstantiationException: " + e.getMessage(); + throw new EagleQueryParseException(entityName + " entity does not exist! " + errMsg); + } catch (IllegalAccessException e) { + final String errMsg = "Got an IllegalAccessException: " + e.getMessage(); + throw new EagleQueryParseException(entityName + " entity does not exist! 
" + errMsg); + } + } + + public String deleteAtSign(String expression) { + return expression.replace("@", ""); + } + + private void compileCollectionQuery(Matcher m) throws EagleQueryParseException { + serviceName = m.group(1); + checkEntityExistence(serviceName); + if (outputFields == null) { + outputFields = new ArrayList(); + } + String qy = m.group(2); + filter = compileQy(qy); + String prjFields = m.group(3); + String[] tmp = prjFields.split(","); + for (String str : tmp) { + str = str.trim(); + Matcher fnMatcher = _fnPattern.matcher(str); + Matcher expMatcher = _expPattern.matcher(str); + if (fnMatcher.find()) { + if (fnMatcher.groupCount() == 1) { + outputFields.add(fnMatcher.group(1)); + } + } else if (_fnAnyPattern.equals(str)) { + if (LOG.isDebugEnabled()) { + LOG.debug("Output all fields"); + } + // _outputFields.add(_fnAnyPattern); + this.outputAll = true; + } else if (expMatcher.find()) { + String expr = deleteAtSign(expMatcher.group(1)); + String alias = expMatcher.group(3); + try { + String exprContent = TokenConstant.parseExpressionContent(expr); + outputFields.addAll(ExpressionParser.parse(exprContent).getDependentFields()); + if (alias != null) { + if (outputAlias == null) { + outputAlias = new HashMap(); + } + outputAlias.put(exprContent, alias.trim()); + } + } catch (Exception ex) { + LOG.error("Failed to parse expression: " + expr + ", exception: " + ex.getMessage(), ex); + } finally { + outputFields.add(expr); + } + } else { + throw new IllegalArgumentException("Field name syntax must be @ or * or Expression in syntax EXP{}"); + } + } + } + + private void compileAggregateQuery(Matcher m) throws EagleQueryParseException { + serviceName = m.group(1); + checkEntityExistence(serviceName); + String qy = m.group(2); + filter = compileQy(qy); + String groupbyFields = m.group(3); + // groupbyFields could be empty + List groupbyFieldList = null; + this.groupbyFields = new ArrayList(); + if (!groupbyFields.isEmpty()) { + groupbyFieldList = 
Arrays.asList(groupbyFields.split(",")); + for (String str : groupbyFieldList) { + Matcher fnMatcher = _fnPattern.matcher(str.trim()); + if (!fnMatcher.find() || fnMatcher.groupCount() != 1) { + throw new IllegalArgumentException("Field name syntax must be @"); + } + this.groupbyFields.add(fnMatcher.group(1)); + } + } + String functions = m.group(4); + // functions + List functionList = Arrays.asList(functions.split(",")); + aggregateFunctionTypes = new ArrayList(); + aggregateFields = new ArrayList(); + for (String function : functionList) { + AggregateFunctionTypeMatcher matcher = AggregateFunctionType.matchAll(function.trim()); + if (!matcher.find()) { + throw new IllegalArgumentException("Aggregate function must have format of count|sum|avg|max|min()"); + } + aggregateFunctionTypes.add(matcher.type()); + String aggField = deleteAtSign(matcher.field().trim()); + try { + if (outputFields == null) { + outputFields = new ArrayList(); + } + if (TokenConstant.isExpression(aggField)) { + outputFields.addAll(ExpressionParser.parse(TokenConstant.parseExpressionContent(aggField)).getDependentFields()); + } else { + outputFields.add(aggField); + } + } catch (Exception ex) { + LOG.error("Failed to parse expression: " + aggField + ", exception: " + ex.getMessage(), ex); + } finally { + aggregateFields.add(aggField); + } + } + + // sort options + if (m.groupCount() < 5 || m.group(5) == null) { // no sort options + return; + } + String sortOptions = m.group(5); + if (sortOptions != null) { + LOG.info("SortOptions: " + sortOptions); + List sortOptionList = Arrays.asList(sortOptions.split(",")); + List rawSortFields = new ArrayList(); + this.sortOptions = SortOptionsParser.parse(groupbyFieldList, functionList, sortOptionList, rawSortFields); + this.sortFunctionTypes = new ArrayList<>(); + this.sortFields = new ArrayList<>(); + for (String sortField : rawSortFields) { + AggregateFunctionTypeMatcher matcher = AggregateFunctionType.matchAll(sortField); + if (matcher.find()) { + 
sortFunctionTypes.add(matcher.type()); + sortFields.add(deleteAtSign(matcher.field().trim())); + } + } + } + } + + /** + * 1. syntax level - use antlr to pass the queries + * 2. semantics level - can't distinguish tag or qualifier + * + * @param qy + * @return + */ + private Filter compileQy(String qy) throws EagleQueryParseException { + try { + EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName); + if (qy == null || qy.isEmpty()) { + if (ed.getPartitions() == null) { + if (LOG.isDebugEnabled()) { + LOG.warn("Query string is empty, full table scan query: " + qy); + } + // For hbase 0.98+, empty FilterList() will filter all rows, so we need return null instead return null; - } else { - final String errMsg = "Entity " + ed.getEntityClass().getSimpleName() + " defined partition, " - + "but query doesn't provide partition condition! Query: " + qy; - LOG.error(errMsg); - throw new IllegalArgumentException(errMsg); - } - } - EagleQueryParser parser = new EagleQueryParser(qy); - _queryExpression = parser.parse(); - - //TODO: build customize filter for EXP{} - HBaseFilterBuilder builder = new HBaseFilterBuilder(ed, _queryExpression, _filterIfMissing); - FilterList flist = builder.buildFilters(); - _partitionValues = builder.getPartitionValues(); - _filterFields = builder.getFilterFields(); - return flist; - } catch (InstantiationException e) { - final String errMsg = "Got an InstantiationException: " + e.getMessage(); - throw new EagleQueryParseException(_serviceName + " entity does not exist! " + errMsg); - } catch (IllegalAccessException e) { - final String errMsg = "Got an IllegalAccessException: " + e.getMessage(); - throw new EagleQueryParseException(_serviceName + " entity does not exist! 
" + errMsg); - } - } - - public String serviceName(){ - return _serviceName; - } - - public List outputFields(){ - return _outputFields; - } - - public Filter filter(){ - return _filter; - } - - public List groupbyFields(){ - return _groupbyFields; - } - - public List aggregateFunctionTypes(){ - return _aggregateFunctionTypes; - } - - public List aggregateFields(){ - return _aggregateFields; - } - - public List sortOptions(){ - return _sortOptions; - } - - public List sortFunctions() { - return _sortFunctionTypes; - } - - public List sortFields() { - return _sortFields; - } - - /** - * Output all fields (i.e. has * in out fields) - * - * @return - */ - public boolean isOutputAll(){ return _outputAll;} - public Map getOutputAlias(){ - return _outputAlias; - } + } else { + final String errMsg = "Entity " + ed.getEntityClass().getSimpleName() + " defined partition, " + + "but query doesn't provide partition condition! Query: " + qy; + LOG.error(errMsg); + throw new IllegalArgumentException(errMsg); + } + } + EagleQueryParser parser = new EagleQueryParser(qy); + queryExpression = parser.parse(); + + //TODO: build customize filter for EXP{} + HBaseFilterBuilder builder = new HBaseFilterBuilder(ed, queryExpression, filterIfMissing); + FilterList flist = builder.buildFilters(); + partitionValues = builder.getPartitionValues(); + filterFields = builder.getFilterFields(); + return flist; + } catch (InstantiationException e) { + final String errMsg = "Got an InstantiationException: " + e.getMessage(); + throw new EagleQueryParseException(serviceName + " entity does not exist! " + errMsg); + } catch (IllegalAccessException e) { + final String errMsg = "Got an IllegalAccessException: " + e.getMessage(); + throw new EagleQueryParseException(serviceName + " entity does not exist! 
" + errMsg); + } + } + + public String serviceName() { + return serviceName; + } + + public List outputFields() { + return outputFields; + } + + public Filter filter() { + return filter; + } + + public List groupbyFields() { + return groupbyFields; + } + + public List aggregateFunctionTypes() { + return aggregateFunctionTypes; + } + + public List aggregateFields() { + return aggregateFields; + } + + public List sortOptions() { + return sortOptions; + } + + public List sortFunctions() { + return sortFunctionTypes; + } + + public List sortFields() { + return sortFields; + } + + /** + * Output all fields (i.e. has * in out fields) + * + * @return + */ + public boolean isOutputAll() { + return outputAll; + } + + public Map getOutputAlias() { + return outputAlias; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/QueryConstants.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/QueryConstants.java index 231cc99b44..fbb0645e97 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/QueryConstants.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/QueryConstants.java @@ -16,9 +16,7 @@ */ package org.apache.eagle.query; -/** - * @since 3/25/15 - */ + public class QueryConstants { - public final static String CHARSET ="UTF-8"; + public static final String CHARSET = "UTF-8"; } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateAPIEntity.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateAPIEntity.java index 1f3214ff5c..846100b3bf 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateAPIEntity.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateAPIEntity.java @@ -16,54 +16,63 @@ */ package 
org.apache.eagle.query.aggregate; +import org.codehaus.jackson.annotate.JsonProperty; +import org.codehaus.jackson.map.annotate.JsonSerialize; + import java.util.ArrayList; import java.util.List; import java.util.SortedMap; import java.util.TreeMap; -import org.codehaus.jackson.annotate.JsonProperty; -import org.codehaus.jackson.map.annotate.JsonSerialize; - -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) public class AggregateAPIEntity { - private long numDirectDescendants; - private long numTotalDescendants; - private String key; - private SortedMap entityList = new TreeMap(); - private List sortedList = new ArrayList(); + private long numDirectDescendants; + private long numTotalDescendants; + private String key; + private SortedMap entityList = new TreeMap(); + private List sortedList = new ArrayList(); + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + @JsonProperty("sL") + public List getSortedList() { + return sortedList; + } + + public void setSortedList(List sortedList) { + this.sortedList = sortedList; + } + + @JsonProperty("eL") + public SortedMap getEntityList() { + return entityList; + } + + public void setEntityList(SortedMap entityList) { + this.entityList = entityList; + } + + @JsonProperty("nDD") + public long getNumDirectDescendants() { + return numDirectDescendants; + } + + public void setNumDirectDescendants(long numDirectDescendants) { + this.numDirectDescendants = numDirectDescendants; + } + + @JsonProperty("nTD") + public long getNumTotalDescendants() { + return numTotalDescendants; + } - public String getKey() { - return key; - } - public void setKey(String key) { - this.key = key; - } - @JsonProperty("sL") - public List getSortedList() { - return sortedList; - } - public void setSortedList(List sortedList) { - this.sortedList = sortedList; - } - @JsonProperty("eL") - public SortedMap getEntityList() { - return 
entityList; - } - public void setEntityList(SortedMap entityList) { - this.entityList = entityList; - } - @JsonProperty("nDD") - public long getNumDirectDescendants() { - return numDirectDescendants; - } - public void setNumDirectDescendants(long numDirectDescendants) { - this.numDirectDescendants = numDirectDescendants; - } - @JsonProperty("nTD") - public long getNumTotalDescendants() { - return numTotalDescendants; - } - public void setNumTotalDescendants(long numTotalDescendants) { - this.numTotalDescendants = numTotalDescendants; - } + public void setNumTotalDescendants(long numTotalDescendants) { + this.numTotalDescendants = numTotalDescendants; + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateAPIEntityFactory.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateAPIEntityFactory.java index 8e18b39658..be3415a8d9 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateAPIEntityFactory.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateAPIEntityFactory.java @@ -17,5 +17,5 @@ package org.apache.eagle.query.aggregate; public interface AggregateAPIEntityFactory { - public AggregateAPIEntity create(); + public AggregateAPIEntity create(); } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateCondition.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateCondition.java index 5555cfd033..b61adf6839 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateCondition.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateCondition.java @@ -19,55 +19,51 @@ import java.io.Serializable; import java.util.List; -/** - * 
- * @since : 11/7/14,2014 - */ -public class AggregateCondition implements Serializable{ - private static final long serialVersionUID = 1L; - private List groupbyFields; - private List aggregateFunctionTypes; - private List aggregateFields; - private boolean timeSeries; - private long intervalMS; +public class AggregateCondition implements Serializable { + private static final long serialVersionUID = 1L; + private List groupbyFields; + private List aggregateFunctionTypes; + private List aggregateFields; + private boolean timeSeries; + private long intervalMS; - public List getGroupbyFields() { - return groupbyFields; - } + public List getGroupbyFields() { + return groupbyFields; + } - public void setGroupbyFields(List groupbyFields) { - this.groupbyFields = groupbyFields; - } + public void setGroupbyFields(List groupbyFields) { + this.groupbyFields = groupbyFields; + } - public List getAggregateFunctionTypes() { - return aggregateFunctionTypes; - } + public List getAggregateFunctionTypes() { + return aggregateFunctionTypes; + } - public void setAggregateFunctionTypes(List aggregateFunctionTypes) { - this.aggregateFunctionTypes = aggregateFunctionTypes; - } + public void setAggregateFunctionTypes(List aggregateFunctionTypes) { + this.aggregateFunctionTypes = aggregateFunctionTypes; + } - public List getAggregateFields() { - return aggregateFields; - } + public List getAggregateFields() { + return aggregateFields; + } - public void setAggregateFields(List aggregateFields) { - this.aggregateFields = aggregateFields; - } + public void setAggregateFields(List aggregateFields) { + this.aggregateFields = aggregateFields; + } - public boolean isTimeSeries() { - return timeSeries; - } + public boolean isTimeSeries() { + return timeSeries; + } - public void setTimeSeries(boolean timeSeries) { - this.timeSeries = timeSeries; - } + public void setTimeSeries(boolean timeSeries) { + this.timeSeries = timeSeries; + } - public long getIntervalMS() { - return intervalMS; - } + 
public long getIntervalMS() { + return intervalMS; + } - public void setIntervalMS(long intervalMS) { - this.intervalMS = intervalMS; - } + public void setIntervalMS(long intervalMS) { + this.intervalMS = intervalMS; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionNotSupportedException.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionNotSupportedException.java index df35c8ba72..09ce4b78d3 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionNotSupportedException.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionNotSupportedException.java @@ -16,13 +16,14 @@ */ package org.apache.eagle.query.aggregate; -public class AggregateFunctionNotSupportedException extends RuntimeException{ - static final long serialVersionUID = -4548788354899625887L; - public AggregateFunctionNotSupportedException(){ - super(); - } - - public AggregateFunctionNotSupportedException(String message){ - super(message); - } +public class AggregateFunctionNotSupportedException extends RuntimeException { + static final long serialVersionUID = -4548788354899625887L; + + public AggregateFunctionNotSupportedException() { + super(); + } + + public AggregateFunctionNotSupportedException(String message) { + super(message); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionType.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionType.java index 8ac3b8cbd0..b6e916030f 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionType.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionType.java @@ 
-21,65 +21,67 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -public enum AggregateFunctionType{ - count("^(count)$"), - sum("^sum\\((.*)\\)$"), - avg("^avg\\((.*)\\)$"), - max("^max\\((.*)\\)$"), - min("^min\\((.*)\\)$"); - - private Pattern pattern; - private AggregateFunctionType(String patternString){ - this.pattern = Pattern.compile(patternString); - } +public enum AggregateFunctionType { + count("^(count)$"), + sum("^sum\\((.*)\\)$"), + avg("^avg\\((.*)\\)$"), + max("^max\\((.*)\\)$"), + min("^min\\((.*)\\)$"); - /** - * This method is thread safe - * match and retrieve back the aggregated fields, for count, aggregateFields can be null - * @param function - * @return - */ - public AggregateFunctionTypeMatcher matcher(String function){ - Matcher m = pattern.matcher(function); + private Pattern pattern; - if(m.find()){ - return new AggregateFunctionTypeMatcher(this, true, m.group(1)); - }else{ - return new AggregateFunctionTypeMatcher(this, false, null); - } - } + private AggregateFunctionType(String patternString) { + this.pattern = Pattern.compile(patternString); + } - public static AggregateFunctionTypeMatcher matchAll(String function){ - for(AggregateFunctionType type : values()){ - Matcher m = type.pattern.matcher(function); - if(m.find()){ - return new AggregateFunctionTypeMatcher(type, true, m.group(1)); - } - } - return new AggregateFunctionTypeMatcher(null, false, null); - } + /** + * This method is thread safe + * match and retrieve back the aggregated fields, for count, aggregateFields can be null. 
+ * + * @param function + * @return + */ + public AggregateFunctionTypeMatcher matcher(String function) { + Matcher m = pattern.matcher(function); - public static byte[] serialize(AggregateFunctionType type){ - return type.name().getBytes(); - } + if (m.find()) { + return new AggregateFunctionTypeMatcher(this, true, m.group(1)); + } else { + return new AggregateFunctionTypeMatcher(this, false, null); + } + } - public static AggregateFunctionType deserialize(byte[] type){ - return valueOf(new String(type)); - } + public static AggregateFunctionTypeMatcher matchAll(String function) { + for (AggregateFunctionType type : values()) { + Matcher m = type.pattern.matcher(function); + if (m.find()) { + return new AggregateFunctionTypeMatcher(type, true, m.group(1)); + } + } + return new AggregateFunctionTypeMatcher(null, false, null); + } - public static List toBytesList(List types){ - List result = new ArrayList(); - for(AggregateFunctionType type:types){ - result.add(serialize(type)); - } - return result; - } + public static byte[] serialize(AggregateFunctionType type) { + return type.name().getBytes(); + } - public static List fromBytesList(List types){ - List result = new ArrayList(); - for(byte[] bs:types){ - result.add(deserialize(bs)); - } - return result; - } + public static AggregateFunctionType deserialize(byte[] type) { + return valueOf(new String(type)); + } + + public static List toBytesList(List types) { + List result = new ArrayList(); + for (AggregateFunctionType type : types) { + result.add(serialize(type)); + } + return result; + } + + public static List fromBytesList(List types) { + List result = new ArrayList(); + for (byte[] bs : types) { + result.add(deserialize(bs)); + } + return result; + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionTypeMatcher.java 
b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionTypeMatcher.java index 6b2bc131a4..c82922786d 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionTypeMatcher.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateFunctionTypeMatcher.java @@ -17,25 +17,25 @@ package org.apache.eagle.query.aggregate; public class AggregateFunctionTypeMatcher { - private final AggregateFunctionType type; - private final boolean matched; - private final String field; + private final AggregateFunctionType type; + private final boolean matched; + private final String field; - public AggregateFunctionTypeMatcher(AggregateFunctionType type, boolean matched, String field){ - this.type = type; - this.matched = matched; - this.field = field; - } - - public boolean find(){ - return this.matched; - } - - public String field(){ - return this.field; - } - - public AggregateFunctionType type(){ - return this.type; - } + public AggregateFunctionTypeMatcher(AggregateFunctionType type, boolean matched, String field) { + this.type = type; + this.matched = matched; + this.field = field; + } + + public boolean find() { + return this.matched; + } + + public String field() { + return this.field; + } + + public AggregateFunctionType type() { + return this.type; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateParams.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateParams.java index 616184dc85..7790f1e8e9 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateParams.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateParams.java @@ -19,59 +19,70 @@ import java.util.ArrayList; import java.util.List; -public 
class AggregateParams{ - List groupbyFields; - boolean counting; - List sumFunctionFields = new ArrayList(); - List sortFieldOrders = new ArrayList(); - - public List getSortFieldOrders() { - return sortFieldOrders; - } - public void setSortFieldOrders(List sortFieldOrders) { - this.sortFieldOrders = sortFieldOrders; - } - public List getGroupbyFields() { - return groupbyFields; - } - public void setGroupbyFields(List groupbyFields) { - this.groupbyFields = groupbyFields; - } - public boolean isCounting() { - return counting; - } - public void setCounting(boolean counting) { - this.counting = counting; - } - public List getSumFunctionFields() { - return sumFunctionFields; - } - public void setSumFunctionFields(List sumFunctionFields) { - this.sumFunctionFields = sumFunctionFields; - } - - public static class SortFieldOrder{ - public static final String SORT_BY_AGGREGATE_KEY = "key"; - public static final String SORT_BY_COUNT = "count"; - private String field; - private boolean ascendant; - - public SortFieldOrder(String field, boolean ascendant) { - super(); - this.field = field; - this.ascendant = ascendant; - } - public String getField() { - return field; - } - public void setField(String field) { - this.field = field; - } - public boolean isAscendant() { - return ascendant; - } - public void setAscendant(boolean ascendant) { - this.ascendant = ascendant; - } - } +public class AggregateParams { + List groupbyFields; + boolean counting; + List sumFunctionFields = new ArrayList(); + List sortFieldOrders = new ArrayList(); + + public List getSortFieldOrders() { + return sortFieldOrders; + } + + public void setSortFieldOrders(List sortFieldOrders) { + this.sortFieldOrders = sortFieldOrders; + } + + public List getGroupbyFields() { + return groupbyFields; + } + + public void setGroupbyFields(List groupbyFields) { + this.groupbyFields = groupbyFields; + } + + public boolean isCounting() { + return counting; + } + + public void setCounting(boolean counting) { + 
this.counting = counting; + } + + public List getSumFunctionFields() { + return sumFunctionFields; + } + + public void setSumFunctionFields(List sumFunctionFields) { + this.sumFunctionFields = sumFunctionFields; + } + + public static class SortFieldOrder { + public static final String SORT_BY_AGGREGATE_KEY = "key"; + public static final String SORT_BY_COUNT = "count"; + private String field; + private boolean ascendant; + + public SortFieldOrder(String field, boolean ascendant) { + super(); + this.field = field; + this.ascendant = ascendant; + } + + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public boolean isAscendant() { + return ascendant; + } + + public void setAscendant(boolean ascendant) { + this.ascendant = ascendant; + } + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateParamsValidator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateParamsValidator.java index 95005745d4..bfbf347f9d 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateParamsValidator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateParamsValidator.java @@ -20,75 +20,76 @@ import java.util.List; public class AggregateParamsValidator { - /** - * This method handle the following sytle syntax - * sum(numConfiguredMapSlots), count group by cluster, rack - * 1. ensure that all gb fields must occur in outputField or outputTag - * 2. ensure that all summarized fields must occur in outputField, - * for example, for function=sum(numConfiguredMapSlots), numConfiguredMapSlots must occur in outputField - * 3. 
groupby should be pre-appended with a root groupby field - * @param outputTags - * @param outputFields - * @param groupbys - * @param functions - * @throws IllegalArgumentException - */ - public static AggregateParams compileAggregateParams(List outputTags, List outputFields, List groupbys, List functions, List sortFieldOrders) - throws IllegalArgumentException, AggregateFunctionNotSupportedException{ - AggregateParams aggParams = new AggregateParams(); - // ensure that all gb fields must occur in outputField or outputTag - for(String groupby : groupbys){ - if(!outputTags.contains(groupby) && !outputFields.contains(groupby)){ - throw new IllegalArgumentException(groupby + ", All gb fields should appear in outputField list or outputTag list"); - } - } - - // parse functions and ensure that all summarized fields must occur in outputField - for(String function : functions){ - AggregateFunctionTypeMatcher m = AggregateFunctionType.count.matcher(function); - if(m.find()){ - aggParams.setCounting(true); - continue; - } + /** + * This method handle the following sytle syntax + * sum(numConfiguredMapSlots), count group by cluster, rack + * 1. ensure that all gb fields must occur in outputField or outputTag + * 2. ensure that all summarized fields must occur in outputField, + * for example, for function=sum(numConfiguredMapSlots), numConfiguredMapSlots must occur in outputField + * 3. 
groupby should be pre-appended with a root groupby field + * + * @param outputTags + * @param outputFields + * @param groupbys + * @param functions + * @throws IllegalArgumentException + */ + public static AggregateParams compileAggregateParams(List outputTags, List outputFields, List groupbys, List functions, List sortFieldOrders) + throws IllegalArgumentException, AggregateFunctionNotSupportedException { + AggregateParams aggParams = new AggregateParams(); + // ensure that all gb fields must occur in outputField or outputTag + for (String groupby : groupbys) { + if (!outputTags.contains(groupby) && !outputFields.contains(groupby)) { + throw new IllegalArgumentException(groupby + ", All gb fields should appear in outputField list or outputTag list"); + } + } - m = AggregateFunctionType.sum.matcher(function); - if(m.find()){ - if(!outputFields.contains(m.field())){ - throw new IllegalArgumentException(m.field() + ", All summary function fields should appear in outputField list"); - } - aggParams.getSumFunctionFields().add(m.field()); - continue; - } - - throw new AggregateFunctionNotSupportedException("function " + function + " is not supported, only count, sum aggregate functions are now supported"); - } - - // groupby should be pre-appended with a root groupby field - List groupbyFields = new ArrayList(); - groupbyFields.add(Aggregator.GROUPBY_ROOT_FIELD_NAME); - groupbyFields.addAll(groupbys); - aggParams.setGroupbyFields(groupbyFields); + // parse functions and ensure that all summarized fields must occur in outputField + for (String function : functions) { + AggregateFunctionTypeMatcher m = AggregateFunctionType.count.matcher(function); + if (m.find()) { + aggParams.setCounting(true); + continue; + } - // check sort field orders - boolean byKeySorting = false; - for(String sortFieldOrder : sortFieldOrders){ - AggregateParams.SortFieldOrder sfo = SortFieldOrderType.matchAll(sortFieldOrder); - if(sfo == null){ - throw new IllegalArgumentException(sortFieldOrder 
+ ", All sort field order should be =(asc|desc)"); - } - if(sfo.getField().equals(AggregateParams.SortFieldOrder.SORT_BY_AGGREGATE_KEY)){ - byKeySorting = true; - }else if(!sfo.getField().equals(AggregateParams.SortFieldOrder.SORT_BY_COUNT)){ - if(!groupbys.contains(sfo.getField()) && !aggParams.getSumFunctionFields().contains(sfo.getField())){ - throw new IllegalArgumentException(sortFieldOrder + ", All sort field order should appear in gb or function fields"); - } - } - aggParams.getSortFieldOrders().add(sfo); - } - // always add key ascendant to the last aggregation key if not specified - if(!byKeySorting){ - aggParams.getSortFieldOrders().add(new AggregateParams.SortFieldOrder(AggregateParams.SortFieldOrder.SORT_BY_AGGREGATE_KEY, true)); - } - return aggParams; - } + m = AggregateFunctionType.sum.matcher(function); + if (m.find()) { + if (!outputFields.contains(m.field())) { + throw new IllegalArgumentException(m.field() + ", All summary function fields should appear in outputField list"); + } + aggParams.getSumFunctionFields().add(m.field()); + continue; + } + + throw new AggregateFunctionNotSupportedException("function " + function + " is not supported, only count, sum aggregate functions are now supported"); + } + + // groupby should be pre-appended with a root groupby field + List groupbyFields = new ArrayList(); + groupbyFields.add(Aggregator.GROUPBY_ROOT_FIELD_NAME); + groupbyFields.addAll(groupbys); + aggParams.setGroupbyFields(groupbyFields); + + // check sort field orders + boolean byKeySorting = false; + for (String sortFieldOrder : sortFieldOrders) { + AggregateParams.SortFieldOrder sfo = SortFieldOrderType.matchAll(sortFieldOrder); + if (sfo == null) { + throw new IllegalArgumentException(sortFieldOrder + ", All sort field order should be =(asc|desc)"); + } + if (sfo.getField().equals(AggregateParams.SortFieldOrder.SORT_BY_AGGREGATE_KEY)) { + byKeySorting = true; + } else if (!sfo.getField().equals(AggregateParams.SortFieldOrder.SORT_BY_COUNT)) { + 
if (!groupbys.contains(sfo.getField()) && !aggParams.getSumFunctionFields().contains(sfo.getField())) { + throw new IllegalArgumentException(sortFieldOrder + ", All sort field order should appear in gb or function fields"); + } + } + aggParams.getSortFieldOrders().add(sfo); + } + // always add key ascendant to the last aggregation key if not specified + if (!byKeySorting) { + aggParams.getSortFieldOrders().add(new AggregateParams.SortFieldOrder(AggregateParams.SortFieldOrder.SORT_BY_AGGREGATE_KEY, true)); + } + return aggParams; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateResultAPIEntity.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateResultAPIEntity.java index c1c87d36ac..be27d1d8e2 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateResultAPIEntity.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/AggregateResultAPIEntity.java @@ -18,35 +18,42 @@ import org.codehaus.jackson.map.annotate.JsonSerialize; -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) public class AggregateResultAPIEntity { - private boolean success; - private String exception; - private long elapsedms; - private AggregateAPIEntity entity; - - public long getElapsedms() { - return elapsedms; - } - public void setElapsedms(long elapsedms) { - this.elapsedms = elapsedms; - } - public AggregateAPIEntity getEntity() { - return entity; - } - public void setEntity(AggregateAPIEntity entity) { - this.entity = entity; - } - public boolean isSuccess() { - return success; - } - public void setSuccess(boolean success) { - this.success = success; - } - public String getException() { - return exception; - } - public void setException(String exception) { - this.exception = exception; - } + private boolean success; + 
private String exception; + private long elapsedms; + private AggregateAPIEntity entity; + + public long getElapsedms() { + return elapsedms; + } + + public void setElapsedms(long elapsedms) { + this.elapsedms = elapsedms; + } + + public AggregateAPIEntity getEntity() { + return entity; + } + + public void setEntity(AggregateAPIEntity entity) { + this.entity = entity; + } + + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getException() { + return exception; + } + + public void setException(String exception) { + this.exception = exception; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/Aggregator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/Aggregator.java index de911e50e8..fc0ffe135c 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/Aggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/Aggregator.java @@ -16,153 +16,157 @@ */ package org.apache.eagle.query.aggregate; +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.lang.reflect.Method; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +public class Aggregator { + private static final Logger LOG = LoggerFactory.getLogger(Aggregator.class); + public static final String GROUPBY_ROOT_FIELD_NAME = "site"; + public static final String GROUPBY_ROOT_FIELD_VALUE = "xyz"; + public static final String UNASSIGNED_GROUPBY_ROOT_FIELD_NAME = "unassigned"; -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; + private final AggregateAPIEntityFactory factory; + private final AggregateAPIEntity root; + private final List groupbys; + private final List sumFunctionFields; + 
private final boolean counting; + + public Aggregator(AggregateAPIEntityFactory factory, AggregateAPIEntity root, List groupbys, boolean counting, List sumFunctionFields) { + this.factory = factory; + this.root = root; + this.groupbys = groupbys; + this.sumFunctionFields = sumFunctionFields; + this.counting = counting; + } + + /** + * this locate result can be cached? we don't need check if it's TaggedLogAPIEntity each time when iterating entities. + * + * @param groupby + * @param obj + * @return + * @throws Exception + */ + private String locateGroupbyField(String groupby, TaggedLogAPIEntity obj) { + if (groupby.equals(GROUPBY_ROOT_FIELD_NAME)) { + return GROUPBY_ROOT_FIELD_VALUE; + } + // check tag first + String tagv = obj.getTags().get(groupby); + if (tagv != null) { + return tagv; + } + // check against pojo, or qualifierValues + String fn = groupby.substring(0, 1).toUpperCase() + groupby.substring(1, groupby.length()); + try { + Method getM = obj.getClass().getMethod("get" + fn); + Object value = getM.invoke(obj); + return (String) value; + } catch (Exception ex) { + LOG.warn(groupby + " field is in neither tags nor fields, " + ex.getMessage()); + return null; + } + } + + /** + * accumulate a list of entities. + * + * @param entities + * @throws Exception + */ + public void accumulateAll(List entities) throws Exception { + for (TaggedLogAPIEntity entity : entities) { + accumulate(entity); + } + } + + /** + * currently only group by tags + * groupbys' first item always is site, which is a reserved field. + */ + public void accumulate(TaggedLogAPIEntity entity) throws Exception { + AggregateAPIEntity current = root; + for (String groupby : groupbys) { + // TODO tagv is empty, so what to do? use a reserved field_name "unassigned" ? 
+ // TODO we should support all Pojo with java bean style object + String tagv = locateGroupbyField(groupby, entity); + if (tagv == null || tagv.isEmpty()) { + tagv = UNASSIGNED_GROUPBY_ROOT_FIELD_NAME; + } + Map children = current.getEntityList(); + if (children.get(tagv) == null) { + children.put(tagv, factory.create()); + current.setNumDirectDescendants(current.getNumDirectDescendants() + 1); + } + AggregateAPIEntity child = children.get(tagv); + // go through all aggregate functions including count, summary etc. + if (counting) { + count(child); + } + for (String sumFunctionField : sumFunctionFields) { + sum(child, entity, sumFunctionField); + } + + current = child; + } + + } -public class Aggregator { - private static final Logger LOG = LoggerFactory.getLogger(Aggregator.class); - public static final String GROUPBY_ROOT_FIELD_NAME = "site"; - public static final String GROUPBY_ROOT_FIELD_VALUE = "xyz"; - public static final String UNASSIGNED_GROUPBY_ROOT_FIELD_NAME = "unassigned"; - - private final AggregateAPIEntityFactory factory; - private final AggregateAPIEntity root; - private final List groupbys; - private final List sumFunctionFields; - private final boolean counting; - - public Aggregator(AggregateAPIEntityFactory factory, AggregateAPIEntity root, List groupbys, boolean counting, List sumFunctionFields){ - this.factory = factory; - this.root = root; - this.groupbys = groupbys; - this.sumFunctionFields = sumFunctionFields; - this.counting = counting; - } - /** - * this locate result can be cached? 
we don't need check if it's TaggedLogAPIEntity each time when iterating entities - * @param groupby - * @param obj - * @return - * @throws Exception - */ - private String locateGroupbyField(String groupby, TaggedLogAPIEntity obj){ - if(groupby.equals(GROUPBY_ROOT_FIELD_NAME)){ - return GROUPBY_ROOT_FIELD_VALUE; - } - // check tag first - String tagv = obj.getTags().get(groupby); - if(tagv != null) - return tagv; - // check against pojo, or qualifierValues - String fn = groupby.substring(0,1).toUpperCase()+groupby.substring(1, groupby.length()); - try{ - Method getM = obj.getClass().getMethod("get"+fn); - Object value = getM.invoke(obj); - return (String)value; - }catch(Exception ex){ - LOG.warn(groupby + " field is in neither tags nor fields, " + ex.getMessage()); - return null; - } - } - - /** - * accumulate a list of entities - * @param entities - * @throws Exception - */ - public void accumulateAll(List entities) throws Exception{ - for(TaggedLogAPIEntity entity : entities){ - accumulate(entity); - } - } - - /** - * currently only group by tags - * groupbys' first item always is site, which is a reserved field - */ - public void accumulate(TaggedLogAPIEntity entity) throws Exception{ - AggregateAPIEntity current = root; - for(String groupby : groupbys){ - // TODO tagv is empty, so what to do? use a reserved field_name "unassigned" ? - // TODO we should support all Pojo with java bean style object - String tagv = locateGroupbyField(groupby, entity); - if(tagv == null || tagv.isEmpty()){ - tagv = UNASSIGNED_GROUPBY_ROOT_FIELD_NAME; - } - Map children = current.getEntityList(); - if(children.get(tagv) == null){ - children.put(tagv, factory.create()); - current.setNumDirectDescendants(current.getNumDirectDescendants()+1); - } - AggregateAPIEntity child = children.get(tagv); - // go through all aggregate functions including count, summary etc. 
- if(counting) - count(child); - for(String sumFunctionField : sumFunctionFields){ - sum(child, entity, sumFunctionField); - } - - current = child; - } - - } + /** + * use java bean specifications? + * reflection is not efficient, let us find out solutions. + */ + private void sum(Object targetObj, TaggedLogAPIEntity srcObj, String fieldName) throws Exception { + try { + String fn = fieldName.substring(0, 1).toUpperCase() + fieldName.substring(1, fieldName.length()); + Method srcGetMethod = srcObj.getClass().getMethod("get" + fn); + Object srcValue = srcGetMethod.invoke(srcObj); + if (srcValue == null) { + return; // silently don't count this source object + } + Method targetGetMethod = targetObj.getClass().getMethod("get" + fn); + Object targetValue = targetGetMethod.invoke(targetObj); + if (targetValue instanceof Long) { + Method setM = targetObj.getClass().getMethod("set" + fn, long.class); + Long tmp1 = (Long) targetValue; + // TODO, now source object always have type "java.lang.String", later on we should support various type including integer type + Long tmp2 = null; + if (srcValue instanceof String) { + tmp2 = Long.valueOf((String) srcValue); + } else if (srcValue instanceof Long) { + tmp2 = (Long) srcValue; + } else { + throw new IllegalAggregateFieldTypeException(srcValue.getClass().toString() + " type is not support. The source type must be Long or String"); + } + setM.invoke(targetObj, tmp1.longValue() + tmp2.longValue()); + } else if (targetValue instanceof Double) { + Method setM = targetObj.getClass().getMethod("set" + fn, double.class); + Double tmp1 = (Double) targetValue; + String src = (String) srcValue; + Double tmp2 = Double.valueOf(src); + setM.invoke(targetObj, tmp1.doubleValue() + tmp2.doubleValue()); + } else { + throw new IllegalAggregateFieldTypeException(targetValue.getClass().toString() + " type is not support. 
The target type must be long or double"); + } + } catch (Exception ex) { + LOG.error("Cannot do sum aggregation for field " + fieldName, ex); + throw ex; + } + } - - /** - * use java bean specifications? - * reflection is not efficient, let us find out solutions - */ - private void sum(Object targetObj, TaggedLogAPIEntity srcObj, String fieldName) throws Exception{ - try{ - String fn = fieldName.substring(0,1).toUpperCase()+fieldName.substring(1, fieldName.length()); - Method srcGetMethod = srcObj.getClass().getMethod("get"+fn); - Object srcValue = srcGetMethod.invoke(srcObj); - if(srcValue == null){ - return; // silently don't count this source object - } - Method targetGetMethod = targetObj.getClass().getMethod("get"+fn); - Object targetValue = targetGetMethod.invoke(targetObj); - if(targetValue instanceof Long){ - Method setM = targetObj.getClass().getMethod("set"+fn, long.class); - Long tmp1 = (Long)targetValue; - // TODO, now source object always have type "java.lang.String", later on we should support various type including integer type - Long tmp2 = null; - if(srcValue instanceof String){ - tmp2 = Long.valueOf((String)srcValue); - }else if(srcValue instanceof Long){ - tmp2 = (Long)srcValue; - }else{ - throw new IllegalAggregateFieldTypeException(srcValue.getClass().toString() + " type is not support. The source type must be Long or String"); - } - setM.invoke(targetObj, tmp1.longValue()+tmp2.longValue()); - }else if(targetValue instanceof Double){ - Method setM = targetObj.getClass().getMethod("set"+fn, double.class); - Double tmp1 = (Double)targetValue; - String src = (String) srcValue; - Double tmp2 = Double.valueOf(src); - setM.invoke(targetObj, tmp1.doubleValue()+tmp2.doubleValue()); - }else{ - throw new IllegalAggregateFieldTypeException(targetValue.getClass().toString() + " type is not support. 
The target type must be long or double"); - } - }catch(Exception ex){ - LOG.error("Cannot do sum aggregation for field " + fieldName, ex); - throw ex; - } - } - - /** - * count possible not only count for number of descendants but also count for not-null fields - * @param targetObj - * @throws Exception - */ - private void count(AggregateAPIEntity targetObj) throws Exception{ - targetObj.setNumTotalDescendants(targetObj.getNumTotalDescendants()+1); - } + /** + * count possible not only count for number of descendants but also count for not-null fields. + * + * @param targetObj + * @throws Exception + */ + private void count(AggregateAPIEntity targetObj) throws Exception { + targetObj.setNumTotalDescendants(targetObj.getNumTotalDescendants() + 1); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/BucketQuery.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/BucketQuery.java index a00c5adc23..9ca7265fed 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/BucketQuery.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/BucketQuery.java @@ -16,69 +16,70 @@ */ package org.apache.eagle.query.aggregate; +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; + import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; - public class BucketQuery { - public final static String UNASSIGNED_BUCKET = "unassigned"; - private List bucketFields; - private int limit; - private Map root = new HashMap(); - - public BucketQuery(List bucketFields, int limit){ - this.bucketFields = bucketFields; - this.limit = limit; - } - - @SuppressWarnings("unchecked") - public void put(TaggedLogAPIEntity entity){ - Map current = root; - int bucketCount = 
bucketFields.size(); - if(bucketCount <= 0) - return; // silently return - int i = 0; - String bucketFieldValue = null; - for(; i()); - } - // for the last level of bucket, it is not Map, instead it is List - current = (Map)current.get(bucketFieldValue); - } - List bucketContent = (List)current.get(bucketFieldValue); - if(bucketContent == null){ - bucketContent = new ArrayList(); - current.put(bucketFieldValue, bucketContent); - } - - if(bucketContent.size() >= limit){ - return; - }else{ - bucketContent.add(entity); - } - } - - public void batchPut(List entities){ - for(TaggedLogAPIEntity entity : entities){ - put(entity); - } - } - - public Map get(){ - return root; - } + public static final String UNASSIGNED_BUCKET = "unassigned"; + private List bucketFields; + private int limit; + private Map root = new HashMap(); + + public BucketQuery(List bucketFields, int limit) { + this.bucketFields = bucketFields; + this.limit = limit; + } + + @SuppressWarnings("unchecked") + public void put(TaggedLogAPIEntity entity) { + Map current = root; + int bucketCount = bucketFields.size(); + if (bucketCount <= 0) { + return; // silently return + } + int i = 0; + String bucketFieldValue = null; + for (; i < bucketCount; i++) { + String bucketField = bucketFields.get(i); + bucketFieldValue = entity.getTags().get(bucketField); + if (bucketFieldValue == null || bucketFieldValue.isEmpty()) { + bucketFieldValue = UNASSIGNED_BUCKET; + } + // for last bucket, bypass the following logic + if (i == bucketCount - 1) { + break; + } + + if (current.get(bucketFieldValue) == null) { + current.put(bucketFieldValue, new HashMap()); + } + // for the last level of bucket, it is not Map, instead it is List + current = (Map) current.get(bucketFieldValue); + } + List bucketContent = (List) current.get(bucketFieldValue); + if (bucketContent == null) { + bucketContent = new ArrayList(); + current.put(bucketFieldValue, bucketContent); + } + + if (bucketContent.size() >= limit) { + return; + } else { + 
bucketContent.add(entity); + } + } + + public void batchPut(List entities) { + for (TaggedLogAPIEntity entity : entities) { + put(entity); + } + } + + public Map get() { + return root; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/IllegalAggregateFieldTypeException.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/IllegalAggregateFieldTypeException.java index 3e3e739eab..05f7fb8ea3 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/IllegalAggregateFieldTypeException.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/IllegalAggregateFieldTypeException.java @@ -16,13 +16,14 @@ */ package org.apache.eagle.query.aggregate; -public class IllegalAggregateFieldTypeException extends RuntimeException{ - static final long serialVersionUID = -4548788354899625887L; - public IllegalAggregateFieldTypeException(){ - super(); - } - - public IllegalAggregateFieldTypeException(String message){ - super(message + ", only count and sum are support"); - } +public class IllegalAggregateFieldTypeException extends RuntimeException { + static final long serialVersionUID = -4548788354899625887L; + + public IllegalAggregateFieldTypeException() { + super(); + } + + public IllegalAggregateFieldTypeException(String message) { + super(message + ", only count and sum are support"); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/PostAggregateSorting.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/PostAggregateSorting.java index b8012552f3..ef1991c74b 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/PostAggregateSorting.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/PostAggregateSorting.java @@ 
-16,86 +16,87 @@ */ package org.apache.eagle.query.aggregate; -import java.lang.reflect.Method; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.SortedSet; -import java.util.TreeSet; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.lang.reflect.Method; +import java.util.*; + public class PostAggregateSorting { - private static final Logger LOG = LoggerFactory.getLogger(PostAggregateSorting.class); - - private static SortedSet> sortByValue(Map map, List sortedFields) { - SortedSet> sortedEntries = new TreeSet>(new MapKeyValueComparator(sortedFields)); - sortedEntries.addAll(map.entrySet()); - return sortedEntries; - } + private static final Logger LOG = LoggerFactory.getLogger(PostAggregateSorting.class); + + private static SortedSet> sortByValue(Map map, List sortedFields) { + SortedSet> sortedEntries = new TreeSet>(new MapKeyValueComparator(sortedFields)); + sortedEntries.addAll(map.entrySet()); + return sortedEntries; + } - /** - * recursively populate sorted list from entity list - * @param entity - */ - public static void sort(AggregateAPIEntity entity, List sortFieldOrders){ - // sort should internally add key field to AggregateAPIEntity before the sorting starts as "key" could be sorted against - Map children = entity.getEntityList(); - for(Map.Entry e : children.entrySet()){ - e.getValue().setKey(e.getKey()); - } - SortedSet> set = sortByValue(children, sortFieldOrders); - for(Map.Entry entry : set){ - entity.getSortedList().add(entry.getValue()); - } - for(Map.Entry entry : entity.getEntityList().entrySet()){ - sort(entry.getValue(), sortFieldOrders); - } - entity.setEntityList(null); - } + /** + * recursively populate sorted list from entity list. 
+ * + * @param entity + */ + public static void sort(AggregateAPIEntity entity, List sortFieldOrders) { + // sort should internally add key field to AggregateAPIEntity before the sorting starts as "key" could be sorted against + Map children = entity.getEntityList(); + for (Map.Entry e : children.entrySet()) { + e.getValue().setKey(e.getKey()); + } + SortedSet> set = sortByValue(children, sortFieldOrders); + for (Map.Entry entry : set) { + entity.getSortedList().add(entry.getValue()); + } + for (Map.Entry entry : entity.getEntityList().entrySet()) { + sort(entry.getValue(), sortFieldOrders); + } + entity.setEntityList(null); + } + + private static class MapKeyValueComparator implements Comparator> { + private List sortedFieldOrders; + + public MapKeyValueComparator(List sortedFields) { + this.sortedFieldOrders = sortedFields; + } - private static class MapKeyValueComparator implements Comparator>{ - private List sortedFieldOrders; - public MapKeyValueComparator(List sortedFields){ - this.sortedFieldOrders = sortedFields; - } - @Override - public int compare(Map.Entry e1, Map.Entry e2){ - int r = 0; - AggregateAPIEntity entity1 = e1.getValue(); - AggregateAPIEntity entity2 = e2.getValue(); - for(AggregateParams.SortFieldOrder sortFieldOrder : sortedFieldOrders){ - // TODO count should not be literal, compare numTotalDescendants - if(sortFieldOrder.getField().equals(AggregateParams.SortFieldOrder.SORT_BY_COUNT)){ - long tmp = entity1.getNumTotalDescendants() - entity2.getNumTotalDescendants(); - r = (tmp == 0) ? 0 : ((tmp > 0) ? 
1 : -1); - }else if(sortFieldOrder.getField().equals(AggregateParams.SortFieldOrder.SORT_BY_AGGREGATE_KEY)){ - r = entity1.getKey().compareTo(entity2.getKey()); - }else{ - try{ - String sortedField = sortFieldOrder.getField(); - String tmp1 = sortedField.substring(0, 1).toUpperCase()+sortedField.substring(1); - Method getMethod1 = entity1.getClass().getMethod("get"+tmp1); - Object r1 = getMethod1.invoke(entity1); - Long comp1 = (Long)r1; - String tmp2 = sortedField.substring(0, 1).toUpperCase()+sortedField.substring(1); - Method getMethod2 = entity2.getClass().getMethod("get"+tmp2); - Object r2 = getMethod2.invoke(entity2); - Long comp2 = (Long)r2; - r = comp1.compareTo(comp2); - }catch(Exception ex){ - LOG.error("Can not get corresponding field for sorting", ex); - r = 0; - } - } - if(r == 0) continue; - if(!sortFieldOrder.isAscendant()){ - r = -r; - } - return r; - } - return r; + @Override + public int compare(Map.Entry e1, Map.Entry e2) { + int r = 0; + AggregateAPIEntity entity1 = e1.getValue(); + AggregateAPIEntity entity2 = e2.getValue(); + for (AggregateParams.SortFieldOrder sortFieldOrder : sortedFieldOrders) { + // TODO count should not be literal, compare numTotalDescendants + if (sortFieldOrder.getField().equals(AggregateParams.SortFieldOrder.SORT_BY_COUNT)) { + long tmp = entity1.getNumTotalDescendants() - entity2.getNumTotalDescendants(); + r = (tmp == 0) ? 0 : ((tmp > 0) ? 
1 : -1); + } else if (sortFieldOrder.getField().equals(AggregateParams.SortFieldOrder.SORT_BY_AGGREGATE_KEY)) { + r = entity1.getKey().compareTo(entity2.getKey()); + } else { + try { + String sortedField = sortFieldOrder.getField(); + String tmp1 = sortedField.substring(0, 1).toUpperCase() + sortedField.substring(1); + Method getMethod1 = entity1.getClass().getMethod("get" + tmp1); + Object r1 = getMethod1.invoke(entity1); + Long comp1 = (Long) r1; + String tmp2 = sortedField.substring(0, 1).toUpperCase() + sortedField.substring(1); + Method getMethod2 = entity2.getClass().getMethod("get" + tmp2); + Object r2 = getMethod2.invoke(entity2); + Long comp2 = (Long) r2; + r = comp1.compareTo(comp2); + } catch (Exception ex) { + LOG.error("Can not get corresponding field for sorting", ex); + r = 0; + } + } + if (r == 0) { + continue; + } + if (!sortFieldOrder.isAscendant()) { + r = -r; + } + return r; + } + return r; } - } + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/SortFieldOrderType.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/SortFieldOrderType.java index 6d47c7f988..688b1e1283 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/SortFieldOrderType.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/SortFieldOrderType.java @@ -20,40 +20,43 @@ import java.util.regex.Pattern; public enum SortFieldOrderType { - key("^(key)=(asc|desc)$"), - count("^(count)=(asc|desc)$"), - sum("^sum\\((.*)\\)=(asc|desc)$"), - avg("^avg\\((.*)\\)(asc|desc)$"), - max("^max\\((.*)\\)(asc|desc)$"), - min("^min\\((.*)\\)(asc|desc)$"); - - private Pattern pattern; - private SortFieldOrderType(String patternString){ - this.pattern = Pattern.compile(patternString); - } + key("^(key)=(asc|desc)$"), + count("^(count)=(asc|desc)$"), + sum("^sum\\((.*)\\)=(asc|desc)$"), + 
avg("^avg\\((.*)\\)(asc|desc)$"), + max("^max\\((.*)\\)(asc|desc)$"), + min("^min\\((.*)\\)(asc|desc)$"); - /** - * This method is thread safe - * match and retrieve back the aggregated fields, for count, aggregateFields can be null - * @param sortFieldOrder - * @return - */ - public SortFieldOrderTypeMatcher matcher(String sortFieldOrder){ - Matcher m = pattern.matcher(sortFieldOrder); - - if(m.find()){ - return new SortFieldOrderTypeMatcher(true, m.group(1), m.group(2)); - }else{ - return new SortFieldOrderTypeMatcher(false, null, null); - } - } - - public static AggregateParams.SortFieldOrder matchAll(String sortFieldOrder){ - for(SortFieldOrderType type : SortFieldOrderType.values()){ - SortFieldOrderTypeMatcher m = type.matcher(sortFieldOrder); - if(m.find()) - return m.sortFieldOrder(); - } - return null; - } + private Pattern pattern; + + private SortFieldOrderType(String patternString) { + this.pattern = Pattern.compile(patternString); + } + + /** + * This method is thread safe + * match and retrieve back the aggregated fields, for count, aggregateFields can be null. 
+ * + * @param sortFieldOrder + * @return + */ + public SortFieldOrderTypeMatcher matcher(String sortFieldOrder) { + Matcher m = pattern.matcher(sortFieldOrder); + + if (m.find()) { + return new SortFieldOrderTypeMatcher(true, m.group(1), m.group(2)); + } else { + return new SortFieldOrderTypeMatcher(false, null, null); + } + } + + public static AggregateParams.SortFieldOrder matchAll(String sortFieldOrder) { + for (SortFieldOrderType type : SortFieldOrderType.values()) { + SortFieldOrderTypeMatcher m = type.matcher(sortFieldOrder); + if (m.find()) { + return m.sortFieldOrder(); + } + } + return null; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/SortFieldOrderTypeMatcher.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/SortFieldOrderTypeMatcher.java index 0b4d4080a2..8ef5c28ba4 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/SortFieldOrderTypeMatcher.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/SortFieldOrderTypeMatcher.java @@ -18,21 +18,21 @@ public class SortFieldOrderTypeMatcher { - private boolean matched; - private AggregateParams.SortFieldOrder sortFieldOrder; + private boolean matched; + private AggregateParams.SortFieldOrder sortFieldOrder; - public SortFieldOrderTypeMatcher(boolean matched, String field, String order){ - this.matched = matched; - if(matched){ - this.sortFieldOrder = new AggregateParams.SortFieldOrder(field, order.equals("asc")); - } - } - - public boolean find(){ - return this.matched; - } - - public AggregateParams.SortFieldOrder sortFieldOrder(){ - return this.sortFieldOrder; - } + public SortFieldOrderTypeMatcher(boolean matched, String field, String order) { + this.matched = matched; + if (matched) { + this.sortFieldOrder = new AggregateParams.SortFieldOrder(field, order.equals("asc")); + } + } + + public boolean find() { + 
return this.matched; + } + + public AggregateParams.SortFieldOrder sortFieldOrder() { + return this.sortFieldOrder; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/Function.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/Function.java index 83c683c271..b4cbc75ae7 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/Function.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/Function.java @@ -16,91 +16,110 @@ */ package org.apache.eagle.query.aggregate.raw; -public abstract class Function{ - private int count = 0; - protected void incrCount(int num){ count += num; } - public int count(){ return count; } - public abstract void run(double v,int count); - public void run(double v){ run(v,1); } - public abstract double result(); - - public static class Avg extends Function { - private double total; - public Avg(){ - this.total = 0.0; - } - @Override - public void run(double v,int count){ - this.incrCount(count); - total += v; - } - @Override - public double result(){ - return this.total/this.count(); - } - } - - public static class Max extends Function { - private double maximum; - public Max(){ - // TODO is this a bug, or only positive numeric calculation is supported - this.maximum = 0.0; - } - - @Override - public void run(double v,int count){ - this.incrCount(count); - if(v > maximum){ - maximum = v; - } - } - - @Override - public double result(){ - return maximum; - } - } - - public static class Min extends Function { - private double minimum; - public Min(){ - // TODO is this a bug, or only positive numeric calculation is supported - this.minimum = Double.MAX_VALUE; - } - @Override - public void run(double v,int count){ - this.incrCount(count); - if(v < minimum){ - minimum = v; - } - } - - @Override - public double result(){ - return minimum; - } - } - - 
public static class Sum extends Function { - private double summary; - public Sum(){ - this.summary = 0.0; - } - @Override - public void run(double v,int count){ - this.incrCount(count); - this.summary += v; - } - - @Override - public double result(){ - return this.summary; - } - } - - public static class Count extends Sum{ - public Count(){ - super(); - } - } +public abstract class Function { + private int count = 0; + + protected void incrCount(int num) { + count += num; + } + + public int count() { + return count; + } + + public abstract void run(double v, int count); + + public void run(double v) { + run(v, 1); + } + + public abstract double result(); + + public static class Avg extends Function { + private double total; + + public Avg() { + this.total = 0.0; + } + + @Override + public void run(double v, int count) { + this.incrCount(count); + total += v; + } + + @Override + public double result() { + return this.total / this.count(); + } + } + + public static class Max extends Function { + private double maximum; + + public Max() { + // TODO is this a bug, or only positive numeric calculation is supported + this.maximum = 0.0; + } + + @Override + public void run(double v, int count) { + this.incrCount(count); + if (v > maximum) { + maximum = v; + } + } + + @Override + public double result() { + return maximum; + } + } + + public static class Min extends Function { + private double minimum; + + public Min() { + // TODO is this a bug, or only positive numeric calculation is supported + this.minimum = Double.MAX_VALUE; + } + + @Override + public void run(double v, int count) { + this.incrCount(count); + if (v < minimum) { + minimum = v; + } + } + + @Override + public double result() { + return minimum; + } + } + + public static class Sum extends Function { + private double summary; + + public Sum() { + this.summary = 0.0; + } + + @Override + public void run(double v, int count) { + this.incrCount(count); + this.summary += v; + } + + @Override + public double 
result() { + return this.summary; + } + } + + public static class Count extends Sum { + public Count() { + super(); + } + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/FunctionFactory.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/FunctionFactory.java index c6d186133a..e735018b64 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/FunctionFactory.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/FunctionFactory.java @@ -21,55 +21,55 @@ import java.util.HashMap; import java.util.Map; -public abstract class FunctionFactory{ - public abstract Function createFunction(); +public abstract class FunctionFactory { + public abstract Function createFunction(); - public static class AvgFactory extends FunctionFactory { - @Override - public Function createFunction(){ - return new Function.Avg(); - } - } + public static class AvgFactory extends FunctionFactory { + @Override + public Function createFunction() { + return new Function.Avg(); + } + } - public static class MaxFactory extends FunctionFactory { - @Override - public Function createFunction(){ - return new Function.Max(); - } - } + public static class MaxFactory extends FunctionFactory { + @Override + public Function createFunction() { + return new Function.Max(); + } + } - public static class MinFactory extends FunctionFactory { - @Override - public Function createFunction(){ - return new Function.Min(); - } - } + public static class MinFactory extends FunctionFactory { + @Override + public Function createFunction() { + return new Function.Min(); + } + } - public static class CountFactory extends FunctionFactory { - @Override - public Function createFunction(){ - return new Function.Count(); - } - } + public static class CountFactory extends FunctionFactory { + @Override + 
public Function createFunction() { + return new Function.Count(); + } + } - public static class SumFactory extends FunctionFactory { - @Override - public Function createFunction(){ - return new Function.Sum(); - } - } + public static class SumFactory extends FunctionFactory { + @Override + public Function createFunction() { + return new Function.Sum(); + } + } - public static FunctionFactory locateFunctionFactory(AggregateFunctionType funcType){ - return _functionFactories.get(funcType.name()); - } + public static FunctionFactory locateFunctionFactory(AggregateFunctionType funcType) { + return _functionFactories.get(funcType.name()); + } - private static Map _functionFactories = new HashMap(); - static{ - _functionFactories.put(AggregateFunctionType.count.name(), new CountFactory()); - _functionFactories.put(AggregateFunctionType.sum.name(), new SumFactory()); - _functionFactories.put(AggregateFunctionType.min.name(), new MinFactory()); - _functionFactories.put(AggregateFunctionType.max.name(), new MaxFactory()); - _functionFactories.put(AggregateFunctionType.avg.name(), new AvgFactory()); - } + private static Map _functionFactories = new HashMap(); + + static { + _functionFactories.put(AggregateFunctionType.count.name(), new CountFactory()); + _functionFactories.put(AggregateFunctionType.sum.name(), new SumFactory()); + _functionFactories.put(AggregateFunctionType.min.name(), new MinFactory()); + _functionFactories.put(AggregateFunctionType.max.name(), new MaxFactory()); + _functionFactories.put(AggregateFunctionType.avg.name(), new AvgFactory()); + } } - \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKey.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKey.java index c8ed2602af..07bcd89193 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKey.java +++ 
b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKey.java @@ -17,7 +17,6 @@ package org.apache.eagle.query.aggregate.raw; import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.io.ByteWritable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; @@ -38,112 +37,116 @@ * */ public class GroupbyKey implements Writable { - private final WritableList value; + private final WritableList value; - public void addValue(byte[] value){ - this.value.add(new BytesWritable(value)); - } - public void addAll(List list){ - this.value.addAll(list); - } + public void addValue(byte[] value) { + this.value.add(new BytesWritable(value)); + } - public List getValue(){ - return value; - } + public void addAll(List list) { + this.value.addAll(list); + } - /** - * empty constructor - */ - public GroupbyKey(){ - this.value = new WritableList(BytesWritable.class); - } + public List getValue() { + return value; + } - /** - * clear for reuse - */ - public void clear(){ - value.clear(); - } + /** + * empty constructor. + */ + public GroupbyKey() { + this.value = new WritableList(BytesWritable.class); + } - /** - * copy constructor - * @param key - */ - public GroupbyKey(GroupbyKey key){ - this(); - ListIterator it = key.value.listIterator(); -// ListIterator it = key.value.listIterator(); - while(it.hasNext()){ - this.value.add(it.next()); - } - } + /** + * clear for reuse. + */ + public void clear() { + value.clear(); + } - public GroupbyKey(List bytes){ - this(); - for(byte[] bt:bytes){ - this.addValue(bt); - } - } + /** + * copy constructor. 
+ * + * @param key + */ + public GroupbyKey(GroupbyKey key) { + this(); + ListIterator it = key.value.listIterator(); + // ListIterator it = key.value.listIterator(); + while (it.hasNext()) { + this.value.add(it.next()); + } + } - @Override - public boolean equals(Object obj){ - if(obj == this) - return true; - if(!(obj instanceof GroupbyKey)){ - return false; - } - GroupbyKey that = (GroupbyKey)obj; - ListIterator e1 = this.value.listIterator(); - ListIterator e2 = that.value.listIterator(); - while(e1.hasNext() && e2.hasNext()){ - if(!Arrays.equals(e1.next().getBytes(), e2.next().getBytes())) - return false; - } - return !(e1.hasNext() || e2.hasNext()); - } + public GroupbyKey(List bytes) { + this(); + for (byte[] bt : bytes) { + this.addValue(bt); + } + } - @Override - public String toString() { - List items = new ArrayList<>(this.value.size()); - ListIterator iterator = this.value.listIterator(); - while(iterator.hasNext()){ - items.add(iterator.next().toString()); - } - return String.format("%s(%s)",this.getClass().getSimpleName(),StringUtils.join(items,",")); - } + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof GroupbyKey)) { + return false; + } + GroupbyKey that = (GroupbyKey) obj; + ListIterator e1 = this.value.listIterator(); + ListIterator e2 = that.value.listIterator(); + while (e1.hasNext() && e2.hasNext()) { + if (!Arrays.equals(e1.next().getBytes(), e2.next().getBytes())) { + return false; + } + } + return !(e1.hasNext() || e2.hasNext()); + } - @Override - public int hashCode(){ - ListIterator e1 = this.value.listIterator(); - int hash = 0xFFFFFFFF; - while(e1.hasNext()){ - hash ^= Arrays.hashCode(e1.next().getBytes()); - } - return hash; - } + @Override + public String toString() { + List items = new ArrayList<>(this.value.size()); + ListIterator iterator = this.value.listIterator(); + while (iterator.hasNext()) { + items.add(iterator.next().toString()); + } + return 
String.format("%s(%s)", this.getClass().getSimpleName(), StringUtils.join(items, ",")); + } - /** - * Serialize the fields of this object to out. - * - * @param out DataOuput to serialize this object into. - * @throws java.io.IOException - */ - @Override - public void write(DataOutput out) throws IOException { - this.value.write(out); - } + @Override + public int hashCode() { + ListIterator e1 = this.value.listIterator(); + int hash = 0xFFFFFFFF; + while (e1.hasNext()) { + hash ^= Arrays.hashCode(e1.next().getBytes()); + } + return hash; + } - /** - * Deserialize the fields of this object from in. - *

    - *

    For efficiency, implementations should attempt to re-use storage in the - * existing object where possible.

    - * - * @param in DataInput to deseriablize this object from. - * @throws java.io.IOException - */ - @Override - public void readFields(DataInput in) throws IOException { - this.value.readFields(in); - } + /** + * Serialize the fields of this object to out. + * + * @param out DataOuput to serialize this object into. + * @throws java.io.IOException + */ + @Override + public void write(DataOutput out) throws IOException { + this.value.write(out); + } + + /** + * Deserialize the fields of this object from in. + *

    + *

    For efficiency, implementations should attempt to re-use storage in the + * existing object where possible.

    + * + * @param in DataInput to deseriablize this object from. + * @throws java.io.IOException + */ + @Override + public void readFields(DataInput in) throws IOException { + this.value.readFields(in); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyAggregatable.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyAggregatable.java index 7e20029526..4e745a26ad 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyAggregatable.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyAggregatable.java @@ -19,21 +19,18 @@ import java.util.List; /** - * The generic interface to unify the GroupbyKeyValue-based results of different + * The generic interface to unify the GroupbyKeyValue-based results of different * business logic aggregates like RawAggregator or TimeSeriesAggregator * * @see org.apache.eagle.query.aggregate.timeseries.TimeSeriesAggregator * @see RawAggregator - * * @since : 11/3/14,2014 - * */ public interface GroupbyKeyAggregatable { - /** - * @see RawAggregator#getGroupbyKeyValues() - * @see org.apache.eagle.query.aggregate.timeseries.TimeSeriesAggregator#getGroupbyKeyValues() - * - * @return - */ - public List getGroupbyKeyValues(); + /** + * @return + * @see RawAggregator#getGroupbyKeyValues() + * @see org.apache.eagle.query.aggregate.timeseries.TimeSeriesAggregator#getGroupbyKeyValues() + */ + public List getGroupbyKeyValues(); } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyComparator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyComparator.java index f976c8cb86..9178334b99 100755 --- 
a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyComparator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyComparator.java @@ -23,21 +23,23 @@ import java.util.List; import java.util.ListIterator; -public class GroupbyKeyComparator implements Comparator{ - @Override - public int compare(GroupbyKey key1, GroupbyKey key2){ - List list1 = key1.getValue(); - List list2 = key2.getValue(); - - if(list1 == null || list2 == null || list1.size() != list2.size()) - throw new IllegalArgumentException("2 list of groupby fields must be non-null and have the same size"); - ListIterator e1 = list1.listIterator(); - ListIterator e2 = list2.listIterator(); - while(e1.hasNext() && e2.hasNext()){ - int r = Bytes.compareTo(e1.next().copyBytes(), e2.next().copyBytes()); - if(r != 0) - return r; - } - return 0; - } +public class GroupbyKeyComparator implements Comparator { + @Override + public int compare(GroupbyKey key1, GroupbyKey key2) { + List list1 = key1.getValue(); + List list2 = key2.getValue(); + + if (list1 == null || list2 == null || list1.size() != list2.size()) { + throw new IllegalArgumentException("2 list of groupby fields must be non-null and have the same size"); + } + ListIterator e1 = list1.listIterator(); + ListIterator e2 = list2.listIterator(); + while (e1.hasNext() && e2.hasNext()) { + int r = Bytes.compareTo(e1.next().copyBytes(), e2.next().copyBytes()); + if (r != 0) { + return r; + } + } + return 0; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyValue.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyValue.java index 2256761105..f1a8f93b37 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyValue.java +++ 
b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyValue.java @@ -23,69 +23,72 @@ import java.io.IOException; /** - *

    Groupby KeyValue Structure

    + *

    Groupby KeyValue Structure.

    *
      * {
      *  key: GroupbyKey
      *  value: GroupbyValue
      * }
      * 
    + * * @see GroupbyKey * @see GroupbyValue - * * @since : 11/4/14,2014 */ public class GroupbyKeyValue implements Writable { - private GroupbyKey key; - private GroupbyValue value; - public GroupbyKeyValue(){ - this.key = new GroupbyKey(); - this.value = new GroupbyValue(); - } - public GroupbyKeyValue(GroupbyKey key,GroupbyValue value){ - this.key = key; - this.value = value; - } - public GroupbyKey getKey() { - return key; - } + private GroupbyKey key; + private GroupbyValue value; + + public GroupbyKeyValue() { + this.key = new GroupbyKey(); + this.value = new GroupbyValue(); + } + + public GroupbyKeyValue(GroupbyKey key, GroupbyValue value) { + this.key = key; + this.value = value; + } + + public GroupbyKey getKey() { + return key; + } - public void setKey(GroupbyKey key) { - this.key = key; - } + public void setKey(GroupbyKey key) { + this.key = key; + } - public GroupbyValue getValue() { - return value; - } + public GroupbyValue getValue() { + return value; + } - public void setValue(GroupbyValue value) { - this.value = value; - } + public void setValue(GroupbyValue value) { + this.value = value; + } - /** - * Serialize the fields of this object to out. - * - * @param out DataOuput to serialize this object into. - * @throws java.io.IOException - */ - @Override - public void write(DataOutput out) throws IOException { - this.key.write(out); - this.value.write(out); - } + /** + * Serialize the fields of this object to out. + * + * @param out DataOuput to serialize this object into. + * @throws java.io.IOException + */ + @Override + public void write(DataOutput out) throws IOException { + this.key.write(out); + this.value.write(out); + } - /** - * Deserialize the fields of this object from in. - *

    - *

    For efficiency, implementations should attempt to re-use storage in the - * existing object where possible.

    - * - * @param in DataInput to deseriablize this object from. - * @throws java.io.IOException - */ - @Override - public void readFields(DataInput in) throws IOException { - this.key.readFields(in); - this.value.readFields(in); - } + /** + * Deserialize the fields of this object from in. + *

    + *

    For efficiency, implementations should attempt to re-use storage in the + * existing object where possible.

    + * + * @param in DataInput to deseriablize this object from. + * @throws java.io.IOException + */ + @Override + public void readFields(DataInput in) throws IOException { + this.key.readFields(in); + this.value.readFields(in); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyValueCreationListener.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyValueCreationListener.java index 6ca4becf45..3ffe4bb1b5 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyValueCreationListener.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyKeyValueCreationListener.java @@ -16,9 +16,6 @@ */ package org.apache.eagle.query.aggregate.raw; -/** - * @since : 11/11/14,2014 - */ public interface GroupbyKeyValueCreationListener { - void keyValueCreated(GroupbyKeyValue kv); + void keyValueCreated(GroupbyKeyValue kv); } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyValue.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyValue.java index b7f2c43a7f..ef001cfd04 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyValue.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/GroupbyValue.java @@ -37,103 +37,109 @@ * } * * - * TODO: Add self-described serializer or deserializer for meta bytes array, so that any side of the RPC will know how to read/write meta information + *

    TODO: Add self-described serializer or deserializer for meta bytes array, so that any side of the RPC will know how to read/write meta information * * @since : 11/4/14,2014 */ -public class GroupbyValue implements Writable{ - private final WritableList value; - private WritableList meta; - private int initialCapacity=1; - public GroupbyValue(){ - this(1); - } - /** - * Constructs an empty list with the specified initial capacity. - * - * @param initialCapacity the initial capacity of the list - * @exception IllegalArgumentException if the specified initial capacity - * is negative - */ - public GroupbyValue(int initialCapacity ){ - this.initialCapacity = initialCapacity; - this.value = new WritableList(DoubleWritable.class,this.initialCapacity); - this.meta = new WritableList(BytesWritable.class,this.initialCapacity); - } - - public WritableList getValue(){ - return this.value; - } - - public WritableList getMeta(){ - return this.meta; - } - - public DoubleWritable get(int index){ - return this.value.get(index); - } - - public BytesWritable getMeta(int index){ - if(this.meta==null) return null; - return this.meta.get(index); - } - - // Values - public void add(DoubleWritable value){ - this.value.add(value); - } - public void add(Double value){ - this.value.add(new DoubleWritable(value)); - } - - public void set(int index,DoubleWritable value){ - this.value.set(index, value); - } - - ////////////// - // Meta - ///////////// - public void addMeta(BytesWritable meta){ - this.meta.add(meta); - } - - public void addMeta(int meta){ - this.meta.add(new BytesWritable(ByteUtil.intToBytes(meta))); - } - - public void setMeta(int index,BytesWritable meta){ - this.meta.set(index,meta); - } - public void setMeta(int index,int meta){ - this.meta.set(index, new BytesWritable(ByteUtil.intToBytes(meta))); - } - - /** - * Serialize the fields of this object to out. - * - * @param out DataOuput to serialize this object into. 
- * @throws java.io.IOException - */ - @Override - public void write(DataOutput out) throws IOException { - out.writeInt(this.initialCapacity); - this.value.write(out); - this.meta.write(out); - } - - /** - * Deserialize the fields of this object from in. - *

    - *

    For efficiency, implementations should attempt to re-use storage in the - * existing object where possible.

    - * - * @param in DataInput to deseriablize this object from. - * @throws java.io.IOException - */ - @Override - public void readFields(DataInput in) throws IOException { - this.initialCapacity = in.readInt(); - this.value.readFields(in); - this.meta.readFields(in); - } +public class GroupbyValue implements Writable { + private final WritableList value; + private WritableList meta; + private int initialCapacity = 1; + + public GroupbyValue() { + this(1); + } + + /** + * Constructs an empty list with the specified initial capacity. + * + * @param initialCapacity the initial capacity of the list + * @throws IllegalArgumentException if the specified initial capacity + * is negative + */ + public GroupbyValue(int initialCapacity) { + this.initialCapacity = initialCapacity; + this.value = new WritableList(DoubleWritable.class, this.initialCapacity); + this.meta = new WritableList(BytesWritable.class, this.initialCapacity); + } + + public WritableList getValue() { + return this.value; + } + + public WritableList getMeta() { + return this.meta; + } + + public BytesWritable getMeta(int index) { + if (this.meta == null) { + return null; + } + return this.meta.get(index); + } + + public DoubleWritable get(int index) { + return this.value.get(index); + } + + // Values + public void add(DoubleWritable value) { + this.value.add(value); + } + + public void add(Double value) { + this.value.add(new DoubleWritable(value)); + } + + public void set(int index, DoubleWritable value) { + this.value.set(index, value); + } + + ////////////// + // Meta + ///////////// + public void addMeta(BytesWritable meta) { + this.meta.add(meta); + } + + public void addMeta(int meta) { + this.meta.add(new BytesWritable(ByteUtil.intToBytes(meta))); + } + + public void setMeta(int index, BytesWritable meta) { + this.meta.set(index, meta); + } + + public void setMeta(int index, int meta) { + this.meta.set(index, new BytesWritable(ByteUtil.intToBytes(meta))); + } + + /** + * Serialize the fields of 
this object to out. + * + * @param out DataOuput to serialize this object into. + * @throws java.io.IOException + */ + @Override + public void write(DataOutput out) throws IOException { + out.writeInt(this.initialCapacity); + this.value.write(out); + this.meta.write(out); + } + + /** + * Deserialize the fields of this object from in. + *

    + *

    For efficiency, implementations should attempt to re-use storage in the + * existing object where possible.

    + * + * @param in DataInput to deseriablize this object from. + * @throws java.io.IOException + */ + @Override + public void readFields(DataInput in) throws IOException { + this.initialCapacity = in.readInt(); + this.value.readFields(in); + this.meta.readFields(in); + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/RawAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/RawAggregator.java index 0468074adb..d96188ec39 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/RawAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/RawAggregator.java @@ -24,48 +24,46 @@ import java.util.ListIterator; import java.util.Map; -public class RawAggregator implements QualifierCreationListener,GroupbyKeyAggregatable { - private List groupbyFields; - private GroupbyKey key; - private static final byte[] UNASSIGNED = "unassigned".getBytes(); - private RawGroupbyBucket bucket; +public class RawAggregator implements QualifierCreationListener, GroupbyKeyAggregatable { + private List groupbyFields; + private GroupbyKey key; + private static final byte[] UNASSIGNED = "unassigned".getBytes(); + private RawGroupbyBucket bucket; - public RawAggregator(List groupbyFields, List aggregateFunctionTypes, List aggregatedFields, EntityDefinition ed){ - this.groupbyFields = groupbyFields; - key = new GroupbyKey(); - bucket = new RawGroupbyBucket(aggregateFunctionTypes, aggregatedFields, ed); - } + public RawAggregator(List groupbyFields, List aggregateFunctionTypes, List aggregatedFields, EntityDefinition ed) { + this.groupbyFields = groupbyFields; + key = new GroupbyKey(); + bucket = new RawGroupbyBucket(aggregateFunctionTypes, aggregatedFields, ed); + } - @Override - public void qualifierCreated(Map qualifiers){ - key.clear(); - ListIterator 
it = groupbyFields.listIterator(); - while(it.hasNext()){ - byte[] groupbyFieldValue = qualifiers.get(it.next()); - if(groupbyFieldValue == null){ - key.addValue(UNASSIGNED); - }else{ - key.addValue(groupbyFieldValue); - } - } - GroupbyKey newKey = null; - if(bucket.exists(key)){ - newKey = key; - }else{ - newKey = new GroupbyKey(key); - } - - bucket.addDatapoint(newKey, qualifiers); - } + @Override + public void qualifierCreated(Map qualifiers) { + key.clear(); + ListIterator it = groupbyFields.listIterator(); + while (it.hasNext()) { + byte[] groupbyFieldValue = qualifiers.get(it.next()); + if (groupbyFieldValue == null) { + key.addValue(UNASSIGNED); + } else { + key.addValue(groupbyFieldValue); + } + } + GroupbyKey newKey = null; + if (bucket.exists(key)) { + newKey = key; + } else { + newKey = new GroupbyKey(key); + } - /** - * @return - */ - public Map, List> result(){ - return bucket.result(); - } + bucket.addDatapoint(newKey, qualifiers); + } - public List getGroupbyKeyValues(){ - return bucket.groupbyKeyValues(); - } + + public Map, List> result() { + return bucket.result(); + } + + public List getGroupbyKeyValues() { + return bucket.groupbyKeyValues(); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/RawGroupbyBucket.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/RawGroupbyBucket.java index 47b84a0af0..ad97780318 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/RawGroupbyBucket.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/RawGroupbyBucket.java @@ -30,167 +30,175 @@ import java.util.*; public class RawGroupbyBucket { - private final static Logger LOG = LoggerFactory.getLogger(RawGroupbyBucket.class); + private static final Logger LOG = LoggerFactory.getLogger(RawGroupbyBucket.class); - private List aggregatedFields; - private 
EntityDefinition entityDefinition; + private List aggregatedFields; + private EntityDefinition entityDefinition; - - private List types; - private SortedMap> group2FunctionMap = - new TreeMap>(new GroupbyKeyComparator()); - public RawGroupbyBucket(List types, List aggregatedFields, EntityDefinition ed){ - this.types = types; - this.aggregatedFields = aggregatedFields; - this.entityDefinition = ed; - } + private List types; + private SortedMap> group2FunctionMap = + new TreeMap>(new GroupbyKeyComparator()); - public boolean exists(GroupbyKey key){ - return group2FunctionMap.containsKey(key); - } + public RawGroupbyBucket(List types, List aggregatedFields, EntityDefinition ed) { + this.types = types; + this.aggregatedFields = aggregatedFields; + this.entityDefinition = ed; + } - public void addDatapoint(GroupbyKey groupbyKey, Map values){ - // locate groupby bucket - List functions = group2FunctionMap.get(groupbyKey); - if(functions == null){ - functions = new ArrayList(); - for(AggregateFunctionType type : types){ - FunctionFactory ff = FunctionFactory.locateFunctionFactory(type); - if(ff == null){ - LOG.error("FunctionFactory of AggregationFunctionType:"+type+" is null"); - }else{ - functions.add(ff.createFunction()); - } - } - group2FunctionMap.put(groupbyKey, functions); - } - ListIterator e1 = functions.listIterator(); - ListIterator e2 = aggregatedFields.listIterator(); - while(e1.hasNext() && e2.hasNext()){ - Function f = e1.next(); - String aggregatedField = e2.next(); - byte[] v = values.get(aggregatedField); - if(f instanceof Function.Count){ // handle count - if(entityDefinition.getMetricDefinition()==null) { - f.run(1.0); - continue; - }else if(v == null){ - aggregatedField = GenericMetricEntity.VALUE_FIELD; - v = values.get(aggregatedField); - } - } - if(v != null){ - Qualifier q = entityDefinition.getDisplayNameMap().get(aggregatedField); - EntitySerDeser serDeser = q.getSerDeser(); - // double d = 0.0; - if(serDeser instanceof IntSerDeser){ - double d= 
(Integer)serDeser.deserialize(v); - f.run(d); - }else if(serDeser instanceof LongSerDeser){ - double d = (Long)serDeser.deserialize(v); - f.run(d); - }else if(serDeser instanceof DoubleSerDeser){ - double d = (Double)serDeser.deserialize(v); - f.run(d); - // TODO: support numeric array type that is not metric - }else if(serDeser instanceof DoubleArraySerDeser){ - double[] d = ((DoubleArraySerDeser) serDeser).deserialize(v); - if(f instanceof Function.Count){ - f.run(d.length); - } else { - for(double i:d) f.run(i); - } - }else if(serDeser instanceof IntArraySerDeser){ - int[] d = ((IntArraySerDeser) serDeser).deserialize(v); - if(f instanceof Function.Count){ - f.run(d.length); - }else{ - for(int i:d) f.run(i); - } - }else{ - if(LOG.isDebugEnabled()) LOG.debug("EntitySerDeser of field "+aggregatedField+" is not IntSerDeser or LongSerDeser or DoubleSerDeser or IntArraySerDeser or DoubleArraySerDeser, default as 0.0"); - } - }else if(TokenConstant.isExpression(aggregatedField)){ - String expression = TokenConstant.parseExpressionContent(aggregatedField); - try { - Map doubleMap = EntityQualifierUtils.bytesMapToDoubleMap(values, entityDefinition); - if(entityDefinition.getMetricDefinition() == null) { - double value = ExpressionParser.eval(expression,doubleMap); - // LOG.info("DEBUG: Eval "+expression +" = "+value); - f.run(value); - }else{ - Qualifier qualifier = entityDefinition.getDisplayNameMap().get(GenericMetricEntity.VALUE_FIELD); - EntitySerDeser _serDeser = qualifier.getSerDeser(); - byte[] valueBytes = values.get(GenericMetricEntity.VALUE_FIELD); - if( _serDeser instanceof DoubleArraySerDeser){ - double[] d = (double[]) _serDeser.deserialize(valueBytes); - if(f instanceof Function.Count) { - f.run(d.length); - }else{ - for(double i:d){ - doubleMap.put(GenericMetricEntity.VALUE_FIELD,i); - f.run(ExpressionParser.eval(expression, doubleMap)); - } - } - }else if(_serDeser instanceof IntArraySerDeser){ - int[] d = (int[]) _serDeser.deserialize(valueBytes); - 
if(f instanceof Function.Count) { - f.run(d.length); - }else { - for (double i : d) { - doubleMap.put(GenericMetricEntity.VALUE_FIELD, i); - f.run(ExpressionParser.eval(expression, doubleMap)); - } - } - }else{ - double value = ExpressionParser.eval(expression,doubleMap); - f.run(value); - } - } - } catch (Exception e) { - LOG.error("Got exception to evaluate expression: "+expression+", exception: "+e.getMessage(),e); - } - } - } - } + public boolean exists(GroupbyKey key) { + return group2FunctionMap.containsKey(key); + } - /** - * expensive operation - create objects and format the result - * @return - */ - public List groupbyKeyValues(){ - List results = new ArrayList(); - for(Map.Entry> entry : this.group2FunctionMap.entrySet()){ - GroupbyValue value = new GroupbyValue(); - for(Function f : entry.getValue()){ - value.add(new DoubleWritable(f.result())); - value.addMeta(f.count()); - } - results.add(new GroupbyKeyValue(entry.getKey(),value)); - } - return results; - } + public void addDatapoint(GroupbyKey groupbyKey, Map values) { + // locate groupby bucket + List functions = group2FunctionMap.get(groupbyKey); + if (functions == null) { + functions = new ArrayList(); + for (AggregateFunctionType type : types) { + FunctionFactory ff = FunctionFactory.locateFunctionFactory(type); + if (ff == null) { + LOG.error("FunctionFactory of AggregationFunctionType:" + type + " is null"); + } else { + functions.add(ff.createFunction()); + } + } + group2FunctionMap.put(groupbyKey, functions); + } + ListIterator e1 = functions.listIterator(); + ListIterator e2 = aggregatedFields.listIterator(); + while (e1.hasNext() && e2.hasNext()) { + Function f = e1.next(); + String aggregatedField = e2.next(); + byte[] v = values.get(aggregatedField); + if (f instanceof Function.Count) { // handle count + if (entityDefinition.getMetricDefinition() == null) { + f.run(1.0); + continue; + } else if (v == null) { + aggregatedField = GenericMetricEntity.VALUE_FIELD; + v = 
values.get(aggregatedField); + } + } + if (v != null) { + Qualifier q = entityDefinition.getDisplayNameMap().get(aggregatedField); + EntitySerDeser serDeser = q.getSerDeser(); + // double d = 0.0; + if (serDeser instanceof IntSerDeser) { + double d = (Integer) serDeser.deserialize(v); + f.run(d); + } else if (serDeser instanceof LongSerDeser) { + double d = (Long) serDeser.deserialize(v); + f.run(d); + } else if (serDeser instanceof DoubleSerDeser) { + double d = (Double) serDeser.deserialize(v); + f.run(d); + // TODO: support numeric array type that is not metric + } else if (serDeser instanceof DoubleArraySerDeser) { + double[] d = ((DoubleArraySerDeser) serDeser).deserialize(v); + if (f instanceof Function.Count) { + f.run(d.length); + } else { + for (double i : d) { + f.run(i); + } + } + } else if (serDeser instanceof IntArraySerDeser) { + int[] d = ((IntArraySerDeser) serDeser).deserialize(v); + if (f instanceof Function.Count) { + f.run(d.length); + } else { + for (int i : d) { + f.run(i); + } + } + } else { + if (LOG.isDebugEnabled()) { + LOG.debug("EntitySerDeser of field " + aggregatedField + " is not IntSerDeser or LongSerDeser or DoubleSerDeser or IntArraySerDeser or DoubleArraySerDeser, default as 0.0"); + } + } + } else if (TokenConstant.isExpression(aggregatedField)) { + String expression = TokenConstant.parseExpressionContent(aggregatedField); + try { + Map doubleMap = EntityQualifierUtils.bytesMapToDoubleMap(values, entityDefinition); + if (entityDefinition.getMetricDefinition() == null) { + double value = ExpressionParser.eval(expression, doubleMap); + // LOG.info("DEBUG: Eval "+expression +" = "+value); + f.run(value); + } else { + Qualifier qualifier = entityDefinition.getDisplayNameMap().get(GenericMetricEntity.VALUE_FIELD); + EntitySerDeser _serDeser = qualifier.getSerDeser(); + byte[] valueBytes = values.get(GenericMetricEntity.VALUE_FIELD); + if (_serDeser instanceof DoubleArraySerDeser) { + double[] d = (double[]) 
_serDeser.deserialize(valueBytes); + if (f instanceof Function.Count) { + f.run(d.length); + } else { + for (double i : d) { + doubleMap.put(GenericMetricEntity.VALUE_FIELD, i); + f.run(ExpressionParser.eval(expression, doubleMap)); + } + } + } else if (_serDeser instanceof IntArraySerDeser) { + int[] d = (int[]) _serDeser.deserialize(valueBytes); + if (f instanceof Function.Count) { + f.run(d.length); + } else { + for (double i : d) { + doubleMap.put(GenericMetricEntity.VALUE_FIELD, i); + f.run(ExpressionParser.eval(expression, doubleMap)); + } + } + } else { + double value = ExpressionParser.eval(expression, doubleMap); + f.run(value); + } + } + } catch (Exception e) { + LOG.error("Got exception to evaluate expression: " + expression + ", exception: " + e.getMessage(), e); + } + } + } + } - /** - * expensive operation - create objects and format the result - * @return - */ - public Map, List> result(){ - Map, List> result = new HashMap, List>(); - for(Map.Entry> entry : this.group2FunctionMap.entrySet()){ - List values = new ArrayList(); - for(Function f : entry.getValue()){ - values.add(f.result()); - } - GroupbyKey key = entry.getKey(); - List list1 = key.getValue(); - List list2 = new ArrayList(); - for(BytesWritable e : list1){ - list2.add(new String(e.copyBytes())); - } - result.put(list2, values); - } - return result; - } + /** + * expensive operation - create objects and format the result. + * + * @return + */ + public List groupbyKeyValues() { + List results = new ArrayList(); + for (Map.Entry> entry : this.group2FunctionMap.entrySet()) { + GroupbyValue value = new GroupbyValue(); + for (Function f : entry.getValue()) { + value.add(new DoubleWritable(f.result())); + value.addMeta(f.count()); + } + results.add(new GroupbyKeyValue(entry.getKey(), value)); + } + return results; + } + + /** + * expensive operation - create objects and format the result. 
+ * + * @return + */ + public Map, List> result() { + Map, List> result = new HashMap, List>(); + for (Map.Entry> entry : this.group2FunctionMap.entrySet()) { + List values = new ArrayList(); + for (Function f : entry.getValue()) { + values.add(f.result()); + } + GroupbyKey key = entry.getKey(); + List list1 = key.getValue(); + List list2 = new ArrayList(); + for (BytesWritable e : list1) { + list2.add(new String(e.copyBytes())); + } + result.put(list2, values); + } + return result; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/WritableList.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/WritableList.java index f9932a5c97..e314b9ea85 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/WritableList.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/raw/WritableList.java @@ -24,81 +24,79 @@ import java.lang.reflect.ParameterizedType; import java.util.ArrayList; -/** - * @since : 11/6/14,2014 - */ -public class WritableList extends ArrayList implements Writable{ - private Class itemTypeClass; - public WritableList(Class typeClass){ - this.itemTypeClass = typeClass; - } +public class WritableList extends ArrayList implements Writable { + private Class itemTypeClass; + + public WritableList(Class typeClass) { + this.itemTypeClass = typeClass; + } - public WritableList(Class typeClass,int initialCapacity){ - super(initialCapacity); - this.itemTypeClass = typeClass; - } + public WritableList(Class typeClass, int initialCapacity) { + super(initialCapacity); + this.itemTypeClass = typeClass; + } - /** - *

    Get item class by

    - *
    -	 * (Class) ((ParameterizedType)getClass().getGenericSuperclass()).getActualTypeArguments()[0];
    -	 * 
    - */ - @Deprecated - public WritableList(){ - this.itemTypeClass = (Class) ((ParameterizedType)getClass().getGenericSuperclass()).getActualTypeArguments()[0]; - } + /** + *

    Get item class by

    + *
    +     * (Class) ((ParameterizedType)getClass().getGenericSuperclass()).getActualTypeArguments()[0].
    +     * 
    + */ + @Deprecated + public WritableList() { + this.itemTypeClass = (Class) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[0]; + } - private void check() throws IOException{ - if(this.itemTypeClass == null){ - throw new IOException("Class Type of WritableArrayList is null"); - } - } + private void check() throws IOException { + if (this.itemTypeClass == null) { + throw new IOException("Class Type of WritableArrayList is null"); + } + } - public Class getItemClass(){ - return itemTypeClass; - } + public Class getItemClass() { + return itemTypeClass; + } - /** - * Serialize the fields of this object to out. - * - * @param out DataOuput to serialize this object into. - * @throws java.io.IOException - */ - @Override - public void write(DataOutput out) throws IOException { - this.check(); - out.writeInt(this.size()); - for(Writable item: this){ - item.write(out); - } - } + /** + * Serialize the fields of this object to out. + * + * @param out DataOuput to serialize this object into. + * @throws java.io.IOException + */ + @Override + public void write(DataOutput out) throws IOException { + this.check(); + out.writeInt(this.size()); + for (Writable item : this) { + item.write(out); + } + } - /** - * Deserialize the fields of this object from in. - *

    - *

    For efficiency, implementations should attempt to re-use storage in the - * existing object where possible.

    - * - * @param in DataInput to deseriablize this object from. - * @throws java.io.IOException - */ - @Override - public void readFields(DataInput in) throws IOException { - this.check(); - int size = in.readInt(); - for(int i=0;iin. + *

    + *

    For efficiency, implementations should attempt to re-use storage in the + * existing object where possible.

    + * + * @param in DataInput to deseriablize this object from. + * @throws java.io.IOException + */ + @Override + public void readFields(DataInput in) throws IOException { + this.check(); + int size = in.readInt(); + for (int i = 0; i < size; i++) { + try { + E item = itemTypeClass.newInstance(); + item.readFields(in); + this.add(item); + } catch (InstantiationException e) { + throw new IOException("Got exception to create instance for class: " + itemTypeClass + ": " + e.getMessage(), e); + } catch (IllegalAccessException e) { + throw new IOException("Got exception to create instance for class: " + itemTypeClass + ": " + e.getMessage(), e); + } + } + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/AbstractAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/AbstractAggregator.java index deb0838dee..2310070e28 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/AbstractAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/AbstractAggregator.java @@ -16,13 +16,13 @@ */ package org.apache.eagle.query.aggregate.timeseries; +import org.apache.commons.beanutils.PropertyUtils; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.EntityCreationListener; import org.apache.eagle.log.expression.ExpressionParser; import org.apache.eagle.query.aggregate.AggregateFunctionType; import org.apache.eagle.query.aggregate.IllegalAggregateFieldTypeException; import org.apache.eagle.query.parser.TokenConstant; -import org.apache.commons.beanutils.PropertyUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,161 +32,164 @@ import java.util.ArrayList; import java.util.List; -public abstract class AbstractAggregator implements Aggregator, EntityCreationListener{ - private 
final static Logger LOG = LoggerFactory.getLogger(AbstractAggregator.class); +public abstract class AbstractAggregator implements Aggregator, EntityCreationListener { + private static final Logger LOG = LoggerFactory.getLogger(AbstractAggregator.class); + + private static final String UNASSIGNED = "unassigned"; + protected List groupbyFields; + protected List aggregateFunctionTypes; + protected List aggregatedFields; + // a cache to know immediately if groupby field should come from tags(true) or qualifiers(false) + private Boolean[] groupbyFieldPlacementCache; + private Method[] aggregateFieldReflectedMethodCache; + + public AbstractAggregator(List groupbyFields, List aggregateFuntionTypes, List aggregatedFields) { + this.groupbyFields = groupbyFields; + this.aggregateFunctionTypes = aggregateFuntionTypes; + this.aggregatedFields = aggregatedFields; + aggregateFieldReflectedMethodCache = new Method[this.aggregatedFields.size()]; + groupbyFieldPlacementCache = new Boolean[this.groupbyFields.size()]; + } + + @Override + public void entityCreated(TaggedLogAPIEntity entity) throws Exception { + accumulate(entity); + } + + public abstract Object result(); - private static final String UNASSIGNED = "unassigned"; - protected List groupbyFields; - protected List aggregateFunctionTypes; - protected List aggregatedFields; - // a cache to know immediately if groupby field should come from tags(true) or qualifiers(false) - private Boolean[] _groupbyFieldPlacementCache; - private Method[] _aggregateFieldReflectedMethodCache; + protected String createGroupFromTags(TaggedLogAPIEntity entity, String groupbyField, int i) { + String groupbyFieldValue = entity.getTags().get(groupbyField); + if (groupbyFieldValue != null) { + groupbyFieldPlacementCache[i] = true; + return groupbyFieldValue; + } + return null; + } - public AbstractAggregator(List groupbyFields, List aggregateFuntionTypes, List aggregatedFields){ - this.groupbyFields = groupbyFields; - this.aggregateFunctionTypes = 
aggregateFuntionTypes; - this.aggregatedFields = aggregatedFields; - _aggregateFieldReflectedMethodCache = new Method[this.aggregatedFields.size()]; - _groupbyFieldPlacementCache = new Boolean[this.groupbyFields.size()]; - } - - @Override - public void entityCreated(TaggedLogAPIEntity entity) throws Exception{ - accumulate(entity); - } - - public abstract Object result(); - - protected String createGroupFromTags(TaggedLogAPIEntity entity, String groupbyField, int i){ - String groupbyFieldValue = entity.getTags().get(groupbyField); - if(groupbyFieldValue != null){ - _groupbyFieldPlacementCache[i] = true; - return groupbyFieldValue; - } - return null; - } - - protected String createGroupFromQualifiers(TaggedLogAPIEntity entity, String groupbyField, int i){ - try{ - PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(entity, groupbyField); - if(pd == null) - return null; -// _groupbyFieldPlacementCache.put(groupbyField, false); - _groupbyFieldPlacementCache[i] = false; - return (String)(pd.getReadMethod().invoke(entity)); - }catch(NoSuchMethodException ex){ - return null; - }catch(InvocationTargetException ex){ - return null; - }catch(IllegalAccessException ex){ - return null; - } - } - - protected String determineGroupbyFieldValue(TaggedLogAPIEntity entity, String groupbyField, int i){ - Boolean placement = _groupbyFieldPlacementCache[i]; - String groupbyFieldValue = null; - if(placement != null){ - groupbyFieldValue = placement.booleanValue() ? createGroupFromTags(entity, groupbyField, i) : createGroupFromQualifiers(entity, groupbyField, i); - }else{ - groupbyFieldValue = createGroupFromTags(entity, groupbyField, i); - if(groupbyFieldValue == null){ - groupbyFieldValue = createGroupFromQualifiers(entity, groupbyField, i); - } - } - groupbyFieldValue = (groupbyFieldValue == null ? 
UNASSIGNED : groupbyFieldValue); - return groupbyFieldValue; - } - - /** - * TODO For count aggregation, special treatment is the value is always 0 unless we support count(*) or count() which counts number of rows or - * number of non-null field - * For other aggregation, like sum,min,max,avg, we should resort to qualifiers - * @param entity - * @return - */ - protected List createPreAggregatedValues(TaggedLogAPIEntity entity) throws Exception{ - List values = new ArrayList(); - int functionIndex = 0; - for(AggregateFunctionType type : aggregateFunctionTypes){ - if(type.name().equals(AggregateFunctionType.count.name())){ - values.add(new Double(1)); - }else{ - // find value in qualifier by checking java bean - String aggregatedField = aggregatedFields.get(functionIndex); - if(TokenConstant.isExpression(aggregatedField)){ - try { - String expr = TokenConstant.parseExpressionContent(aggregatedField); - values.add(ExpressionParser.eval(expr, entity)); - }catch (Exception ex){ - LOG.error("Failed to evaluate expression-based aggregation: " + aggregatedField, ex); - throw ex; - } - }else { - try { - Method m = _aggregateFieldReflectedMethodCache[functionIndex]; - if (m == null) { -// pd = PropertyUtils.getPropertyDescriptor(entity, aggregatedField); -// if (pd == null) { -// final String errMsg = "Field/tag " + aggregatedField + " is not defined for entity " + entity.getClass().getSimpleName(); -// logger.error(errMsg); -// throw new Exception(errMsg); -// } -// Object obj = pd.getReadMethod().invoke(entity); - String tmp = aggregatedField.substring(0, 1).toUpperCase() + aggregatedField.substring(1); - m = entity.getClass().getMethod("get" + tmp); - _aggregateFieldReflectedMethodCache[functionIndex] = m; - } - Object obj = m.invoke(entity); - values.add(numberToDouble(obj)); - } catch (Exception ex) { - LOG.error("Cannot do aggregation for field " + aggregatedField, ex); - throw ex; - } - } - } - functionIndex++; - } - return values; - } - - /** - * TODO this is a hack, 
we need elegant way to convert type to a broad precision + protected String createGroupFromQualifiers(TaggedLogAPIEntity entity, String groupbyField, int i) { + try { + PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(entity, groupbyField); + if (pd == null) { + return null; + } + // _groupbyFieldPlacementCache.put(groupbyField, false); + groupbyFieldPlacementCache[i] = false; + return (String) (pd.getReadMethod().invoke(entity)); + } catch (NoSuchMethodException ex) { + return null; + } catch (InvocationTargetException ex) { + return null; + } catch (IllegalAccessException ex) { + return null; + } + } + + protected String determineGroupbyFieldValue(TaggedLogAPIEntity entity, String groupbyField, int i) { + Boolean placement = groupbyFieldPlacementCache[i]; + String groupbyFieldValue = null; + if (placement != null) { + groupbyFieldValue = placement.booleanValue() ? createGroupFromTags(entity, groupbyField, i) : createGroupFromQualifiers(entity, groupbyField, i); + } else { + groupbyFieldValue = createGroupFromTags(entity, groupbyField, i); + if (groupbyFieldValue == null) { + groupbyFieldValue = createGroupFromQualifiers(entity, groupbyField, i); + } + } + groupbyFieldValue = (groupbyFieldValue == null ? UNASSIGNED : groupbyFieldValue); + return groupbyFieldValue; + } + + /** + * TODO For count aggregation, special treatment is the value is always 0 unless we support count(*) or count(fieldname) which counts number of rows or + * number of non-null field + * For other aggregation, like sum,min,max,avg, we should resort to qualifiers. 
* - * @param obj - * @return - */ - protected Double numberToDouble(Object obj){ - if(obj instanceof Double) - return (Double)obj; - if(obj instanceof Integer){ - return new Double(((Integer)obj).doubleValue()); - } - if(obj instanceof Long){ - return new Double(((Long)obj).doubleValue()); - } - // TODO hack to support string field for demo purpose, should be removed - if(obj == null){ - return new Double(0.0); - } - if(obj instanceof String){ - try{ - return new Double((String)obj); - }catch(Exception ex){ - LOG.warn("Datapoint ignored because it can not be converted to correct number for " + obj, ex); - return new Double(0.0); - } - } - if(obj instanceof double[]){ - double[] value = (double[]) obj; - if(value.length > 0){ - return new Double(value[0]); - }else{ - return new Double(0.0); - } - } - - throw new IllegalAggregateFieldTypeException(obj.getClass().toString() + " type is not support. The aggregated field must be numeric type, int, long or double"); - } + * @param entity + * @return + */ + protected List createPreAggregatedValues(TaggedLogAPIEntity entity) throws Exception { + List values = new ArrayList(); + int functionIndex = 0; + for (AggregateFunctionType type : aggregateFunctionTypes) { + if (type.name().equals(AggregateFunctionType.count.name())) { + values.add(new Double(1)); + } else { + // find value in qualifier by checking java bean + String aggregatedField = aggregatedFields.get(functionIndex); + if (TokenConstant.isExpression(aggregatedField)) { + try { + String expr = TokenConstant.parseExpressionContent(aggregatedField); + values.add(ExpressionParser.eval(expr, entity)); + } catch (Exception ex) { + LOG.error("Failed to evaluate expression-based aggregation: " + aggregatedField, ex); + throw ex; + } + } else { + try { + Method m = aggregateFieldReflectedMethodCache[functionIndex]; + if (m == null) { + // pd = PropertyUtils.getPropertyDescriptor(entity, aggregatedField); + // if (pd == null) { + // final String errMsg = "Field/tag " + 
aggregatedField + " is not defined for entity " + entity.getClass().getSimpleName(); + // logger.error(errMsg); + // throw new Exception(errMsg); + // } + // Object obj = pd.getReadMethod().invoke(entity); + String tmp = aggregatedField.substring(0, 1).toUpperCase() + aggregatedField.substring(1); + m = entity.getClass().getMethod("get" + tmp); + aggregateFieldReflectedMethodCache[functionIndex] = m; + } + Object obj = m.invoke(entity); + values.add(numberToDouble(obj)); + } catch (Exception ex) { + LOG.error("Cannot do aggregation for field " + aggregatedField, ex); + throw ex; + } + } + } + functionIndex++; + } + return values; + } + + /** + * TODO this is a hack, we need elegant way to convert type to a broad precision. + * + * @param obj + * @return + */ + protected Double numberToDouble(Object obj) { + if (obj instanceof Double) { + return (Double) obj; + } + if (obj instanceof Integer) { + return new Double(((Integer) obj).doubleValue()); + } + if (obj instanceof Long) { + return new Double(((Long) obj).doubleValue()); + } + // TODO hack to support string field for demo purpose, should be removed + if (obj == null) { + return new Double(0.0); + } + if (obj instanceof String) { + try { + return new Double((String) obj); + } catch (Exception ex) { + LOG.warn("Datapoint ignored because it can not be converted to correct number for " + obj, ex); + return new Double(0.0); + } + } + if (obj instanceof double[]) { + double[] value = (double[]) obj; + if (value.length > 0) { + return new Double(value[0]); + } else { + return new Double(0.0); + } + } + + throw new IllegalAggregateFieldTypeException(obj.getClass().toString() + " type is not support. 
The aggregated field must be numeric type, int, long or double"); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/Aggregator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/Aggregator.java index 1e70e9172f..9912a25711 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/Aggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/Aggregator.java @@ -20,10 +20,10 @@ public interface Aggregator { /** - * Accumulate callback + * Accumulate callback. * * @param entity accumulated entity instance * @throws Exception */ - public void accumulate(TaggedLogAPIEntity entity) throws Exception; + public void accumulate(TaggedLogAPIEntity entity) throws Exception; } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/EntityCreationListenerFactory.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/EntityCreationListenerFactory.java index 7e35bec738..9de752b071 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/EntityCreationListenerFactory.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/EntityCreationListenerFactory.java @@ -19,7 +19,7 @@ import org.apache.eagle.log.entity.EntityCreationListener; public class EntityCreationListenerFactory { - public static EntityCreationListener synchronizedEntityCreationListener(EntityCreationListener listener){ - return new SynchronizedEntityCreationListener(listener); - } + public static EntityCreationListener synchronizedEntityCreationListener(EntityCreationListener listener) { + return new SynchronizedEntityCreationListener(listener); + } } diff 
--git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/FlatAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/FlatAggregator.java index e12fea3044..63aebcabc3 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/FlatAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/FlatAggregator.java @@ -16,46 +16,47 @@ */ package org.apache.eagle.query.aggregate.timeseries; +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; +import org.apache.eagle.query.aggregate.AggregateFunctionType; + import java.util.ArrayList; import java.util.List; import java.util.Map; -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.query.aggregate.AggregateFunctionType; - /** - * Not thread safe + * Not thread safe. */ -public class FlatAggregator extends AbstractAggregator{ - protected GroupbyBucket bucket; +public class FlatAggregator extends AbstractAggregator { + protected GroupbyBucket bucket; /** + * constructor. 
* @param groupbyFields * @param aggregateFuntionTypes * @param aggregatedFields */ - public FlatAggregator(List groupbyFields, List aggregateFuntionTypes, List aggregatedFields){ - super(groupbyFields, aggregateFuntionTypes, aggregatedFields); - bucket = new GroupbyBucket(this.aggregateFunctionTypes); - } - - public void accumulate(TaggedLogAPIEntity entity) throws Exception{ - List groupbyFieldValues = createGroup(entity); - List preAggregatedValues = createPreAggregatedValues(entity); - bucket.addDatapoint(groupbyFieldValues, preAggregatedValues); - } - - public Map, List> result(){ - return bucket.result(); - } - - protected List createGroup(TaggedLogAPIEntity entity){ - List groupbyFieldValues = new ArrayList(); - int i = 0; - for(String groupbyField : groupbyFields){ - String groupbyFieldValue = determineGroupbyFieldValue(entity, groupbyField, i++); - groupbyFieldValues.add(groupbyFieldValue); - } - return groupbyFieldValues; - } + public FlatAggregator(List groupbyFields, List aggregateFuntionTypes, List aggregatedFields) { + super(groupbyFields, aggregateFuntionTypes, aggregatedFields); + bucket = new GroupbyBucket(this.aggregateFunctionTypes); + } + + public void accumulate(TaggedLogAPIEntity entity) throws Exception { + List groupbyFieldValues = createGroup(entity); + List preAggregatedValues = createPreAggregatedValues(entity); + bucket.addDatapoint(groupbyFieldValues, preAggregatedValues); + } + + public Map, List> result() { + return bucket.result(); + } + + protected List createGroup(TaggedLogAPIEntity entity) { + List groupbyFieldValues = new ArrayList(); + int i = 0; + for (String groupbyField : groupbyFields) { + String groupbyFieldValue = determineGroupbyFieldValue(entity, groupbyField, i++); + groupbyFieldValues.add(groupbyFieldValue); + } + return groupbyFieldValues; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/GroupbyBucket.java 
b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/GroupbyBucket.java index ea57edb0a7..8363254d26 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/GroupbyBucket.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/GroupbyBucket.java @@ -31,211 +31,225 @@ import java.util.Map; public class GroupbyBucket { - private final static Logger LOG = LoggerFactory.getLogger(GroupbyBucket.class); - - public static Map _functionFactories = - new HashMap<>(); - - // TODO put this logic to AggregatorFunctionType - static{ - _functionFactories.put(AggregateFunctionType.count.name(), new CountFactory()); - _functionFactories.put(AggregateFunctionType.sum.name(), new SumFactory()); - _functionFactories.put(AggregateFunctionType.min.name(), new MinFactory()); - _functionFactories.put(AggregateFunctionType.max.name(), new MaxFactory()); - _functionFactories.put(AggregateFunctionType.avg.name(), new AvgFactory()); - } - - private List types; -// private SortedMap, List> group2FunctionMap = -// new TreeMap, List>(new GroupbyFieldsComparator()); - - private Map, List> group2FunctionMap = new HashMap<>(); //new GroupbyFieldsComparator()); - - public GroupbyBucket(List types){ - this.types = types; - } - - public void addDatapoint(List groupbyFieldValues, List values){ - // LOG.info("DEBUG: addDatapoint: groupby=["+StringUtils.join(groupbyFieldValues,",")+"], values=["+StringUtils.join(values, ",")+"]"); - - // locate groupby bucket - List functions = group2FunctionMap.get(groupbyFieldValues); - if(functions == null){ - functions = new ArrayList(); - for(AggregateFunctionType type : types){ - functions.add(_functionFactories.get(type.name()).createFunction()); - } - group2FunctionMap.put(groupbyFieldValues, functions); - } - int functionIndex = 0; - for(Double v : values){ - functions.get(functionIndex).run(v); - 
functionIndex++; - } - } - - public Map, List> result(){ - Map, List> result = new HashMap, List>(); - for(Map.Entry, List> entry : this.group2FunctionMap.entrySet()){ - List values = new ArrayList(); - for(Function f : entry.getValue()){ - values.add(f.result()); - } - result.put(entry.getKey(), values); - } - return result; - } - - public List getGroupbyKeyValue(){ - List results = new ArrayList(); - - for(Map.Entry, List> entry : this.group2FunctionMap.entrySet()){ - GroupbyKey key = new GroupbyKey(); - for(String keyStr:entry.getKey()){ - try { - key.addValue(keyStr.getBytes(QueryConstants.CHARSET)); - } catch (UnsupportedEncodingException e) { - LOG.error(e.getMessage(),e); - } - } - GroupbyValue value = new GroupbyValue(); - for(Function f : entry.getValue()){ - value.add(f.result()); - value.addMeta(f.count()); - } - results.add(new GroupbyKeyValue(key,value)); - } - - return results; - } - - public static interface FunctionFactory{ - public Function createFunction(); - } - - public static abstract class Function{ - protected int count; - - public abstract void run(double v); - public abstract double result(); - public int count(){ - return count; - } - public void incrCount(){ - count ++; - } - } - - private static class CountFactory implements FunctionFactory{ - @Override - public Function createFunction(){ - return new Count(); - } - } - - - private static class Count extends Sum{ - public Count(){ - super(); - } - } - - private static class SumFactory implements FunctionFactory{ - @Override - public Function createFunction(){ - return new Sum(); - } - } - - private static class Sum extends Function{ - private double summary; - public Sum(){ - this.summary = 0.0; - } - @Override - public void run(double v){ - this.incrCount(); - this.summary += v; - } - - @Override - public double result(){ - return this.summary; - } - } - - private static class MinFactory implements FunctionFactory{ - @Override - public Function createFunction(){ - return new Min(); - } 
- } - public static class Min extends Function{ - private double minimum; - public Min(){ - // TODO is this a bug, or only positive numeric calculation is supported - this.minimum = Double.MAX_VALUE; - } - - @Override - public void run(double v){ - if(v < minimum){ - minimum = v; - } - this.incrCount(); - } - - @Override - public double result(){ - return minimum; - } - } - - private static class MaxFactory implements FunctionFactory{ - @Override - public Function createFunction(){ - return new Max(); - } - } - public static class Max extends Function{ - private double maximum; - public Max(){ - // TODO is this a bug, or only positive numeric calculation is supported - this.maximum = 0.0; - } - @Override - public void run(double v){ - if(v > maximum){ - maximum = v; - } - this.incrCount(); - } - - @Override - public double result(){ - return maximum; - } - } - - private static class AvgFactory implements FunctionFactory{ - @Override - public Function createFunction(){ - return new Avg(); - } - } - public static class Avg extends Function{ - private double total; - public Avg(){ - this.total = 0.0; - } - @Override - public void run(double v){ - total += v; - this.incrCount(); - } - @Override - public double result(){ - return this.total/this.count; - } - } + private static final Logger LOG = LoggerFactory.getLogger(GroupbyBucket.class); + + public static Map _functionFactories = + new HashMap<>(); + + // TODO put this logic to AggregatorFunctionType + static { + _functionFactories.put(AggregateFunctionType.count.name(), new CountFactory()); + _functionFactories.put(AggregateFunctionType.sum.name(), new SumFactory()); + _functionFactories.put(AggregateFunctionType.min.name(), new MinFactory()); + _functionFactories.put(AggregateFunctionType.max.name(), new MaxFactory()); + _functionFactories.put(AggregateFunctionType.avg.name(), new AvgFactory()); + } + + private List types; + // private SortedMap, List> group2FunctionMap = + // new TreeMap, List>(new 
GroupbyFieldsComparator()); + + private Map, List> group2FunctionMap = new HashMap<>(); //new GroupbyFieldsComparator()); + + public GroupbyBucket(List types) { + this.types = types; + } + + public void addDatapoint(List groupbyFieldValues, List values) { + // LOG.info("DEBUG: addDatapoint: groupby=["+StringUtils.join(groupbyFieldValues,",")+"], values=["+StringUtils.join(values, ",")+"]"); + + // locate groupby bucket + List functions = group2FunctionMap.get(groupbyFieldValues); + if (functions == null) { + functions = new ArrayList(); + for (AggregateFunctionType type : types) { + functions.add(_functionFactories.get(type.name()).createFunction()); + } + group2FunctionMap.put(groupbyFieldValues, functions); + } + int functionIndex = 0; + for (Double v : values) { + functions.get(functionIndex).run(v); + functionIndex++; + } + } + + public Map, List> result() { + Map, List> result = new HashMap, List>(); + for (Map.Entry, List> entry : this.group2FunctionMap.entrySet()) { + List values = new ArrayList(); + for (Function f : entry.getValue()) { + values.add(f.result()); + } + result.put(entry.getKey(), values); + } + return result; + } + + public List getGroupbyKeyValue() { + List results = new ArrayList(); + + for (Map.Entry, List> entry : this.group2FunctionMap.entrySet()) { + GroupbyKey key = new GroupbyKey(); + for (String keyStr : entry.getKey()) { + try { + key.addValue(keyStr.getBytes(QueryConstants.CHARSET)); + } catch (UnsupportedEncodingException e) { + LOG.error(e.getMessage(), e); + } + } + GroupbyValue value = new GroupbyValue(); + for (Function f : entry.getValue()) { + value.add(f.result()); + value.addMeta(f.count()); + } + results.add(new GroupbyKeyValue(key, value)); + } + + return results; + } + + public static interface FunctionFactory { + public Function createFunction(); + } + + public abstract static class Function { + protected int count; + + public abstract void run(double v); + + public abstract double result(); + + public int count() { + 
return count; + } + + public void incrCount() { + count++; + } + } + + private static class CountFactory implements FunctionFactory { + @Override + public Function createFunction() { + return new Count(); + } + } + + + private static class Count extends Sum { + public Count() { + super(); + } + } + + private static class SumFactory implements FunctionFactory { + @Override + public Function createFunction() { + return new Sum(); + } + } + + private static class Sum extends Function { + private double summary; + + public Sum() { + this.summary = 0.0; + } + + @Override + public void run(double v) { + this.incrCount(); + this.summary += v; + } + + @Override + public double result() { + return this.summary; + } + } + + private static class MinFactory implements FunctionFactory { + @Override + public Function createFunction() { + return new Min(); + } + } + + public static class Min extends Function { + private double minimum; + + public Min() { + // TODO is this a bug, or only positive numeric calculation is supported + this.minimum = Double.MAX_VALUE; + } + + @Override + public void run(double v) { + if (v < minimum) { + minimum = v; + } + this.incrCount(); + } + + @Override + public double result() { + return minimum; + } + } + + private static class MaxFactory implements FunctionFactory { + @Override + public Function createFunction() { + return new Max(); + } + } + + public static class Max extends Function { + private double maximum; + + public Max() { + // TODO is this a bug, or only positive numeric calculation is supported + this.maximum = 0.0; + } + + @Override + public void run(double v) { + if (v > maximum) { + maximum = v; + } + this.incrCount(); + } + + @Override + public double result() { + return maximum; + } + } + + private static class AvgFactory implements FunctionFactory { + @Override + public Function createFunction() { + return new Avg(); + } + } + + public static class Avg extends Function { + private double total; + + public Avg() { + this.total = 
0.0; + } + + @Override + public void run(double v) { + total += v; + this.incrCount(); + } + + @Override + public double result() { + return this.total / this.count; + } + } } \ No newline at end of file diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/GroupbyFieldsComparator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/GroupbyFieldsComparator.java index 66354836ad..b3b348cb17 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/GroupbyFieldsComparator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/GroupbyFieldsComparator.java @@ -22,18 +22,20 @@ /** * this is default comparator for aggregation. The behavior is to sort by groupby fields ascendantly */ -public class GroupbyFieldsComparator implements Comparator>{ - @Override - public int compare(List list1, List list2){ - if(list1 == null || list2 == null || list1.size() != list2.size()) - throw new IllegalArgumentException("2 list of groupby fields must be non-null and have the same size"); - int r = 0; - int index = 0; - for(String s1 : list1){ - r = s1.compareTo(list2.get(index++)); - if(r != 0) - return r; - } - return r; - } +public class GroupbyFieldsComparator implements Comparator> { + @Override + public int compare(List list1, List list2) { + if (list1 == null || list2 == null || list1.size() != list2.size()) { + throw new IllegalArgumentException("2 list of groupby fields must be non-null and have the same size"); + } + int r = 0; + int index = 0; + for (String s1 : list1) { + r = s1.compareTo(list2.get(index++)); + if (r != 0) { + return r; + } + } + return r; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/HierarchicalAggregateEntity.java 
b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/HierarchicalAggregateEntity.java index 9e782330e6..385ced9f6c 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/HierarchicalAggregateEntity.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/HierarchicalAggregateEntity.java @@ -16,52 +16,56 @@ */ package org.apache.eagle.query.aggregate.timeseries; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.SortedMap; -import java.util.SortedSet; -import java.util.TreeMap; - import org.codehaus.jackson.map.annotate.JsonSerialize; -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) +import java.util.*; + +@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) public class HierarchicalAggregateEntity { - private String key; - private List tmpValues = new ArrayList(); - private List values = new ArrayList(); - private SortedMap children = new TreeMap(); - private SortedSet> sortedList = null; + private String key; + private List tmpValues = new ArrayList(); + private List values = new ArrayList(); + private SortedMap children = new TreeMap(); + private SortedSet> sortedList = null; + + public SortedSet> getSortedList() { + return sortedList; + } + + public void setSortedList( + SortedSet> sortedList) { + this.sortedList = sortedList; + } + + public List getTmpValues() { + return tmpValues; + } + + public void setTmpValues(List tmpValues) { + this.tmpValues = tmpValues; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public List getValues() { + return values; + } + + public void setValues(List values) { + this.values = values; + } + + public SortedMap getChildren() { + return children; + } - public SortedSet> getSortedList() { - return sortedList; - } - public void setSortedList( - SortedSet> 
sortedList) { - this.sortedList = sortedList; - } - public List getTmpValues() { - return tmpValues; - } - public void setTmpValues(List tmpValues) { - this.tmpValues = tmpValues; - } - public String getKey() { - return key; - } - public void setKey(String key) { - this.key = key; - } - public List getValues() { - return values; - } - public void setValues(List values) { - this.values = values; - } - public SortedMap getChildren() { - return children; - } - public void setChildren(SortedMap children) { - this.children = children; - } + public void setChildren(SortedMap children) { + this.children = children; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/HierarchicalAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/HierarchicalAggregator.java index ecb80ac87f..67b251945d 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/HierarchicalAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/HierarchicalAggregator.java @@ -16,67 +16,67 @@ */ package org.apache.eagle.query.aggregate.timeseries; -import java.util.List; -import java.util.SortedMap; - import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.query.aggregate.AggregateFunctionType; -public class HierarchicalAggregator extends AbstractAggregator{ - private HierarchicalAggregateEntity root = new HierarchicalAggregateEntity(); +import java.util.List; +import java.util.SortedMap; + +public class HierarchicalAggregator extends AbstractAggregator { + private HierarchicalAggregateEntity root = new HierarchicalAggregateEntity(); - public HierarchicalAggregator(List groupbyFields, List aggregateFuntionTypes, List aggregatedFields){ - super(groupbyFields, aggregateFuntionTypes, aggregatedFields); - } + public 
HierarchicalAggregator(List groupbyFields, List aggregateFuntionTypes, List aggregatedFields) { + super(groupbyFields, aggregateFuntionTypes, aggregatedFields); + } - public void accumulate(TaggedLogAPIEntity entity) throws Exception{ - List preAggregatedValues = createPreAggregatedValues(entity); - // aggregate to root first - addDatapoint(root, preAggregatedValues); - // go through hierarchical tree - HierarchicalAggregateEntity current = root; - int i = 0; - for(String groupbyField : groupbyFields){ - // determine groupbyFieldValue from tag or fields - String groupbyFieldValue = determineGroupbyFieldValue(entity, groupbyField, i); - SortedMap children = current.getChildren(); - if(children.get(groupbyFieldValue) == null){ - HierarchicalAggregateEntity tmp = new HierarchicalAggregateEntity(); - children.put(groupbyFieldValue, tmp); - } - children.get(groupbyFieldValue).setKey(groupbyFieldValue); - addDatapoint(children.get(groupbyFieldValue), preAggregatedValues); - current = children.get(groupbyFieldValue); - } - } + public void accumulate(TaggedLogAPIEntity entity) throws Exception { + List preAggregatedValues = createPreAggregatedValues(entity); + // aggregate to root first + addDatapoint(root, preAggregatedValues); + // go through hierarchical tree + HierarchicalAggregateEntity current = root; + int i = 0; + for (String groupbyField : groupbyFields) { + // determine groupbyFieldValue from tag or fields + String groupbyFieldValue = determineGroupbyFieldValue(entity, groupbyField, i); + SortedMap children = current.getChildren(); + if (children.get(groupbyFieldValue) == null) { + HierarchicalAggregateEntity tmp = new HierarchicalAggregateEntity(); + children.put(groupbyFieldValue, tmp); + } + children.get(groupbyFieldValue).setKey(groupbyFieldValue); + addDatapoint(children.get(groupbyFieldValue), preAggregatedValues); + current = children.get(groupbyFieldValue); + } + } - private void addDatapoint(HierarchicalAggregateEntity entity, List values){ - List 
functions = entity.getTmpValues(); - // initialize list of function - if(functions.isEmpty()){ - for(AggregateFunctionType type : aggregateFunctionTypes){ - functions.add(GroupbyBucket._functionFactories.get(type.name()).createFunction()); - } - } - int functionIndex = 0; - for(Double v : values){ - functions.get(functionIndex).run(v); - functionIndex++; - } - } + private void addDatapoint(HierarchicalAggregateEntity entity, List values) { + List functions = entity.getTmpValues(); + // initialize list of function + if (functions.isEmpty()) { + for (AggregateFunctionType type : aggregateFunctionTypes) { + functions.add(GroupbyBucket._functionFactories.get(type.name()).createFunction()); + } + } + int functionIndex = 0; + for (Double v : values) { + functions.get(functionIndex).run(v); + functionIndex++; + } + } - private void finalizeHierarchicalAggregateEntity(HierarchicalAggregateEntity entity){ - for(GroupbyBucket.Function f : entity.getTmpValues()){ - entity.getValues().add(f.result()); - } - for(HierarchicalAggregateEntity child : entity.getChildren().values()){ - finalizeHierarchicalAggregateEntity(child); - } - entity.setTmpValues(null); - } + private void finalizeHierarchicalAggregateEntity(HierarchicalAggregateEntity entity) { + for (GroupbyBucket.Function f : entity.getTmpValues()) { + entity.getValues().add(f.result()); + } + for (HierarchicalAggregateEntity child : entity.getChildren().values()) { + finalizeHierarchicalAggregateEntity(child); + } + entity.setTmpValues(null); + } - public HierarchicalAggregateEntity result(){ - finalizeHierarchicalAggregateEntity(root); - return this.root; - } + public HierarchicalAggregateEntity result() { + finalizeHierarchicalAggregateEntity(root); + return this.root; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/PostFlatAggregateSort.java 
b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/PostFlatAggregateSort.java index f62d2c258a..f59c6bebc2 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/PostFlatAggregateSort.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/PostFlatAggregateSort.java @@ -16,78 +16,78 @@ */ package org.apache.eagle.query.aggregate.timeseries; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.SortedSet; -import java.util.TreeSet; +import java.util.*; public class PostFlatAggregateSort { - private static SortedSet, List>> sortByValue(Map, List> map, List sortOptions) { - SortedSet, List>> sortedEntries = new TreeSet, List>>(new MapEntryComparator(sortOptions)); - sortedEntries.addAll(map.entrySet()); - return sortedEntries; - } + private static SortedSet, List>> sortByValue(Map, List> map, List sortOptions) { + SortedSet, List>> sortedEntries = new TreeSet, List>>(new MapEntryComparator(sortOptions)); + sortedEntries.addAll(map.entrySet()); + return sortedEntries; + } - /** - * sort aggregated results with sort options - * @param aggregatedResult aggregated result set, but it is not sorted - * @sortOptions sorting options - * @topN top N results will be returned if topN is specified. 
If it's not specified (as default value 0), all results will be returned - */ - public static List, List>> sort(Map, List> aggregatedResult, List sortOptions, int topN){ - SortedSet, List>> allList = sortByValue(aggregatedResult, sortOptions); - List, List>> result = new ArrayList, List>>(); - for (Map.Entry, List> entry : allList) { - result.add(entry); - if (topN > 0 && result.size() >= topN) { - break; - } - } - return result; - } + /** + * sort aggregated results with sort options + * + * @param aggregatedResult aggregated result set, but it is not sorted + * @sortOptions sorting options + * @topN top N results will be returned if topN is specified. If it's not specified (as default value 0), all results will be returned + */ + public static List, List>> sort(Map, List> aggregatedResult, List sortOptions, int topN) { + SortedSet, List>> allList = sortByValue(aggregatedResult, sortOptions); + List, List>> result = new ArrayList, List>>(); + for (Map.Entry, List> entry : allList) { + result.add(entry); + if (topN > 0 && result.size() >= topN) { + break; + } + } + return result; + } + + private static class MapEntryComparator implements Comparator, List>> { + private List sortOptions; + + public MapEntryComparator(List sortOptions) { + this.sortOptions = sortOptions; + } - private static class MapEntryComparator implements Comparator, List>>{ - private List sortOptions; - public MapEntryComparator(List sortOptions){ - this.sortOptions = sortOptions; - } - /** - * default to sort by all groupby fields - */ - @Override - public int compare(Map.Entry, List> e1, Map.Entry, List> e2){ - int r = 0; - List keyList1 = e1.getKey(); - List valueList1 = e1.getValue(); - List keyList2 = e2.getKey(); - List valueList2 = e2.getValue(); - for(SortOption so : sortOptions){ - int index = so.getIndex(); - if (index == -1) { - continue; - } - if(!so.isInGroupby()){ // sort fields come from functions - Double value1 = valueList1.get(index); - Double value2 = valueList2.get(index); - 
r = value1.compareTo(value2); - }else{ // sort fields come from groupby fields - String key1 = keyList1.get(index); - String key2 = keyList2.get(index); - r = key1.compareTo(key2); - } - if(r == 0) continue; - if(!so.isAscendant()){ - r = -r; - } - return r; - } - // default to sort by groupby fields ascendently - if(r ==0){ // TODO is this check necessary - return new GroupbyFieldsComparator().compare(keyList1, keyList2); - } - return r; + /** + * default to sort by all groupby fields. + */ + @Override + public int compare(Map.Entry, List> e1, Map.Entry, List> e2) { + int r = 0; + List keyList1 = e1.getKey(); + List valueList1 = e1.getValue(); + List keyList2 = e2.getKey(); + List valueList2 = e2.getValue(); + for (SortOption so : sortOptions) { + int index = so.getIndex(); + if (index == -1) { + continue; + } + if (!so.isInGroupby()) { // sort fields come from functions + Double value1 = valueList1.get(index); + Double value2 = valueList2.get(index); + r = value1.compareTo(value2); + } else { // sort fields come from groupby fields + String key1 = keyList1.get(index); + String key2 = keyList2.get(index); + r = key1.compareTo(key2); + } + if (r == 0) { + continue; + } + if (!so.isAscendant()) { + r = -r; + } + return r; + } + // default to sort by groupby fields ascendently + if (r == 0) { // TODO is this check necessary + return new GroupbyFieldsComparator().compare(keyList1, keyList2); + } + return r; } - } + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/PostHierarchicalAggregateSort.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/PostHierarchicalAggregateSort.java index 7b0997b00b..33733df659 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/PostHierarchicalAggregateSort.java +++ 
b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/PostHierarchicalAggregateSort.java @@ -16,77 +16,75 @@ */ package org.apache.eagle.query.aggregate.timeseries; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.SortedSet; -import java.util.TreeSet; +import java.util.*; public class PostHierarchicalAggregateSort { - private static SortedSet> sortByValue(HierarchicalAggregateEntity entity, List sortOptions) { - SortedSet> sortedEntries = new TreeSet>(new MapEntryComparator(sortOptions)); - sortedEntries.addAll(entity.getChildren().entrySet()); - return sortedEntries; - } + private static SortedSet> sortByValue(HierarchicalAggregateEntity entity, List sortOptions) { + SortedSet> sortedEntries = new TreeSet>(new MapEntryComparator(sortOptions)); + sortedEntries.addAll(entity.getChildren().entrySet()); + return sortedEntries; + } - /** - * sort aggregated results with sort options + /** + * sort aggregated results with sort options. 
* * @param result * @param sortOptions * @return */ - public static HierarchicalAggregateEntity sort(HierarchicalAggregateEntity result, List sortOptions){ - SortedSet> tmp = sortByValue(result, sortOptions); - result.setSortedList(tmp); - result.setChildren(null); - for(Map.Entry entry : tmp){ - sort(entry.getValue(), sortOptions); - } - return result; - } + public static HierarchicalAggregateEntity sort(HierarchicalAggregateEntity result, List sortOptions) { + SortedSet> tmp = sortByValue(result, sortOptions); + result.setSortedList(tmp); + result.setChildren(null); + for (Map.Entry entry : tmp) { + sort(entry.getValue(), sortOptions); + } + return result; + } + + private static class MapEntryComparator implements Comparator> { + private List sortOptions; - private static class MapEntryComparator implements Comparator>{ - private List sortOptions; + public MapEntryComparator(List sortOptions) { + this.sortOptions = sortOptions; + } - public MapEntryComparator(List sortOptions){ - this.sortOptions = sortOptions; - } + /** + * default to sort by all groupby fields. 
+ */ + @Override + public int compare(Map.Entry e1, Map.Entry e2) { + int r = 0; + String key1 = e1.getKey(); + List valueList1 = e1.getValue().getValues(); + String key2 = e2.getKey(); + List valueList2 = e2.getValue().getValues(); + for (SortOption so : sortOptions) { + int index = so.getIndex(); + if (index == -1) { + continue; + } + if (!so.isInGroupby()) { // sort fields come from functions + Double value1 = valueList1.get(index); + Double value2 = valueList2.get(index); + r = value1.compareTo(value2); + } + // sort fields come from groupby fields, then silently ignored - /** - * default to sort by all groupby fields - */ - @Override - public int compare(Map.Entry e1, Map.Entry e2){ - int r = 0; - String key1 = e1.getKey(); - List valueList1 = e1.getValue().getValues(); - String key2 = e2.getKey(); - List valueList2 = e2.getValue().getValues(); - for(SortOption so : sortOptions){ - int index = so.getIndex(); - if (index == -1) { - continue; - } - if(!so.isInGroupby()){ // sort fields come from functions - Double value1 = valueList1.get(index); - Double value2 = valueList2.get(index); - r = value1.compareTo(value2); - } - // sort fields come from groupby fields, then silently ignored - - if(r == 0) continue; - if(!so.isAscendant()){ - r = -r; - } - return r; - } - // default to sort by groupby fields ascendently - if(r ==0){ - return key1.compareTo(key2); - } - return r; + if (r == 0) { + continue; + } + if (!so.isAscendant()) { + r = -r; + } + return r; + } + // default to sort by groupby fields ascendently + if (r == 0) { + return key1.compareTo(key2); + } + return r; } - } + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SortOption.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SortOption.java index d1578ac6df..151f928c69 100644 --- 
a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SortOption.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SortOption.java @@ -20,30 +20,34 @@ * sum(field1), max(field2) groupby(field3, field4) sort by field1 asc, field3 desc * There are 2 SortOption object, then * the 1st one is inGroupby=false, index=0, ascendent=true - * the 2nd one is inGroupby=true, index=1, ascendent=false - * + * the 2nd one is inGroupby=true, index=1, ascendent=false. */ public class SortOption { - private boolean inGroupby; // sort field defaultly is not from groupby fields - private int index; // index relative to list of groupby fields or list of functions - private boolean ascendant; //asc or desc + private boolean inGroupby; // sort field defaultly is not from groupby fields + private int index; // index relative to list of groupby fields or list of functions + private boolean ascendant; //asc or desc + + public boolean isInGroupby() { + return inGroupby; + } + + public void setInGroupby(boolean inGroupby) { + this.inGroupby = inGroupby; + } + + public int getIndex() { + return index; + } + + public void setIndex(int index) { + this.index = index; + } + + public boolean isAscendant() { + return ascendant; + } - public boolean isInGroupby() { - return inGroupby; - } - public void setInGroupby(boolean inGroupby) { - this.inGroupby = inGroupby; - } - public int getIndex() { - return index; - } - public void setIndex(int index) { - this.index = index; - } - public boolean isAscendant() { - return ascendant; - } - public void setAscendant(boolean ascendant) { - this.ascendant = ascendant; - } + public void setAscendant(boolean ascendant) { + this.ascendant = ascendant; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SortOptionsParser.java 
b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SortOptionsParser.java index 1360e0cc1a..c86f1cc881 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SortOptionsParser.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SortOptionsParser.java @@ -16,54 +16,54 @@ */ package org.apache.eagle.query.aggregate.timeseries; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - public class SortOptionsParser { - private static final Logger LOG = LoggerFactory.getLogger(SortOptionsParser.class); - private static Pattern pattern = Pattern.compile("^(.+)\\s+(asc|desc)$"); - - public static List parse(List groupbyFields, List aggregatedFields, List sortOptions, List sortFields){ - List list = new ArrayList(); - for(String sortOption : sortOptions){ - Matcher m = pattern.matcher(sortOption); - if(!m.find()){ - throw new IllegalArgumentException("sort option must have the format of asc|desc"); - } - String field = m.group(1); - if (sortFields != null) { - sortFields.add(field); - } - SortOption so = new SortOption(); - list.add(so); - so.setAscendant(m.group(2).equals("asc") ? 
true : false); - int index = aggregatedFields.indexOf(field); - if(index > -1){ - so.setInGroupby(false); - so.setIndex(index); - continue; - } - if(groupbyFields != null){ // if groupbyFields is not provided, ignore this sort field - index = groupbyFields.indexOf(field); - if(index > -1){ - so.setInGroupby(true); - so.setIndex(index); - continue; - } - } - logNonExistingSortByField(field); - so.setInGroupby(false); - so.setIndex(-1); - } - return list; - } - - private static void logNonExistingSortByField(String sortByField){ - LOG.warn("Sortby field is neither in aggregated fields or groupby fields, ignore " + sortByField); - } + private static final Logger LOG = LoggerFactory.getLogger(SortOptionsParser.class); + private static Pattern pattern = Pattern.compile("^(.+)\\s+(asc|desc)$"); + + public static List parse(List groupbyFields, List aggregatedFields, List sortOptions, List sortFields) { + List list = new ArrayList(); + for (String sortOption : sortOptions) { + Matcher m = pattern.matcher(sortOption); + if (!m.find()) { + throw new IllegalArgumentException("sort option must have the format of asc|desc"); + } + String field = m.group(1); + if (sortFields != null) { + sortFields.add(field); + } + SortOption so = new SortOption(); + list.add(so); + so.setAscendant(m.group(2).equals("asc") ? 
true : false); + int index = aggregatedFields.indexOf(field); + if (index > -1) { + so.setInGroupby(false); + so.setIndex(index); + continue; + } + if (groupbyFields != null) { // if groupbyFields is not provided, ignore this sort field + index = groupbyFields.indexOf(field); + if (index > -1) { + so.setInGroupby(true); + so.setIndex(index); + continue; + } + } + logNonExistingSortByField(field); + so.setInGroupby(false); + so.setIndex(-1); + } + return list; + } + + private static void logNonExistingSortByField(String sortByField) { + LOG.warn("Sortby field is neither in aggregated fields or groupby fields, ignore " + sortByField); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SynchronizedAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SynchronizedAggregator.java index d8b781ed8b..f4eabcd408 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SynchronizedAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SynchronizedAggregator.java @@ -18,18 +18,18 @@ import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -public class SynchronizedAggregator implements Aggregator{ - private Object mutex = new Object(); - private Aggregator agg; - - public SynchronizedAggregator(Aggregator agg){ - this.agg = agg; - } - - @Override - public void accumulate(TaggedLogAPIEntity entity) throws Exception{ - synchronized(mutex){ - agg.accumulate(entity); - } - } -} +public class SynchronizedAggregator implements Aggregator { + private Object mutex = new Object(); + private Aggregator agg; + + public SynchronizedAggregator(Aggregator agg) { + this.agg = agg; + } + + @Override + public void accumulate(TaggedLogAPIEntity entity) throws Exception { + synchronized (mutex) { + agg.accumulate(entity); + } + } +} diff --git 
a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SynchronizedEntityCreationListener.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SynchronizedEntityCreationListener.java index 7c1412e893..baa89be826 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SynchronizedEntityCreationListener.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/SynchronizedEntityCreationListener.java @@ -19,18 +19,18 @@ import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.EntityCreationListener; -public class SynchronizedEntityCreationListener implements EntityCreationListener{ - private Object mutex = new Object(); - private EntityCreationListener listener; - - public SynchronizedEntityCreationListener(EntityCreationListener listener){ - this.listener = listener; - } - - @Override - public void entityCreated(TaggedLogAPIEntity entity) throws Exception{ - synchronized(mutex){ - listener.entityCreated(entity); - } - } +public class SynchronizedEntityCreationListener implements EntityCreationListener { + private Object mutex = new Object(); + private EntityCreationListener listener; + + public SynchronizedEntityCreationListener(EntityCreationListener listener) { + this.listener = listener; + } + + @Override + public void entityCreated(TaggedLogAPIEntity entity) throws Exception { + synchronized (mutex) { + listener.entityCreated(entity); + } + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesAggregator.java index 5bebe13243..df29f875b1 100755 --- 
a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesAggregator.java @@ -29,141 +29,144 @@ import java.util.Map; /** - * TODO Assuming that data point comes in the sequence of occurrence time desc or asc would + * TODO Assuming that data point comes in the sequence of occurrence time desc or asc would * save memory for holding all the data in the memory - * *

    Aggregate Bucket Structure

    *
      * {
      *  ["key1","key2",...,(entity.getTimestamp() - startTime)/intervalms]:[value1,value2,...,valuen]
      * }
      * 
    - * */ public class TimeSeriesAggregator extends FlatAggregator implements GroupbyKeyAggregatable { - private final static Logger LOG = LoggerFactory.getLogger(TimeSeriesAggregator.class); - private static final int DEFAULT_DATAPOINT_MAX_COUNT = 1000; - private long startTime; - private long endTime; - private long intervalms; - private int numFunctions; - private int ignoredEntityCounter = 0; - - public TimeSeriesAggregator(List groupbyFields, List aggregateFuntionTypes, List aggregatedFields, - long startTime, long endTime, long intervalms){ - super(groupbyFields, aggregateFuntionTypes, aggregatedFields); - // guard to avoid too many data points returned -// validateTimeRange(startTime, endTime, intervalms); - this.startTime = startTime; - this.endTime = endTime; - this.intervalms = intervalms; - this.numFunctions = aggregateFuntionTypes.size(); - } + private static final Logger LOG = LoggerFactory.getLogger(TimeSeriesAggregator.class); + private static final int DEFAULT_DATAPOINT_MAX_COUNT = 1000; + private long startTime; + private long endTime; + private long intervalms; + private int numFunctions; + private int ignoredEntityCounter = 0; + + public TimeSeriesAggregator(List groupbyFields, List aggregateFuntionTypes, List aggregatedFields, + long startTime, long endTime, long intervalms) { + super(groupbyFields, aggregateFuntionTypes, aggregatedFields); + // guard to avoid too many data points returned + // validateTimeRange(startTime, endTime, intervalms); + this.startTime = startTime; + this.endTime = endTime; + this.intervalms = intervalms; + this.numFunctions = aggregateFuntionTypes.size(); + } + + // @Deprecated + // public static void validateTimeRange(long startTime, long endTime, long intervalms) { + // if (startTime >= endTime || intervalms <= 0) { + // throw new IllegalArgumentException("invalid argument, startTime should be less than endTime and interval must be greater than 0, starTime is " + startTime + " and endTime is " + endTime + // + ", 
interval is " + intervalms); + // } + // if ((endTime - startTime) / intervalms > DEFAULT_DATAPOINT_MAX_COUNT) { + // throw new IllegalArgumentException("invalid argument, # of datapoints should be less than " + DEFAULT_DATAPOINT_MAX_COUNT + ", current # of datapoints is " + (endTime - startTime) + // / intervalms); + // } + // } + + public void accumulate(TaggedLogAPIEntity entity) throws Exception { + List groupbyFieldValues = createGroup(entity); + // TODO: make sure timestamp be in range of this.startTime to this.endTime in outer side + // guard the time range to avoid to accumulate entities whose timestamp is bigger than endTime + if (entity.getTimestamp() >= this.endTime || entity.getTimestamp() < this.startTime) { + if (LOG.isDebugEnabled()) { + LOG.debug("Ignore in-coming entity whose timestamp > endTime or < startTime, timestamp: " + entity.getTimestamp() + ", startTime:" + startTime + ", endTime:" + endTime); + } + this.ignoredEntityCounter++; + return; + } + // time series bucket index + long located = (entity.getTimestamp() - startTime) / intervalms; + groupbyFieldValues.add(String.valueOf(located)); + List preAggregatedValues = createPreAggregatedValues(entity); + bucket.addDatapoint(groupbyFieldValues, preAggregatedValues); + } + + public Map, List> result() { + if (this.ignoredEntityCounter > 0) { + LOG.warn("Ignored " + this.ignoredEntityCounter + " entities for reason: timestamp > " + this.endTime + " or < " + this.startTime); + } + return bucket.result(); + } -// @Deprecated -// public static void validateTimeRange(long startTime, long endTime, long intervalms){ -// if(startTime >= endTime || intervalms <= 0){ -// throw new IllegalArgumentException("invalid argument, startTime should be less than endTime and interval must be greater than 0, starTime is " + startTime + " and endTime is " + endTime + ", interval is " + intervalms); -// } -// if((endTime-startTime)/intervalms > DEFAULT_DATAPOINT_MAX_COUNT){ -// throw new 
IllegalArgumentException("invalid argument, # of datapoints should be less than " + DEFAULT_DATAPOINT_MAX_COUNT + ", current # of datapoints is " + (endTime-startTime)/intervalms); -// } -// } - - public void accumulate(TaggedLogAPIEntity entity) throws Exception{ - List groupbyFieldValues = createGroup(entity); - // TODO: make sure timestamp be in range of this.startTime to this.endTime in outer side - // guard the time range to avoid to accumulate entities whose timestamp is bigger than endTime - if(entity.getTimestamp() >= this.endTime || entity.getTimestamp() < this.startTime){ - if(LOG.isDebugEnabled()) LOG.debug("Ignore in-coming entity whose timestamp > endTime or < startTime, timestamp: " + entity.getTimestamp() + ", startTime:" + startTime + ", endTime:" + endTime); - this.ignoredEntityCounter ++; - return; - } - // time series bucket index - long located =(entity.getTimestamp() - startTime)/intervalms; - groupbyFieldValues.add(String.valueOf(located)); - List preAggregatedValues = createPreAggregatedValues(entity); - bucket.addDatapoint(groupbyFieldValues, preAggregatedValues); - } - - public Map, List> result(){ - if(this.ignoredEntityCounter > 0) - LOG.warn("Ignored "+this.ignoredEntityCounter+" entities for reason: timestamp > "+this.endTime+" or < "+this.startTime); - return bucket.result(); - } + /** + * Support new aggregate result. 
+ * + * @return + */ + @Override + public List getGroupbyKeyValues() { + if (this.ignoredEntityCounter > 0) { + LOG.warn("Ignored " + this.ignoredEntityCounter + " entities for reason: timestamp > " + this.endTime + " or < " + this.startTime); + } + return bucket.getGroupbyKeyValue(); + } - /** - * Support new aggregate result - * - * @return - */ - @Override - public List getGroupbyKeyValues(){ - if(this.ignoredEntityCounter > 0) - LOG.warn("Ignored "+this.ignoredEntityCounter+" entities for reason: timestamp > "+this.endTime+" or < "+this.startTime); - return bucket.getGroupbyKeyValue(); - } - - public Map, List> getMetric(){ - // groupbyfields+timeseriesbucket --> aggregatedvalues for different function - Map, List> result = bucket.result(); -// Map, List> timeseriesDatapoints = new HashMap, List>(); -// /** -// * bug fix: startTime is inclusive and endTime is exclusive -// */ -//// int numDatapoints =(int)((endTime-startTime)/intervalms + 1); -// int numDatapoints =(int)((endTime-1-startTime)/intervalms + 1); -// for(Map.Entry, List> entry : result.entrySet()){ -// // get groups -// List groupbyFields = entry.getKey(); -// List copy = new ArrayList(groupbyFields); -// String strTimeseriesIndex = copy.remove(copy.size()-1); -// List functionValues = timeseriesDatapoints.get(copy); -// if(functionValues == null){ -// functionValues = new ArrayList(); -// timeseriesDatapoints.put(copy, functionValues); -// for(int i=0; i, List> getMetric() { + // groupbyfields+timeseriesbucket --> aggregatedvalues for different function + Map, List> result = bucket.result(); + // Map, List> timeseriesDatapoints = new HashMap, List>(); + // /** + // * bug fix: startTime is inclusive and endTime is exclusive + // */ + // // int numDatapoints =(int)((endTime-startTime)/intervalms + 1); + // int numDatapoints =(int)((endTime-1-startTime)/intervalms + 1); + // for(Map.Entry, List> entry : result.entrySet()){ + // List groupbyFields = entry.getKey(); + // List copy = new 
ArrayList(groupbyFields); + // String strTimeseriesIndex = copy.remove(copy.size()-1); + // List functionValues = timeseriesDatapoints.get(copy); + // if(functionValues == null){ + // functionValues = new ArrayList(); + // timeseriesDatapoints.put(copy, functionValues); + // for(int i=0; i, List> toMetric(Map, List> result,int numDatapoints,int numFunctions){ - Map, List> timeseriesDatapoints = new HashMap, List>(); - /** - * bug fix: startTime is inclusive and endTime is exclusive - */ -// int numDatapoints =(int)((endTime-startTime)/intervalms + 1); -// int numDatapoints =(int)((endTime-1-startTime)/intervalms + 1); - for(Map.Entry, List> entry : result.entrySet()){ - // get groups - List groupbyFields = entry.getKey(); - List copy = new ArrayList(groupbyFields); - String strTimeseriesIndex = copy.remove(copy.size()-1); - List functionValues = timeseriesDatapoints.get(copy); - if(functionValues == null){ - functionValues = new ArrayList(); - timeseriesDatapoints.put(copy, functionValues); - for(int i=0; i, List> toMetric(Map, List> result, int numDatapoints, int numFunctions) { + Map, List> timeseriesDatapoints = new HashMap, List>(); + /** + * bug fix: startTime is inclusive and endTime is exclusive + */ + // int numDatapoints =(int)((endTime-startTime)/intervalms + 1); + // int numDatapoints =(int)((endTime-1-startTime)/intervalms + 1); + for (Map.Entry, List> entry : result.entrySet()) { + // get groups + List groupbyFields = entry.getKey(); + List copy = new ArrayList(groupbyFields); + String strTimeseriesIndex = copy.remove(copy.size() - 1); + List functionValues = timeseriesDatapoints.get(copy); + if (functionValues == null) { + functionValues = new ArrayList(); + timeseriesDatapoints.put(copy, functionValues); + for (int i = 0; i < numFunctions; i++) { + functionValues.add(new double[numDatapoints]); + } + } + int timeseriesIndex = Integer.valueOf(strTimeseriesIndex); + int functionIndex = 0; + for (double[] values : functionValues) { + 
values[timeseriesIndex] = entry.getValue().get(functionIndex); + functionIndex++; + } + } + return timeseriesDatapoints; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesBucket.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesBucket.java index d662658f52..0e0b72c782 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesBucket.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesBucket.java @@ -16,61 +16,63 @@ */ package org.apache.eagle.query.aggregate.timeseries; -import java.util.ArrayList; -import java.util.List; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.ArrayList; +import java.util.List; + /** - * only numeric aggregation is supported and number type supported is double + * only numeric aggregation is supported and number type supported is double. */ public class TimeSeriesBucket { - private final static Logger LOG = LoggerFactory.getLogger(TimeSeriesBucket.class); - private long startTime; - private long endTime; - private long interval; - - // map of aggregation function to aggregated values - List aggregatedValues = new ArrayList(); - - // align from the startTime - /** - * - * @param startTime milliseconds - * @param endTime milliseconds - * @param intervalMillseconds - * @param aggFunctions - */ - public TimeSeriesBucket(long startTime, long endTime, long intervalms, int numAggFunctions){ - int count =(int)((endTime-startTime)/intervalms); - for(int i=0; i values){ - // locate timeseries bucket - if(timestamp < startTime || timestamp > endTime){ - LOG.warn("timestampendTime, ignore this datapoint." 
+ timestamp + "," + startTime + ":" + endTime); - return; - } - int located =(int)((timestamp - startTime)/interval); - int index = 0; - for(Double src : values){ - double[] timeSeriesValues = aggregatedValues.get(index); - timeSeriesValues[located] += src; - index++; - } - } - - public List aggregatedValues(){ - return this.aggregatedValues; - } + private static final Logger LOG = LoggerFactory.getLogger(TimeSeriesBucket.class); + private long startTime; + private long endTime; + private long interval; + + // map of aggregation function to aggregated values + List aggregatedValues = new ArrayList(); + + // align from the startTime + + /** + * constructor. + * @param startTime milliseconds + * @param endTime milliseconds + * @param intervalMillseconds + * @param aggFunctions + */ + public TimeSeriesBucket(long startTime, long endTime, long intervalms, int numAggFunctions) { + int count = (int) ((endTime - startTime) / intervalms); + for (int i = 0; i < numAggFunctions; i++) { + aggregatedValues.add(new double[count]); + } + } + + /** + * add datapoint which has a list of values for different aggregate functions + * for example, sum(numHosts), count(*), avg(timespan) etc. + * + * @param timestamp + * @param values + */ + public void addDataPoint(long timestamp, List values) { + // locate timeseries bucket + if (timestamp < startTime || timestamp > endTime) { + LOG.warn("timestampendTime, ignore this datapoint." 
+ timestamp + "," + startTime + ":" + endTime); + return; + } + int located = (int) ((timestamp - startTime) / interval); + int index = 0; + for (Double src : values) { + double[] timeSeriesValues = aggregatedValues.get(index); + timeSeriesValues[located] += src; + index++; + } + } + + public List aggregatedValues() { + return this.aggregatedValues; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesPostFlatAggregateSort.java b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesPostFlatAggregateSort.java index c0a6e062d0..92c8ceee6e 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesPostFlatAggregateSort.java +++ b/eagle-core/eagle-query/eagle-query-base/src/main/java/org/apache/eagle/query/aggregate/timeseries/TimeSeriesPostFlatAggregateSort.java @@ -17,135 +17,131 @@ package org.apache.eagle.query.aggregate.timeseries; import java.io.Serializable; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.SortedSet; -import java.util.TreeSet; +import java.util.*; public class TimeSeriesPostFlatAggregateSort { - // private static final Logger logger = - // LoggerFactory.getLogger(PostFlatAggregateSort.class); - - private static SortedSet, List>> sortByValue( - Map, List> mapForSort, - List sortOptions) { - SortedSet, List>> sortedEntries = new TreeSet, List>>( - new MapEntryComparator(sortOptions)); - sortedEntries.addAll(mapForSort.entrySet()); - return sortedEntries; - } - - /** - * sort aggregated results with sort options - * - * @param entity - */ - public static List, List>> sort( - Map, List> mapForSort, - Map, List> valueMap, - List sortOptions, int topN) { - - processIndex(sortOptions); - List, List>> result = new ArrayList, List>>(); - SortedSet, List>> sortedSet = sortByValue( - mapForSort, 
sortOptions); - for (Map.Entry, List> entry : sortedSet) { - List key = entry.getKey(); - List value = valueMap.get(key); - if (value != null) { - Map.Entry, List> newEntry = new ImmutableEntry, List>(key, value); - result.add(newEntry); - if (topN > 0 && result.size() >= topN) { - break; - } - } - } - return result; - } - - private static void processIndex(List sortOptions) { - for (int i = 0; i < sortOptions.size(); ++i) { - SortOption so = sortOptions.get(i); - so.setIndex(i); - } - } - - private static class MapEntryComparator implements - Comparator, List>> { - private List sortOptions; - - public MapEntryComparator(List sortOptions) { - this.sortOptions = sortOptions; - } - - /** - * default to sort by all groupby fields - */ - @Override - public int compare(Map.Entry, List> e1, - Map.Entry, List> e2) { - int r = 0; - List keyList1 = e1.getKey(); - List valueList1 = e1.getValue(); - List keyList2 = e2.getKey(); - List valueList2 = e2.getValue(); - for (SortOption so : sortOptions) { - int index = so.getIndex(); - if (index == -1) { - continue; - } - if (!so.isInGroupby()) { // sort fields come from functions - Double value1 = valueList1.get(index); - Double value2 = valueList2.get(index); - r = value1.compareTo(value2); - } else { // sort fields come from groupby fields - String key1 = keyList1.get(index); - String key2 = keyList2.get(index); - r = key1.compareTo(key2); - } - if (r == 0) - continue; - if (!so.isAscendant()) { - r = -r; - } - return r; - } - // default to sort by groupby fields ascendently - if (r == 0) { // TODO is this check necessary - return new GroupbyFieldsComparator() - .compare(keyList1, keyList2); - } - return r; - } - } - - static class ImmutableEntry implements Map.Entry, Serializable { - private final K key; - private final V value; - - ImmutableEntry(K key, V value) { - this.key = key; - this.value = value; - } - - @Override - public K getKey() { - return key; - } - - @Override - public V getValue() { - return value; - } - - 
@Override - public final V setValue(V value) { - throw new UnsupportedOperationException(); - } - - private static final long serialVersionUID = 0; - } + // private static final Logger logger = + // LoggerFactory.getLogger(PostFlatAggregateSort.class); + + private static SortedSet, List>> sortByValue( + Map, List> mapForSort, + List sortOptions) { + SortedSet, List>> sortedEntries = new TreeSet, List>>( + new MapEntryComparator(sortOptions)); + sortedEntries.addAll(mapForSort.entrySet()); + return sortedEntries; + } + + /** + * sort aggregated results with sort options. + * + * @param entity + */ + public static List, List>> sort( + Map, List> mapForSort, + Map, List> valueMap, + List sortOptions, int topN) { + + processIndex(sortOptions); + List, List>> result = new ArrayList, List>>(); + SortedSet, List>> sortedSet = sortByValue( + mapForSort, sortOptions); + for (Map.Entry, List> entry : sortedSet) { + List key = entry.getKey(); + List value = valueMap.get(key); + if (value != null) { + Map.Entry, List> newEntry = new ImmutableEntry, List>(key, value); + result.add(newEntry); + if (topN > 0 && result.size() >= topN) { + break; + } + } + } + return result; + } + + private static void processIndex(List sortOptions) { + for (int i = 0; i < sortOptions.size(); ++i) { + SortOption so = sortOptions.get(i); + so.setIndex(i); + } + } + + private static class MapEntryComparator implements + Comparator, List>> { + private List sortOptions; + + public MapEntryComparator(List sortOptions) { + this.sortOptions = sortOptions; + } + + /** + * default to sort by all groupby fields. 
+ */ + @Override + public int compare(Map.Entry, List> e1, + Map.Entry, List> e2) { + int r = 0; + List keyList1 = e1.getKey(); + List valueList1 = e1.getValue(); + List keyList2 = e2.getKey(); + List valueList2 = e2.getValue(); + for (SortOption so : sortOptions) { + int index = so.getIndex(); + if (index == -1) { + continue; + } + if (!so.isInGroupby()) { // sort fields come from functions + Double value1 = valueList1.get(index); + Double value2 = valueList2.get(index); + r = value1.compareTo(value2); + } else { // sort fields come from groupby fields + String key1 = keyList1.get(index); + String key2 = keyList2.get(index); + r = key1.compareTo(key2); + } + if (r == 0) { + continue; + } + if (!so.isAscendant()) { + r = -r; + } + return r; + } + // default to sort by groupby fields ascendently + if (r == 0) { // TODO is this check necessary + return new GroupbyFieldsComparator() + .compare(keyList1, keyList2); + } + return r; + } + } + + static class ImmutableEntry implements Map.Entry, Serializable { + private final K key; + private final V value; + + ImmutableEntry(K key, V value) { + this.key = key; + this.value = value; + } + + @Override + public K getKey() { + return key; + } + + @Override + public V getValue() { + return value; + } + + @Override + public final V setValue(V value) { + throw new UnsupportedOperationException(); + } + + private static final long serialVersionUID = 0; + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/TestHBaseLogReader2.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/TestHBaseLogReader2.java index e3db5c0730..bf05bec4a8 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/TestHBaseLogReader2.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/TestHBaseLogReader2.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the 
License. */ -package org.apache.eagle.query;; +package org.apache.eagle.query; import org.apache.eagle.common.ByteUtil; import org.apache.eagle.common.DateTimeUtil; @@ -24,7 +24,6 @@ import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; import org.apache.eagle.log.entity.test.TestTimeSeriesAPIEntity; -import org.apache.eagle.query.ListQueryCompiler; import org.apache.eagle.service.hbase.EmbeddedHbase; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; @@ -32,129 +31,129 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.text.ParseException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; +; + public class TestHBaseLogReader2 { - private final static Logger LOG = LoggerFactory.getLogger(TestHBaseLogReader2.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseLogReader2.class); private static EmbeddedHbase hbase = EmbeddedHbase.getInstance(); - - @SuppressWarnings("serial") - @Test - public void testStartTimeInclusiveEndTimeExclusive() throws Exception { - EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestTimeSeriesAPIEntity.class); - hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily()); - - EntityDefinitionManager.registerEntity(TestTimeSeriesAPIEntity.class); - - final String cluster = "cluster1"; - final String datacenter = "dc1"; - String serviceName = "TestTimeSeriesAPIEntity"; - GenericEntityWriter writer = new GenericEntityWriter(serviceName); - List entities = new ArrayList(); - TestTimeSeriesAPIEntity entity = new TestTimeSeriesAPIEntity(); - long timestamp1 = DateTimeUtil.humanDateToSeconds("2014-04-08 03:00:00")*1000; - LOG.info("First entity timestamp:" + timestamp1); - entity.setTimestamp(timestamp1); - entity.setTags(new HashMap(){{ - put("cluster", cluster); - 
put("datacenter", datacenter); - }}); - entity.setField7("field7"); - entities.add(entity); - - entity = new TestTimeSeriesAPIEntity(); - long timestamp2 = DateTimeUtil.humanDateToSeconds("2014-05-08 04:00:00")*1000; - LOG.info("Second entity timestamp:" + timestamp2); - entity.setTimestamp(timestamp2); - entity.setTags(new HashMap(){{ - put("cluster", cluster); - put("datacenter", datacenter); - }}); - entity.setField7("field7_2"); - entities.add(entity); - writer.write(entities); - - // for timezone difference between UTC & localtime, enlarge the search range - long queryStartTimestamp = timestamp1-24*60*60*1000; - long queryEndTimestamp = timestamp1+24*60*60*1000; - LOG.info("Query start timestamp:" + queryStartTimestamp); - LOG.info("Query end timestamp:" + queryEndTimestamp); - - String format = "%s[@cluster=\"%s\" AND @datacenter=\"%s\"]{%s}"; - String query = String.format(format, serviceName, cluster, datacenter, "@field7"); - ListQueryCompiler comp = new ListQueryCompiler(query); - SearchCondition condition = new SearchCondition(); - condition.setFilter(comp.filter()); - condition.setQueryExpression(comp.getQueryExpression()); - condition.setOutputFields(comp.outputFields()); - - final List partitionValues = comp.getQueryPartitionValues(); - if (partitionValues != null) { - condition.setPartitionValues(Arrays.asList(partitionValues.get(0))); - } - - condition.setStartRowkey(null); - condition.setPageSize(Integer.MAX_VALUE); - condition.setStartTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(0)); - condition.setEndTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(queryEndTimestamp)); - - GenericEntityBatchReader reader = new GenericEntityBatchReader(serviceName, condition); - List list = reader.read(); - - Assert.assertEquals(1, list.size()); - Assert.assertEquals(timestamp1, list.get(0).getTimestamp()); - Assert.assertEquals("field7", list.get(0).getField7()); - - // for timezone difference between UTC & localtime, enlarge the search range - 
queryStartTimestamp = timestamp1-24*60*60*1000; - queryEndTimestamp = timestamp2+24*60*60*1000; // eagle timestamp is rounded to seconds - condition.setStartTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(queryStartTimestamp)); - condition.setEndTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(queryEndTimestamp)); - reader = new GenericEntityBatchReader(serviceName, condition); - list = reader.read(); - Assert.assertEquals(2, list.size()); - - queryStartTimestamp = timestamp1; - queryEndTimestamp = timestamp1; // eagle timestamp is rounded to seconds - condition.setStartTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(queryStartTimestamp)); - condition.setEndTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(queryEndTimestamp)); - reader = new GenericEntityBatchReader(serviceName, condition); - list = reader.read(); - Assert.assertEquals(0, list.size()); - hbase.deleteTable(entityDefinition.getTable()); - - } - - @Test - public void testByteComparison(){ - byte[] byte1 = new byte[]{-23, 12, 63}; - byte[] byte2 = ByteUtil.concat(byte1, new byte[]{0}); - Assert.assertTrue(Bytes.compareTo(byte1, byte2) < 0); - byte[] byte3 = ByteUtil.concat(byte1, new byte[]{127}); - Assert.assertTrue(Bytes.compareTo(byte2, byte3) < 0); - byte[] byte4 = ByteUtil.concat(byte1, new byte[]{-128}); - Assert.assertTrue(Bytes.compareTo(byte4, byte3) > 0); - } - - @Test - public void testMaxByteInBytesComparision(){ - int max = -1000000; + + @SuppressWarnings("serial") + @Test + public void testStartTimeInclusiveEndTimeExclusive() throws Exception { + EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestTimeSeriesAPIEntity.class); + hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily()); + + EntityDefinitionManager.registerEntity(TestTimeSeriesAPIEntity.class); + + final String cluster = "cluster1"; + final String datacenter = "dc1"; + String serviceName = "TestTimeSeriesAPIEntity"; + GenericEntityWriter 
writer = new GenericEntityWriter(serviceName); + List entities = new ArrayList(); + TestTimeSeriesAPIEntity entity = new TestTimeSeriesAPIEntity(); + long timestamp1 = DateTimeUtil.humanDateToSeconds("2014-04-08 03:00:00") * 1000; + LOG.info("First entity timestamp:" + timestamp1); + entity.setTimestamp(timestamp1); + entity.setTags(new HashMap() {{ + put("cluster", cluster); + put("datacenter", datacenter); + }}); + entity.setField7("field7"); + entities.add(entity); + + entity = new TestTimeSeriesAPIEntity(); + long timestamp2 = DateTimeUtil.humanDateToSeconds("2014-05-08 04:00:00") * 1000; + LOG.info("Second entity timestamp:" + timestamp2); + entity.setTimestamp(timestamp2); + entity.setTags(new HashMap() {{ + put("cluster", cluster); + put("datacenter", datacenter); + }}); + entity.setField7("field7_2"); + entities.add(entity); + writer.write(entities); + + // for timezone difference between UTC & localtime, enlarge the search range + long queryStartTimestamp = timestamp1 - 24 * 60 * 60 * 1000; + long queryEndTimestamp = timestamp1 + 24 * 60 * 60 * 1000; + LOG.info("Query start timestamp:" + queryStartTimestamp); + LOG.info("Query end timestamp:" + queryEndTimestamp); + + String format = "%s[@cluster=\"%s\" AND @datacenter=\"%s\"]{%s}"; + String query = String.format(format, serviceName, cluster, datacenter, "@field7"); + ListQueryCompiler comp = new ListQueryCompiler(query); + SearchCondition condition = new SearchCondition(); + condition.setFilter(comp.filter()); + condition.setQueryExpression(comp.getQueryExpression()); + condition.setOutputFields(comp.outputFields()); + + final List partitionValues = comp.getQueryPartitionValues(); + if (partitionValues != null) { + condition.setPartitionValues(Arrays.asList(partitionValues.get(0))); + } + + condition.setStartRowkey(null); + condition.setPageSize(Integer.MAX_VALUE); + condition.setStartTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(0)); + 
condition.setEndTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(queryEndTimestamp)); + + GenericEntityBatchReader reader = new GenericEntityBatchReader(serviceName, condition); + List list = reader.read(); + + Assert.assertEquals(1, list.size()); + Assert.assertEquals(timestamp1, list.get(0).getTimestamp()); + Assert.assertEquals("field7", list.get(0).getField7()); + + // for timezone difference between UTC & localtime, enlarge the search range + queryStartTimestamp = timestamp1 - 24 * 60 * 60 * 1000; + queryEndTimestamp = timestamp2 + 24 * 60 * 60 * 1000; // eagle timestamp is rounded to seconds + condition.setStartTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(queryStartTimestamp)); + condition.setEndTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(queryEndTimestamp)); + reader = new GenericEntityBatchReader(serviceName, condition); + list = reader.read(); + Assert.assertEquals(2, list.size()); + + queryStartTimestamp = timestamp1; + queryEndTimestamp = timestamp1; // eagle timestamp is rounded to seconds + condition.setStartTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(queryStartTimestamp)); + condition.setEndTime(DateTimeUtil.millisecondsToHumanDateWithSeconds(queryEndTimestamp)); + reader = new GenericEntityBatchReader(serviceName, condition); + list = reader.read(); + Assert.assertEquals(0, list.size()); + hbase.deleteTable(entityDefinition.getTable()); + + } + + @Test + public void testByteComparison() { + byte[] byte1 = new byte[] {-23, 12, 63}; + byte[] byte2 = ByteUtil.concat(byte1, new byte[] {0}); + Assert.assertTrue(Bytes.compareTo(byte1, byte2) < 0); + byte[] byte3 = ByteUtil.concat(byte1, new byte[] {127}); + Assert.assertTrue(Bytes.compareTo(byte2, byte3) < 0); + byte[] byte4 = ByteUtil.concat(byte1, new byte[] {-128}); + Assert.assertTrue(Bytes.compareTo(byte4, byte3) > 0); + } + + @Test + public void testMaxByteInBytesComparision() { + int max = -1000000; // int maxb = -1000000; - System.out.println("Byte MaxValue: " + 
Byte.MAX_VALUE); - System.out.println("Byte MaxValue: " + Byte.MIN_VALUE); - for(int i=-128; i<128; i++){ - byte b = (byte)i; - int tmp = b & 0xff; - max = Math.max(max, tmp); - } - System.out.println(max); - - byte b = -1; - System.out.println(b & 0xff); - } + System.out.println("Byte MaxValue: " + Byte.MAX_VALUE); + System.out.println("Byte MaxValue: " + Byte.MIN_VALUE); + for (int i = -128; i < 128; i++) { + byte b = (byte) i; + int tmp = b & 0xff; + max = Math.max(max, tmp); + } + System.out.println(max); + + byte b = -1; + System.out.println(b & 0xff); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/TestListQueryCompiler.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/TestListQueryCompiler.java index 341c97650a..e3a84d2dba 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/TestListQueryCompiler.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/TestListQueryCompiler.java @@ -15,16 +15,15 @@ * limitations under the License. 
*/ /** - * + * */ package org.apache.eagle.query; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; import org.apache.eagle.log.entity.test.TestLogAPIEntity; -import org.apache.eagle.query.parser.ORExpression; import org.apache.eagle.query.aggregate.AggregateFunctionType; -import org.apache.eagle.query.ListQueryCompiler; +import org.apache.eagle.query.parser.ORExpression; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -38,206 +37,208 @@ */ public class TestListQueryCompiler { - private static final Logger LOG = LoggerFactory.getLogger(TestListQueryCompiler.class); - - @Before - public void prepare() throws Exception{ - String[] partitions = new String[2]; - partitions[0] = "cluster"; - partitions[1] = "datacenter"; - EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); - EntityDefinition entityDef = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity"); - entityDef.setPartitions(partitions); - entityDef.setTimeSeries(true); - } - - /**************************************************************************************************/ - /*********************************** Test Expression In List Query*********************************/ - /**************************************************************************************************/ - - @Test - public void testListQueryWithoutExpression() throws Exception{ - String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND @field5 > 0.05]{@cluster, @field1}"; - ListQueryCompiler compiler = new ListQueryCompiler(query, false); - ORExpression filter = compiler.getQueryExpression(); - Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND @field5>0.05)"); - List aggFields = compiler.aggregateFields(); - Assert.assertTrue(aggFields == null); - List outputFields = compiler.outputFields(); - Assert.assertEquals(outputFields.size(), 
2); - Assert.assertTrue(outputFields.contains("cluster")); - Assert.assertTrue(outputFields.contains("field1")); - } - - @Test - public void testListQueryWithExpressionEndWithNumberInFilter() throws Exception{ - String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@field5 + @field6} > 0.05]{@cluster, @field1}"; - ListQueryCompiler compiler = new ListQueryCompiler(query, false); - ORExpression filter = compiler.getQueryExpression(); - Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND field5 + field6>0.05)"); - List aggFields = compiler.aggregateFields(); - Assert.assertTrue(aggFields == null); - List outputFields = compiler.outputFields(); - Assert.assertEquals(outputFields.size(), 2); - Assert.assertTrue(outputFields.contains("cluster")); - Assert.assertTrue(outputFields.contains("field1")); - } - - @Test - public void testListQueryWithExpressionEndWithRPARENInFilter() throws Exception{ - String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND (EXP{@field5 + @field6} > 0.05)]{@cluster, @field1}"; - ListQueryCompiler compiler = new ListQueryCompiler(query, false); - ORExpression filter = compiler.getQueryExpression(); - LOG.info(filter.toString()); - Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND field5 + field6>0.05)"); - List aggFields = compiler.aggregateFields(); - Assert.assertTrue(aggFields == null); - List outputFields = compiler.outputFields(); - Assert.assertEquals(outputFields.size(), 2); - Assert.assertTrue(outputFields.contains("cluster")); - Assert.assertTrue(outputFields.contains("field1")); - - query = "TestLogAPIEntity[(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND (EXP{@field5 + @field6} > 0.05))]{@cluster, @field1}"; - compiler = new ListQueryCompiler(query, false); - filter = compiler.getQueryExpression(); - Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" 
AND @datacenter=\"datacenter\" AND field5 + field6>0.05)"); - aggFields = compiler.aggregateFields(); - Assert.assertTrue(aggFields == null); - outputFields = compiler.outputFields(); - Assert.assertEquals(outputFields.size(), 2); - Assert.assertTrue(outputFields.contains("cluster")); - Assert.assertTrue(outputFields.contains("field1")); - } - - @Test - public void testListQueryWithExpressionEndWithRBRACEInFilter() throws Exception{ - String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@a + @b} > EXP{0.05 + @c + @d}]{@cluster, EXP{@a + @b}}"; - ListQueryCompiler compiler = new ListQueryCompiler(query, false); - ORExpression filter = compiler.getQueryExpression(); - Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND a + b>0.05 + c + d)"); - List aggFields = compiler.aggregateFields(); - Assert.assertTrue(aggFields == null); - List outputFields = compiler.outputFields(); + private static final Logger LOG = LoggerFactory.getLogger(TestListQueryCompiler.class); + + @Before + public void prepare() throws Exception { + String[] partitions = new String[2]; + partitions[0] = "cluster"; + partitions[1] = "datacenter"; + EntityDefinitionManager.registerEntity(TestLogAPIEntity.class); + EntityDefinition entityDef = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity"); + entityDef.setPartitions(partitions); + entityDef.setTimeSeries(true); + } + + /**************************************************************************************************/ + /*********************************** Test Expression In List Query*********************************/ + /**************************************************************************************************/ + + @Test + public void testListQueryWithoutExpression() throws Exception { + String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND @field5 > 0.05]{@cluster, @field1}"; + ListQueryCompiler compiler 
= new ListQueryCompiler(query, false); + ORExpression filter = compiler.getQueryExpression(); + Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND @field5>0.05)"); + List aggFields = compiler.aggregateFields(); + Assert.assertTrue(aggFields == null); + List outputFields = compiler.outputFields(); + Assert.assertEquals(outputFields.size(), 2); + Assert.assertTrue(outputFields.contains("cluster")); + Assert.assertTrue(outputFields.contains("field1")); + } + + @Test + public void testListQueryWithExpressionEndWithNumberInFilter() throws Exception { + String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@field5 + @field6} > 0.05]{@cluster, @field1}"; + ListQueryCompiler compiler = new ListQueryCompiler(query, false); + ORExpression filter = compiler.getQueryExpression(); + Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND field5 + field6>0.05)"); + List aggFields = compiler.aggregateFields(); + Assert.assertTrue(aggFields == null); + List outputFields = compiler.outputFields(); + Assert.assertEquals(outputFields.size(), 2); + Assert.assertTrue(outputFields.contains("cluster")); + Assert.assertTrue(outputFields.contains("field1")); + } + + @Test + public void testListQueryWithExpressionEndWithRPARENInFilter() throws Exception { + String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND (EXP{@field5 + @field6} > 0.05)]{@cluster, @field1}"; + ListQueryCompiler compiler = new ListQueryCompiler(query, false); + ORExpression filter = compiler.getQueryExpression(); + LOG.info(filter.toString()); + Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND field5 + field6>0.05)"); + List aggFields = compiler.aggregateFields(); + Assert.assertTrue(aggFields == null); + List outputFields = compiler.outputFields(); + Assert.assertEquals(outputFields.size(), 2); + 
Assert.assertTrue(outputFields.contains("cluster")); + Assert.assertTrue(outputFields.contains("field1")); + + query = "TestLogAPIEntity[(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND (EXP{@field5 + @field6} > 0.05))]{@cluster, @field1}"; + compiler = new ListQueryCompiler(query, false); + filter = compiler.getQueryExpression(); + Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND field5 + field6>0.05)"); + aggFields = compiler.aggregateFields(); + Assert.assertTrue(aggFields == null); + outputFields = compiler.outputFields(); + Assert.assertEquals(outputFields.size(), 2); + Assert.assertTrue(outputFields.contains("cluster")); + Assert.assertTrue(outputFields.contains("field1")); + } + + @Test + public void testListQueryWithExpressionEndWithRBRACEInFilter() throws Exception { + String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@a + @b} > EXP{0.05 + @c + @d}]{@cluster, EXP{@a + @b}}"; + ListQueryCompiler compiler = new ListQueryCompiler(query, false); + ORExpression filter = compiler.getQueryExpression(); + Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND a + b>0.05 + c + d)"); + List aggFields = compiler.aggregateFields(); + Assert.assertTrue(aggFields == null); + List outputFields = compiler.outputFields(); // Assert.assertEquals(outputFields.size(), 2); - Assert.assertTrue(outputFields.contains("cluster")); - Assert.assertTrue(outputFields.contains("EXP{a + b}")); - } - - /**************************************************************************************************/ - /*********************************** Test Expression In Group By Query*********************************/ - /**************************************************************************************************/ - - @Test - public void testGroupByQueryAggWithoutExpressionInAggFunc() throws Exception{ - String query = 
"TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@a + @b} > EXP{@c + @d} AND EXP{@a + @c} < EXP{@b + @d + 0.05}]<@cluster, @datacenter>{sum(@a), avg(@b)}"; - ListQueryCompiler compiler = new ListQueryCompiler(query, false); - ORExpression filter = compiler.getQueryExpression(); - LOG.info(filter.toString()); - Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND a + b>c + d AND a + c groupByFields = compiler.groupbyFields(); - Assert.assertEquals(groupByFields.size(), 2); - Assert.assertTrue(groupByFields.contains("cluster")); - Assert.assertTrue(groupByFields.contains("datacenter")); - - List functions = compiler.aggregateFunctionTypes(); - Assert.assertEquals(functions.size(), 2); - Assert.assertTrue(functions.contains(AggregateFunctionType.sum)); - Assert.assertTrue(functions.contains(AggregateFunctionType.avg)); - - List aggFields = compiler.aggregateFields(); - Assert.assertEquals(aggFields.size(), 2); - Assert.assertTrue(aggFields.contains("a")); - Assert.assertTrue(aggFields.contains("b")); - } - - @Test - public void testGroupByQueryAggWithExpressionInAggFunc() throws Exception{ - String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@a + @b} > EXP{@c + @d} AND EXP{@a + @c} < EXP{@b + @d + 0.07}]<@cluster, @datacenter>{sum(EXP{@a+@b+20.0}), avg(EXP{(@a+@c + 2.5)/@d}), count}"; - ListQueryCompiler compiler = new ListQueryCompiler(query, false); - ORExpression filter = compiler.getQueryExpression(); - LOG.info(filter.toString()); - Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND a + b>c + d AND a + c groupByFields = compiler.groupbyFields(); - Assert.assertEquals(groupByFields.size(), 2); - Assert.assertTrue(groupByFields.contains("cluster")); - Assert.assertTrue(groupByFields.contains("datacenter")); - - List functions = compiler.aggregateFunctionTypes(); - Assert.assertEquals(functions.size(), 3); - 
Assert.assertTrue(functions.contains(AggregateFunctionType.sum)); - Assert.assertTrue(functions.contains(AggregateFunctionType.avg)); - Assert.assertTrue(functions.contains(AggregateFunctionType.count)); - - List aggFields = compiler.aggregateFields(); - Assert.assertEquals(aggFields.size(), 3); - Assert.assertTrue(aggFields.contains("EXP{a+b+20.0}")); - Assert.assertTrue(aggFields.contains("EXP{(a+c + 2.5)/d}")); - Assert.assertTrue(aggFields.contains("count")); - } - - /**************************************************************************************************/ - /*********************************** Test Expression In Sort Query*********************************/ - /**************************************************************************************************/ - - @Test - public void testSortQueryWithoutExpressionInSort() throws Exception{ - String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@a + @b} > EXP{@c + @d} AND EXP{@a + @c} < EXP{@b + @d}]<@cluster, @datacenter>" - + "{sum(@a), count}.{sum(@a) asc}"; - ListQueryCompiler compiler = new ListQueryCompiler(query, false); - ORExpression filter = compiler.getQueryExpression(); - LOG.info(filter.toString()); - Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND a + b>c + d AND a + c groupByFields = compiler.groupbyFields(); - Assert.assertEquals(groupByFields.size(), 2); - Assert.assertTrue(groupByFields.contains("cluster")); - Assert.assertTrue(groupByFields.contains("datacenter")); - - List functions = compiler.aggregateFunctionTypes(); - Assert.assertEquals(functions.size(), 2); - Assert.assertTrue(functions.contains(AggregateFunctionType.sum)); - Assert.assertTrue(functions.contains(AggregateFunctionType.count)); - - List aggFields = compiler.aggregateFields(); - Assert.assertEquals(aggFields.size(), 2); - Assert.assertTrue(aggFields.contains("a")); - Assert.assertTrue(aggFields.contains("count")); - - List 
sortFields = compiler.sortFields(); - Assert.assertEquals(sortFields.size(), 1); - Assert.assertTrue(sortFields.contains("a")); - } - - @Test - public void testSortQuerySortWithExpressionInSort() throws Exception{ - String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@a + @b} > EXP{@c + @d} AND EXP{@a + @c} < EXP{@b + @d + 0.05}]<@cluster, @datacenter>" - + "{sum(EXP{@a+@b+0.07}), max(EXP{(@a+@c)/@d}), min(EXP{@a+@b})}.{sum(EXP{@a+@b+0.07}) asc}"; - ListQueryCompiler compiler = new ListQueryCompiler(query, false); - ORExpression filter = compiler.getQueryExpression(); - LOG.info(filter.toString()); - Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND a + b>c + d AND a + c groupByFields = compiler.groupbyFields(); - Assert.assertEquals(groupByFields.size(), 2); - Assert.assertTrue(groupByFields.contains("cluster")); - Assert.assertTrue(groupByFields.contains("datacenter")); - - List aggFields = compiler.aggregateFields(); - Assert.assertEquals(aggFields.size(), 3); - Assert.assertTrue(aggFields.contains("EXP{a+b+0.07}")); - Assert.assertTrue(aggFields.contains("EXP{(a+c)/d}")); - Assert.assertTrue(aggFields.contains("EXP{a+b}")); - - List functions = compiler.aggregateFunctionTypes(); - Assert.assertEquals(functions.size(), 3); - Assert.assertTrue(functions.contains(AggregateFunctionType.sum)); - Assert.assertTrue(functions.contains(AggregateFunctionType.max)); - Assert.assertTrue(functions.contains(AggregateFunctionType.min)); - - List sortFields = compiler.sortFields(); - Assert.assertEquals(sortFields.size(), 1); - Assert.assertTrue(sortFields.contains("EXP{a+b+0.07}")); - } + Assert.assertTrue(outputFields.contains("cluster")); + Assert.assertTrue(outputFields.contains("EXP{a + b}")); + } + + /**************************************************************************************************/ + /*********************************** Test Expression In Group By 
Query*********************************/ + /**************************************************************************************************/ + + @Test + public void testGroupByQueryAggWithoutExpressionInAggFunc() throws Exception { + String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@a + @b} > EXP{@c + @d} AND EXP{@a + @c} < EXP{@b + @d + 0.05}]<@cluster, @datacenter>{sum(@a), " + + "avg(@b)}"; + ListQueryCompiler compiler = new ListQueryCompiler(query, false); + ORExpression filter = compiler.getQueryExpression(); + LOG.info(filter.toString()); + Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND a + b>c + d AND a + c groupByFields = compiler.groupbyFields(); + Assert.assertEquals(groupByFields.size(), 2); + Assert.assertTrue(groupByFields.contains("cluster")); + Assert.assertTrue(groupByFields.contains("datacenter")); + + List functions = compiler.aggregateFunctionTypes(); + Assert.assertEquals(functions.size(), 2); + Assert.assertTrue(functions.contains(AggregateFunctionType.sum)); + Assert.assertTrue(functions.contains(AggregateFunctionType.avg)); + + List aggFields = compiler.aggregateFields(); + Assert.assertEquals(aggFields.size(), 2); + Assert.assertTrue(aggFields.contains("a")); + Assert.assertTrue(aggFields.contains("b")); + } + + @Test + public void testGroupByQueryAggWithExpressionInAggFunc() throws Exception { + String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@a + @b} > EXP{@c + @d} AND EXP{@a + @c} < EXP{@b + @d + 0.07}]<@cluster, @datacenter>{sum" + + "(EXP{@a+@b+20.0}), avg(EXP{(@a+@c + 2.5)/@d}), count}"; + ListQueryCompiler compiler = new ListQueryCompiler(query, false); + ORExpression filter = compiler.getQueryExpression(); + LOG.info(filter.toString()); + Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND a + b>c + d AND a + c groupByFields = 
compiler.groupbyFields(); + Assert.assertEquals(groupByFields.size(), 2); + Assert.assertTrue(groupByFields.contains("cluster")); + Assert.assertTrue(groupByFields.contains("datacenter")); + + List functions = compiler.aggregateFunctionTypes(); + Assert.assertEquals(functions.size(), 3); + Assert.assertTrue(functions.contains(AggregateFunctionType.sum)); + Assert.assertTrue(functions.contains(AggregateFunctionType.avg)); + Assert.assertTrue(functions.contains(AggregateFunctionType.count)); + + List aggFields = compiler.aggregateFields(); + Assert.assertEquals(aggFields.size(), 3); + Assert.assertTrue(aggFields.contains("EXP{a+b+20.0}")); + Assert.assertTrue(aggFields.contains("EXP{(a+c + 2.5)/d}")); + Assert.assertTrue(aggFields.contains("count")); + } + + /**************************************************************************************************/ + /*********************************** Test Expression In Sort Query*********************************/ + /**************************************************************************************************/ + + @Test + public void testSortQueryWithoutExpressionInSort() throws Exception { + String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@a + @b} > EXP{@c + @d} AND EXP{@a + @c} < EXP{@b + @d}]<@cluster, @datacenter>" + + "{sum(@a), count}.{sum(@a) asc}"; + ListQueryCompiler compiler = new ListQueryCompiler(query, false); + ORExpression filter = compiler.getQueryExpression(); + LOG.info(filter.toString()); + Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND a + b>c + d AND a + c groupByFields = compiler.groupbyFields(); + Assert.assertEquals(groupByFields.size(), 2); + Assert.assertTrue(groupByFields.contains("cluster")); + Assert.assertTrue(groupByFields.contains("datacenter")); + + List functions = compiler.aggregateFunctionTypes(); + Assert.assertEquals(functions.size(), 2); + 
Assert.assertTrue(functions.contains(AggregateFunctionType.sum)); + Assert.assertTrue(functions.contains(AggregateFunctionType.count)); + + List aggFields = compiler.aggregateFields(); + Assert.assertEquals(aggFields.size(), 2); + Assert.assertTrue(aggFields.contains("a")); + Assert.assertTrue(aggFields.contains("count")); + + List sortFields = compiler.sortFields(); + Assert.assertEquals(sortFields.size(), 1); + Assert.assertTrue(sortFields.contains("a")); + } + + @Test + public void testSortQuerySortWithExpressionInSort() throws Exception { + String query = "TestLogAPIEntity[@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND EXP{@a + @b} > EXP{@c + @d} AND EXP{@a + @c} < EXP{@b + @d + 0.05}]<@cluster, @datacenter>" + + "{sum(EXP{@a+@b+0.07}), max(EXP{(@a+@c)/@d}), min(EXP{@a+@b})}.{sum(EXP{@a+@b+0.07}) asc}"; + ListQueryCompiler compiler = new ListQueryCompiler(query, false); + ORExpression filter = compiler.getQueryExpression(); + LOG.info(filter.toString()); + Assert.assertEquals(filter.toString(), "(@cluster=\"cluster\" AND @datacenter=\"datacenter\" AND a + b>c + d AND a + c groupByFields = compiler.groupbyFields(); + Assert.assertEquals(groupByFields.size(), 2); + Assert.assertTrue(groupByFields.contains("cluster")); + Assert.assertTrue(groupByFields.contains("datacenter")); + + List aggFields = compiler.aggregateFields(); + Assert.assertEquals(aggFields.size(), 3); + Assert.assertTrue(aggFields.contains("EXP{a+b+0.07}")); + Assert.assertTrue(aggFields.contains("EXP{(a+c)/d}")); + Assert.assertTrue(aggFields.contains("EXP{a+b}")); + + List functions = compiler.aggregateFunctionTypes(); + Assert.assertEquals(functions.size(), 3); + Assert.assertTrue(functions.contains(AggregateFunctionType.sum)); + Assert.assertTrue(functions.contains(AggregateFunctionType.max)); + Assert.assertTrue(functions.contains(AggregateFunctionType.min)); + + List sortFields = compiler.sortFields(); + Assert.assertEquals(sortFields.size(), 1); + 
Assert.assertTrue(sortFields.contains("EXP{a+b+0.07}")); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestGroupbyKey.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestGroupbyKey.java index 2683220388..92eee428fa 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestGroupbyKey.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestGroupbyKey.java @@ -20,54 +20,54 @@ import org.junit.Test; public class TestGroupbyKey { - @Test - public void testGroupbyKey(){ - GroupbyKey key1 = new GroupbyKey(); - Assert.assertEquals(0, key1.getValue().size()); - - key1.addValue(new byte[]{1, 3, 5}); - Assert.assertEquals(1, key1.getValue().size()); - - key1.clear(); - Assert.assertEquals(0, key1.getValue().size()); - - key1.addValue(new byte[]{1, 3, 5}); - GroupbyKey key2 = new GroupbyKey(); - key2.addValue(new byte[]{1, 3, 5}); - Assert.assertEquals(key1, key2); - - GroupbyKey key3 = new GroupbyKey(key1); - Assert.assertEquals(key1, key3); - Assert.assertEquals(key2, key3); - } - - @Test - public void testGroupbyKeyComparator(){ - GroupbyKeyComparator comparator = new GroupbyKeyComparator(); - GroupbyKey key1 = new GroupbyKey(); - key1.addValue("hello".getBytes()); - GroupbyKey key2 = new GroupbyKey(); - key2.addValue("world".getBytes()); - int r = comparator.compare(key1, key2); - Assert.assertTrue(r < 0); - - key2.clear(); - key2.addValue("friend".getBytes()); - r = comparator.compare(key1, key2); - Assert.assertTrue(r > 0); - - key2.clear(); - key2.addValue("hello".getBytes()); - r = comparator.compare(key1, key2); - Assert.assertTrue(r == 0); - - key1.clear(); - key2.clear(); - key1.addValue("hello".getBytes()); - key1.addValue("tom".getBytes()); - key2.addValue("hello".getBytes()); - key2.addValue("jackie".getBytes()); - r = comparator.compare(key1, key2); - 
Assert.assertTrue(r > 0); - } + @Test + public void testGroupbyKey() { + GroupbyKey key1 = new GroupbyKey(); + Assert.assertEquals(0, key1.getValue().size()); + + key1.addValue(new byte[] {1, 3, 5}); + Assert.assertEquals(1, key1.getValue().size()); + + key1.clear(); + Assert.assertEquals(0, key1.getValue().size()); + + key1.addValue(new byte[] {1, 3, 5}); + GroupbyKey key2 = new GroupbyKey(); + key2.addValue(new byte[] {1, 3, 5}); + Assert.assertEquals(key1, key2); + + GroupbyKey key3 = new GroupbyKey(key1); + Assert.assertEquals(key1, key3); + Assert.assertEquals(key2, key3); + } + + @Test + public void testGroupbyKeyComparator() { + GroupbyKeyComparator comparator = new GroupbyKeyComparator(); + GroupbyKey key1 = new GroupbyKey(); + key1.addValue("hello".getBytes()); + GroupbyKey key2 = new GroupbyKey(); + key2.addValue("world".getBytes()); + int r = comparator.compare(key1, key2); + Assert.assertTrue(r < 0); + + key2.clear(); + key2.addValue("friend".getBytes()); + r = comparator.compare(key1, key2); + Assert.assertTrue(r > 0); + + key2.clear(); + key2.addValue("hello".getBytes()); + r = comparator.compare(key1, key2); + Assert.assertTrue(r == 0); + + key1.clear(); + key2.clear(); + key1.addValue("hello".getBytes()); + key1.addValue("tom".getBytes()); + key2.addValue("hello".getBytes()); + key2.addValue("jackie".getBytes()); + r = comparator.compare(key1, key2); + Assert.assertTrue(r > 0); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestRawAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestRawAggregator.java index 41bc18a51c..125a077e09 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestRawAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestRawAggregator.java @@ -16,12 +16,8 @@ */ package 
org.apache.eagle.query.aggregate.raw; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - +import org.apache.eagle.common.ByteUtil; +import org.apache.eagle.log.entity.meta.*; import org.apache.eagle.query.aggregate.AggregateFunctionType; import org.junit.Assert; import org.junit.Before; @@ -29,489 +25,485 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.eagle.log.entity.meta.EntityDefinition; -import org.apache.eagle.log.entity.meta.EntitySerDeser; -import org.apache.eagle.log.entity.meta.IntSerDeser; -import org.apache.eagle.log.entity.meta.LongSerDeser; -import org.apache.eagle.log.entity.meta.Qualifier; -import org.apache.eagle.common.ByteUtil; +import java.util.*; public class TestRawAggregator { - private static final Logger LOG = LoggerFactory.getLogger(TestRawAggregator.class); - - private EntityDefinition ed; - @SuppressWarnings("unchecked") - @Before - public void setup(){ - ed = new EntityDefinition(); - Qualifier q = new Qualifier(); - q.setDisplayName("numHosts"); - q.setQualifierName("a"); - EntitySerDeser serDeser = new IntSerDeser(); - q.setSerDeser((EntitySerDeser)(serDeser)); - ed.getDisplayNameMap().put("numHosts", q); - q = new Qualifier(); - q.setDisplayName("numClusters"); - q.setQualifierName("b"); - serDeser = new LongSerDeser(); - q.setSerDeser((EntitySerDeser)(serDeser)); - ed.getDisplayNameMap().put("numClusters", q); - } - - private Map createQualifiers(final String cluster, final String datacenter, final String rack, int numHosts, long numClusters){ - Map qualifiers = new HashMap(); - qualifiers.put("cluster", cluster == null ? null : cluster.getBytes()); - qualifiers.put("datacenter", datacenter == null ? null : datacenter.getBytes()); - qualifiers.put("rack", rack == null ? 
null : rack.getBytes()); - qualifiers.put("numHosts", ByteUtil.intToBytes(numHosts)); - qualifiers.put("numClusters", ByteUtil.longToBytes(numClusters)); - return qualifiers; - } - - @Test - public void testZeroGroupbyFieldSingleFunctionForSummary(){ - List> entities = new ArrayList>(); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); - entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); - entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); - entities.add(createQualifiers("cluster2", "dc1", "rack126", 15, 2)); - - RawAggregator agg = new RawAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 1); - - double total = 0.0; - for(Map e : entities){ - int a = ByteUtil.bytesToInt(e.get("numHosts")); - total += a; - } - - Assert.assertEquals(result.get(new ArrayList()).get(0).doubleValue(), total, 0.00000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new RawAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 1); - double total = 0.0; - for(Map e : entities){ - long a = ByteUtil.bytesToLong(e.get("numClusters")); - total += a; - } - Assert.assertEquals(result.get(new ArrayList()).get(0).doubleValue(), total, 0.00000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new RawAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } 
- Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 1); - Assert.assertEquals(result.get(new ArrayList()).get(0).doubleValue(), 5, 0.0000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - @Test - public void testSingleGroupbyFieldSingleFunctionForSummary(){ - List> entities = new ArrayList>(); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); - entities.add(createQualifiers("cluster1", "dc2", "rack128", 10, 0)); - entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); - entities.add(createQualifiers("cluster2", "dc1", "rack126", 15, 2)); - - RawAggregator agg = new RawAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - double total1 = 0.0; - total1 += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); - total1 += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); - total1 += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); - - double total2 = 0.0; - total2 += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); - total2 += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0).doubleValue(), total1, 0.0000000000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), total2, 0.00000000000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new RawAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - 
Assert.assertEquals(result.size(), 2); - double total1 = 0.0; - total1 += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); - total1 += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); - total1 += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); - total1 += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); - - double total2 = 0.0; - total2 += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), total1, 0.000000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), total2, 0.000000000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new RawAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - double total1 = 0.0; - total1 += ByteUtil.bytesToLong(entities.get(0).get("numClusters")); - total1 += ByteUtil.bytesToLong(entities.get(1).get("numClusters")); - total1 += ByteUtil.bytesToLong(entities.get(2).get("numClusters")); - - double total2 = 0.0; - total2 += ByteUtil.bytesToLong(entities.get(3).get("numClusters")); - total2 += ByteUtil.bytesToLong(entities.get(4).get("numClusters")); - - Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), total1, 0.0000000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), total2, 0.0000000000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new RawAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - double 
total1 = 0.0; - total1 += ByteUtil.bytesToLong(entities.get(0).get("numClusters")); - total1 += ByteUtil.bytesToLong(entities.get(1).get("numClusters")); - total1 += ByteUtil.bytesToLong(entities.get(3).get("numClusters")); - total1 += ByteUtil.bytesToLong(entities.get(4).get("numClusters")); - - double total2 = 0.0; - total2 += ByteUtil.bytesToLong(entities.get(2).get("numClusters")); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), total1, 0.00000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), total2, 0.00000000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - - @Test - public void testSingleGroupbyFieldSingleFunctionForCount(){ - List> entities = new ArrayList>(); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); - entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); - entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); - entities.add(createQualifiers("cluster2", "dc2", "rack126", 15, 2)); - - RawAggregator agg = new RawAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - double total1 = 0.0; - total1 += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); - total1 += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); - total1 += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); - - double total2 = 0.0; - total2 += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); - total2 += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); - - Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), total1, 0.0000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), 
total2, 0.0000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new RawAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(4), 0.00000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double)(1), 0.00000000000000000000001); - }catch(Exception ex){ - LOG.error("can not aggregate", ex); - Assert.fail("can not aggregate"); - } - } - - @Test - public void testMultipleFieldsSingleFunctionForSummary(){ - List> entities = new ArrayList>(); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); - entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); - entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); - entities.add(createQualifiers("cluster2", "dc1", "rack126", 15, 2)); - entities.add(createQualifiers("cluster2", null, "rack126", 1, 3)); - - RawAggregator agg = new RawAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(3, result.size()); - double total = 0.0; - total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), total, 0.00000000000000000000000001); - - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); - total += 
ByteUtil.bytesToInt(entities.get(4).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), total, 0.0000000000000000000000001); - - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), total, 0.0000000000000000000000001); - - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(5).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), total, 0.0000000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new RawAggregator(Arrays.asList("cluster", "datacenter", "rack"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(5, result.size()); - double total = 0.0; - total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), total, 0.0000000000000000000000001); - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), total, 0.0000000000000000000000001); - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), total, 0.0000000000000000000000001); - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), total, 0.0000000000000000000000001); - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(5).get("numHosts")); - 
Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), total, 0.0000000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - @Test - public void testMultipleFieldsSingleFunctionForCount(){ - List> entities = new ArrayList>(); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); - entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); - entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); - entities.add(createQualifiers("cluster2", "dc1", "rack126", 15, 2)); - entities.add(createQualifiers("cluster2", null, "rack126", 1, 3)); - - RawAggregator agg = new RawAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(3, result.size()); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), (double)(3), 0.00000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), (double)(2), 0.0000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), (double)(1), 0.000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new RawAggregator(Arrays.asList("cluster", "datacenter", "rack"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(5, result.size()); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), (double)(2), 0.0000000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", 
"rack128")).get(0), (double)(1), 0.0000000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), (double)(1), 0.0000000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), (double)(1), 0.0000000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), (double)(1), 0.0000000000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - @Test - public void testSingleGroupbyFieldMultipleFunctions(){ - List> entities = new ArrayList>(); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); - entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); - entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); - entities.add(createQualifiers("cluster2", "dc2", "rack126", 15, 2)); - - RawAggregator agg = new RawAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.count), - Arrays.asList("numHosts", "*"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - double total = 0.0; - total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), total, 0.0000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(1), (double)(3), 0.00000000000000000000000001); - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), 
total, 0.0000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(1), (double)(2), 0.0000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new RawAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count, AggregateFunctionType.sum), Arrays.asList("*", "numHosts"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - double total = 0.0; - total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(4), 0.00000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), total, 0.00000000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new RawAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count, AggregateFunctionType.sum, AggregateFunctionType.sum), - Arrays.asList("*", "numHosts", "numClusters"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(4), 0.000000000000000000000000001); - double total = 0.0; - total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), total, 0.0000000000000000000000000000001); - total = 
0.0; - total += ByteUtil.bytesToLong(entities.get(0).get("numClusters")); - total += ByteUtil.bytesToLong(entities.get(1).get("numClusters")); - total += ByteUtil.bytesToLong(entities.get(2).get("numClusters")); - total += ByteUtil.bytesToLong(entities.get(3).get("numClusters")); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(2), total, 0.00000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(4), 0.000000000000000000001); - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("dc2")).get(1), total, 0.00000000000000000000000000001); - total = 0.0; - total += ByteUtil.bytesToLong(entities.get(4).get("numClusters")); - Assert.assertEquals(result.get(Arrays.asList("dc2")).get(2), total, 0.000000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - @Test - public void testMultipleGroupbyFieldsMultipleFunctions(){ - List> entities = new ArrayList>(); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); - entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); - entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); - entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); - entities.add(createQualifiers("cluster2", "dc1", "rack126", 15, 2)); - - RawAggregator agg = new RawAggregator(Arrays.asList("cluster", "rack"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.count), - Arrays.asList("numHosts", "*"), ed); - try{ - for(Map e : entities){ - agg.qualifierCreated(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 4); - double total = 0.0; - total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); - total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(0), total, 
0.000000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(1), (double)(2), 0.00000000000000000000000001); - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(0), total, 0.00000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(1), (double)(1), 0.00000000000000000000000001); - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(0), total, 0.000000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(1), (double)(1), 0.0000000000000000000000001); - total = 0.0; - total += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(0), total, 0.00000000000000000000000001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(1), (double)(1), 0.000000000000000000000000001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } + private static final Logger LOG = LoggerFactory.getLogger(TestRawAggregator.class); + + private EntityDefinition ed; + + @SuppressWarnings("unchecked") + @Before + public void setup() { + ed = new EntityDefinition(); + Qualifier q = new Qualifier(); + q.setDisplayName("numHosts"); + q.setQualifierName("a"); + EntitySerDeser serDeser = new IntSerDeser(); + q.setSerDeser((EntitySerDeser) (serDeser)); + ed.getDisplayNameMap().put("numHosts", q); + q = new Qualifier(); + q.setDisplayName("numClusters"); + q.setQualifierName("b"); + serDeser = new LongSerDeser(); + q.setSerDeser((EntitySerDeser) (serDeser)); + ed.getDisplayNameMap().put("numClusters", q); + } + + private Map createQualifiers(final String cluster, final String datacenter, final String rack, int numHosts, 
long numClusters) { + Map qualifiers = new HashMap(); + qualifiers.put("cluster", cluster == null ? null : cluster.getBytes()); + qualifiers.put("datacenter", datacenter == null ? null : datacenter.getBytes()); + qualifiers.put("rack", rack == null ? null : rack.getBytes()); + qualifiers.put("numHosts", ByteUtil.intToBytes(numHosts)); + qualifiers.put("numClusters", ByteUtil.longToBytes(numClusters)); + return qualifiers; + } + + @Test + public void testZeroGroupbyFieldSingleFunctionForSummary() { + List> entities = new ArrayList>(); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); + entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); + entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); + entities.add(createQualifiers("cluster2", "dc1", "rack126", 15, 2)); + + RawAggregator agg = new RawAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 1); + + double total = 0.0; + for (Map e : entities) { + int a = ByteUtil.bytesToInt(e.get("numHosts")); + total += a; + } + + Assert.assertEquals(result.get(new ArrayList()).get(0).doubleValue(), total, 0.00000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new RawAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 1); + double total = 0.0; + for (Map e : entities) { + long a = ByteUtil.bytesToLong(e.get("numClusters")); + total += a; + } + Assert.assertEquals(result.get(new ArrayList()).get(0).doubleValue(), total, 0.00000000000000000001); + } catch 
(Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new RawAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 1); + Assert.assertEquals(result.get(new ArrayList()).get(0).doubleValue(), 5, 0.0000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + @Test + public void testSingleGroupbyFieldSingleFunctionForSummary() { + List> entities = new ArrayList>(); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); + entities.add(createQualifiers("cluster1", "dc2", "rack128", 10, 0)); + entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); + entities.add(createQualifiers("cluster2", "dc1", "rack126", 15, 2)); + + RawAggregator agg = new RawAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + double total1 = 0.0; + total1 += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); + total1 += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); + total1 += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); + + double total2 = 0.0; + total2 += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); + total2 += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0).doubleValue(), total1, 0.0000000000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), total2, 0.00000000000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + 
Assert.fail("Can not aggregate"); + } + + agg = new RawAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + double total1 = 0.0; + total1 += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); + total1 += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); + total1 += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); + total1 += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); + + double total2 = 0.0; + total2 += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), total1, 0.000000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), total2, 0.000000000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new RawAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + double total1 = 0.0; + total1 += ByteUtil.bytesToLong(entities.get(0).get("numClusters")); + total1 += ByteUtil.bytesToLong(entities.get(1).get("numClusters")); + total1 += ByteUtil.bytesToLong(entities.get(2).get("numClusters")); + + double total2 = 0.0; + total2 += ByteUtil.bytesToLong(entities.get(3).get("numClusters")); + total2 += ByteUtil.bytesToLong(entities.get(4).get("numClusters")); + + Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), total1, 0.0000000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), total2, 0.0000000000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + 
+ agg = new RawAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + double total1 = 0.0; + total1 += ByteUtil.bytesToLong(entities.get(0).get("numClusters")); + total1 += ByteUtil.bytesToLong(entities.get(1).get("numClusters")); + total1 += ByteUtil.bytesToLong(entities.get(3).get("numClusters")); + total1 += ByteUtil.bytesToLong(entities.get(4).get("numClusters")); + + double total2 = 0.0; + total2 += ByteUtil.bytesToLong(entities.get(2).get("numClusters")); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), total1, 0.00000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), total2, 0.00000000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + + @Test + public void testSingleGroupbyFieldSingleFunctionForCount() { + List> entities = new ArrayList>(); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); + entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); + entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); + entities.add(createQualifiers("cluster2", "dc2", "rack126", 15, 2)); + + RawAggregator agg = new RawAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + double total1 = 0.0; + total1 += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); + total1 += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); + total1 += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); + + double total2 = 0.0; + total2 += 
ByteUtil.bytesToInt(entities.get(3).get("numHosts")); + total2 += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); + + Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), total1, 0.0000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), total2, 0.0000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new RawAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double) (4), 0.00000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double) (1), 0.00000000000000000000001); + } catch (Exception ex) { + LOG.error("can not aggregate", ex); + Assert.fail("can not aggregate"); + } + } + + @Test + public void testMultipleFieldsSingleFunctionForSummary() { + List> entities = new ArrayList>(); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); + entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); + entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); + entities.add(createQualifiers("cluster2", "dc1", "rack126", 15, 2)); + entities.add(createQualifiers("cluster2", null, "rack126", 1, 3)); + + RawAggregator agg = new RawAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(3, result.size()); + double total = 0.0; + total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); + 
total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), total, 0.00000000000000000000000001); + + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), total, 0.0000000000000000000000001); + + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), total, 0.0000000000000000000000001); + + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(5).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), total, 0.0000000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new RawAggregator(Arrays.asList("cluster", "datacenter", "rack"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(5, result.size()); + double total = 0.0; + total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), total, 0.0000000000000000000000001); + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), total, 0.0000000000000000000000001); + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), total, 0.0000000000000000000000001); + total = 0.0; + total += 
ByteUtil.bytesToInt(entities.get(4).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), total, 0.0000000000000000000000001); + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(5).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), total, 0.0000000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + @Test + public void testMultipleFieldsSingleFunctionForCount() { + List> entities = new ArrayList>(); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); + entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); + entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); + entities.add(createQualifiers("cluster2", "dc1", "rack126", 15, 2)); + entities.add(createQualifiers("cluster2", null, "rack126", 1, 3)); + + RawAggregator agg = new RawAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(3, result.size()); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), (double) (3), 0.00000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), (double) (2), 0.0000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), (double) (1), 0.000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new RawAggregator(Arrays.asList("cluster", "datacenter", "rack"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, 
List> result = agg.result(); + Assert.assertEquals(5, result.size()); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), (double) (2), 0.0000000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), (double) (1), 0.0000000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), (double) (1), 0.0000000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), (double) (1), 0.0000000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), (double) (1), 0.0000000000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + @Test + public void testSingleGroupbyFieldMultipleFunctions() { + List> entities = new ArrayList>(); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); + entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); + entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); + entities.add(createQualifiers("cluster2", "dc2", "rack126", 15, 2)); + + RawAggregator agg = new RawAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.count), + Arrays.asList("numHosts", "*"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + double total = 0.0; + total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), total, 0.0000000000000000000000001); + 
Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(1), (double) (3), 0.00000000000000000000000001); + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), total, 0.0000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(1), (double) (2), 0.0000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new RawAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count, AggregateFunctionType.sum), Arrays.asList("*", "numHosts"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + double total = 0.0; + total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double) (4), 0.00000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), total, 0.00000000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new RawAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count, AggregateFunctionType.sum, AggregateFunctionType.sum), + Arrays.asList("*", "numHosts", "numClusters"), ed); + try { + for (Map e : entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double) (4), 0.000000000000000000000000001); + double total = 0.0; + total += 
ByteUtil.bytesToInt(entities.get(0).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), total, 0.0000000000000000000000000000001); + total = 0.0; + total += ByteUtil.bytesToLong(entities.get(0).get("numClusters")); + total += ByteUtil.bytesToLong(entities.get(1).get("numClusters")); + total += ByteUtil.bytesToLong(entities.get(2).get("numClusters")); + total += ByteUtil.bytesToLong(entities.get(3).get("numClusters")); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(2), total, 0.00000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double) (4), 0.000000000000000000001); + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("dc2")).get(1), total, 0.00000000000000000000000000001); + total = 0.0; + total += ByteUtil.bytesToLong(entities.get(4).get("numClusters")); + Assert.assertEquals(result.get(Arrays.asList("dc2")).get(2), total, 0.000000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + @Test + public void testMultipleGroupbyFieldsMultipleFunctions() { + List> entities = new ArrayList>(); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 12, 2)); + entities.add(createQualifiers("cluster1", "dc1", "rack123", 20, 1)); + entities.add(createQualifiers("cluster1", "dc1", "rack128", 10, 0)); + entities.add(createQualifiers("cluster2", "dc1", "rack125", 9, 2)); + entities.add(createQualifiers("cluster2", "dc1", "rack126", 15, 2)); + + RawAggregator agg = new RawAggregator(Arrays.asList("cluster", "rack"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.count), + Arrays.asList("numHosts", "*"), ed); + try { + for (Map e : 
entities) { + agg.qualifierCreated(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 4); + double total = 0.0; + total += ByteUtil.bytesToInt(entities.get(0).get("numHosts")); + total += ByteUtil.bytesToInt(entities.get(1).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(0), total, 0.000000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(1), (double) (2), 0.00000000000000000000000001); + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(2).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(0), total, 0.00000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(1), (double) (1), 0.00000000000000000000000001); + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(3).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(0), total, 0.000000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(1), (double) (1), 0.0000000000000000000000001); + total = 0.0; + total += ByteUtil.bytesToInt(entities.get(4).get("numHosts")); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(0), total, 0.00000000000000000000000001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(1), (double) (1), 0.000000000000000000000000001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestRawHBaseLogReaderAndAgg.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestRawHBaseLogReaderAndAgg.java index a304ea9527..789ec1796e 100644 --- 
a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestRawHBaseLogReaderAndAgg.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/raw/TestRawHBaseLogReaderAndAgg.java @@ -19,8 +19,8 @@ import org.junit.Test; public class TestRawHBaseLogReaderAndAgg { - @Test - public void testRawReaderAndAgg(){ - - } + @Test + public void testRawReaderAndAgg() { + + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestAggregator.java index 7b1932a46f..cf2ca8a74c 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestAggregator.java @@ -16,236 +16,232 @@ */ package org.apache.eagle.query.aggregate.test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; - -import org.junit.Assert; - +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.query.aggregate.*; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.annotate.JsonProperty; import org.codehaus.jackson.map.ObjectMapper; +import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.query.aggregate.Aggregator; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; public class TestAggregator { - private final static Logger LOG = LoggerFactory.getLogger(TestAggregator.class); - - public static class AggregatedSampleAPIEntityFactory implements AggregateAPIEntityFactory { - @Override - public AggregateAPIEntity create(){ - 
return new AggregatedSampleAPIEntity(); - } - } - - - public static class TestAPIEntity extends TaggedLogAPIEntity{ - private String numTotalAlerts; - private String usedCapacity; - private String status; - - public String getStatus() { - return status; - } - - public void setStatus(String status) { - this.status = status; - } - - public String getNumTotalAlerts() { - return numTotalAlerts; - } - - public void setNumTotalAlerts(String numTotalAlerts) { - this.numTotalAlerts = numTotalAlerts; - } - - public String getUsedCapacity() { - return usedCapacity; - } - - public void setUsedCapacity(String usedCapacity) { - this.usedCapacity = usedCapacity; - } - } - - - - public static class AggregatedSampleAPIEntity extends AggregateAPIEntity{ - private long numTotalAlerts; - - @JsonProperty("nTA") - public long getNumTotalAlerts() { - return numTotalAlerts; - } - - public void setNumTotalAlerts(long numTotalAlerts) { - this.numTotalAlerts = numTotalAlerts; - } - } - - @Test - public void testAggregate(){ - try{ - final AggregatedSampleAPIEntity root = new AggregatedSampleAPIEntity(); - List sumFunctionFields = Arrays.asList("numTotalAlerts"); - boolean counting = true; - List groupbys = Arrays.asList(Aggregator.GROUPBY_ROOT_FIELD_NAME, "cluster"); - List sortFieldOrders = new ArrayList(); - sortFieldOrders.add(new AggregateParams.SortFieldOrder("numTotalAlerts", false)); - Aggregator agg = new Aggregator(new AggregatedSampleAPIEntityFactory(), root, groupbys, counting, sumFunctionFields); - List list = new ArrayList(); - TestAPIEntity entity = new TestAPIEntity(); - entity.setTags(new HashMap()); - entity.getTags().put("category", "checkHadoopFS"); - entity.getTags().put("rack", "rack123"); - entity.getTags().put("cluster", "cluster1"); - entity.setNumTotalAlerts("123"); - entity.setUsedCapacity("12.5"); - entity.setStatus("live"); - list.add(entity); - - TestAPIEntity entity2 = new TestAPIEntity(); - entity2.setTags(new HashMap()); - entity2.getTags().put("category", 
"checkHadoopFS"); - entity2.getTags().put("rack", "rack124"); - entity2.getTags().put("cluster", "cluster2"); - entity2.setNumTotalAlerts("35"); - entity2.setUsedCapacity("32.1"); - entity2.setStatus("dead"); - list.add(entity2); - - TestAPIEntity entity3 = new TestAPIEntity(); - entity3.setTags(new HashMap()); - entity3.getTags().put("category", "checkHadoopFS"); - // entity3.getTags().put("rack", "rack124"); - entity3.getTags().put("cluster", "cluster2"); - entity3.setNumTotalAlerts("11"); - entity3.setUsedCapacity("82.11"); - entity3.setStatus("live"); - list.add(entity3); - - TestAPIEntity entity4 = new TestAPIEntity(); - entity4.setTags(new HashMap()); - entity4.getTags().put("category", "diskfailure"); - entity4.getTags().put("rack", "rack124"); - entity4.getTags().put("cluster", "cluster2"); - entity4.setNumTotalAlerts("61"); - entity4.setUsedCapacity("253.2"); - entity4.setStatus("dead"); - list.add(entity4); - - long numTotalAlerts = 0; - for(TestAPIEntity e : list){ - agg.accumulate(e); - numTotalAlerts += Long.valueOf(e.getNumTotalAlerts()); - } - - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - String result = null; - AggregatedSampleAPIEntity toBeVerified = (AggregatedSampleAPIEntity)root.getEntityList().get(Aggregator.GROUPBY_ROOT_FIELD_VALUE); - result = mapper.writeValueAsString(toBeVerified); - - Assert.assertEquals(2, toBeVerified.getNumDirectDescendants()); - Assert.assertEquals(4, toBeVerified.getNumTotalDescendants()); - Assert.assertEquals(numTotalAlerts, toBeVerified.getNumTotalAlerts()); - - LOG.info(result); - - PostAggregateSorting.sort(root, sortFieldOrders); - toBeVerified = (AggregatedSampleAPIEntity)root.getSortedList().get(0); - result = mapper.writeValueAsString(toBeVerified); - LOG.info(result); - }catch(Exception ex){ - LOG.error("Test aggregator fails", ex); - Assert.fail("Test aggregator fails"); - } - } - - @Test - public void testUnassigned(){ - // rack is unassigned - try{ - final 
AggregatedSampleAPIEntity root = new AggregatedSampleAPIEntity(); - boolean counting = true; - List groupbys = Arrays.asList(Aggregator.GROUPBY_ROOT_FIELD_NAME, "rack"); - List sortFieldOrders = new ArrayList(); - sortFieldOrders.add(new AggregateParams.SortFieldOrder("count", false)); - sortFieldOrders.add(new AggregateParams.SortFieldOrder("key", false)); - Aggregator agg = new Aggregator(new AggregatedSampleAPIEntityFactory(), root, groupbys, counting, new ArrayList()); - List list = new ArrayList(); - TestAPIEntity entity = new TestAPIEntity(); - entity.setTags(new HashMap()); - entity.getTags().put("category", "checkHadoopFS"); - entity.getTags().put("rack", "rack123"); - entity.getTags().put("cluster", "cluster1"); - entity.setNumTotalAlerts("123"); - entity.setUsedCapacity("12.5"); - entity.setStatus("live"); - list.add(entity); - - TestAPIEntity entity2 = new TestAPIEntity(); - entity2.setTags(new HashMap()); - entity2.getTags().put("category", "checkHadoopFS"); - entity2.getTags().put("rack", "rack124"); - entity2.getTags().put("cluster", "cluster2"); - entity2.setNumTotalAlerts("35"); - entity2.setUsedCapacity("32.1"); - entity2.setStatus("dead"); - list.add(entity2); - - TestAPIEntity entity3 = new TestAPIEntity(); - entity3.setTags(new HashMap()); - entity3.getTags().put("category", "checkHadoopFS"); - // entity3.getTags().put("rack", "rack124"); - entity3.getTags().put("cluster", "cluster2"); - entity3.setNumTotalAlerts("11"); - entity3.setUsedCapacity("82.11"); - entity3.setStatus("live"); - list.add(entity3); - - TestAPIEntity entity4 = new TestAPIEntity(); - entity4.setTags(new HashMap()); - entity4.getTags().put("category", "diskfailure"); - entity4.getTags().put("rack", "rack124"); - entity4.getTags().put("cluster", "cluster2"); - entity4.setNumTotalAlerts("61"); - entity4.setUsedCapacity("253.2"); - entity4.setStatus("dead"); - list.add(entity4); - + private static final Logger LOG = LoggerFactory.getLogger(TestAggregator.class); + + public 
static class AggregatedSampleAPIEntityFactory implements AggregateAPIEntityFactory { + @Override + public AggregateAPIEntity create() { + return new AggregatedSampleAPIEntity(); + } + } + + + public static class TestAPIEntity extends TaggedLogAPIEntity { + private String numTotalAlerts; + private String usedCapacity; + private String status; + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public String getNumTotalAlerts() { + return numTotalAlerts; + } + + public void setNumTotalAlerts(String numTotalAlerts) { + this.numTotalAlerts = numTotalAlerts; + } + + public String getUsedCapacity() { + return usedCapacity; + } + + public void setUsedCapacity(String usedCapacity) { + this.usedCapacity = usedCapacity; + } + } + + + public static class AggregatedSampleAPIEntity extends AggregateAPIEntity { + private long numTotalAlerts; + + @JsonProperty("nTA") + public long getNumTotalAlerts() { + return numTotalAlerts; + } + + public void setNumTotalAlerts(long numTotalAlerts) { + this.numTotalAlerts = numTotalAlerts; + } + } + + @Test + public void testAggregate() { + try { + final AggregatedSampleAPIEntity root = new AggregatedSampleAPIEntity(); + List sumFunctionFields = Arrays.asList("numTotalAlerts"); + boolean counting = true; + List groupbys = Arrays.asList(Aggregator.GROUPBY_ROOT_FIELD_NAME, "cluster"); + List sortFieldOrders = new ArrayList(); + sortFieldOrders.add(new AggregateParams.SortFieldOrder("numTotalAlerts", false)); + Aggregator agg = new Aggregator(new AggregatedSampleAPIEntityFactory(), root, groupbys, counting, sumFunctionFields); + List list = new ArrayList(); + TestAPIEntity entity = new TestAPIEntity(); + entity.setTags(new HashMap()); + entity.getTags().put("category", "checkHadoopFS"); + entity.getTags().put("rack", "rack123"); + entity.getTags().put("cluster", "cluster1"); + entity.setNumTotalAlerts("123"); + entity.setUsedCapacity("12.5"); + entity.setStatus("live"); + 
list.add(entity); + + TestAPIEntity entity2 = new TestAPIEntity(); + entity2.setTags(new HashMap()); + entity2.getTags().put("category", "checkHadoopFS"); + entity2.getTags().put("rack", "rack124"); + entity2.getTags().put("cluster", "cluster2"); + entity2.setNumTotalAlerts("35"); + entity2.setUsedCapacity("32.1"); + entity2.setStatus("dead"); + list.add(entity2); + + TestAPIEntity entity3 = new TestAPIEntity(); + entity3.setTags(new HashMap()); + entity3.getTags().put("category", "checkHadoopFS"); + // entity3.getTags().put("rack", "rack124"); + entity3.getTags().put("cluster", "cluster2"); + entity3.setNumTotalAlerts("11"); + entity3.setUsedCapacity("82.11"); + entity3.setStatus("live"); + list.add(entity3); + + TestAPIEntity entity4 = new TestAPIEntity(); + entity4.setTags(new HashMap()); + entity4.getTags().put("category", "diskfailure"); + entity4.getTags().put("rack", "rack124"); + entity4.getTags().put("cluster", "cluster2"); + entity4.setNumTotalAlerts("61"); + entity4.setUsedCapacity("253.2"); + entity4.setStatus("dead"); + list.add(entity4); + + long numTotalAlerts = 0; + for (TestAPIEntity e : list) { + agg.accumulate(e); + numTotalAlerts += Long.valueOf(e.getNumTotalAlerts()); + } + + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + String result = null; + AggregatedSampleAPIEntity toBeVerified = (AggregatedSampleAPIEntity) root.getEntityList().get(Aggregator.GROUPBY_ROOT_FIELD_VALUE); + result = mapper.writeValueAsString(toBeVerified); + + Assert.assertEquals(2, toBeVerified.getNumDirectDescendants()); + Assert.assertEquals(4, toBeVerified.getNumTotalDescendants()); + Assert.assertEquals(numTotalAlerts, toBeVerified.getNumTotalAlerts()); + + LOG.info(result); + + PostAggregateSorting.sort(root, sortFieldOrders); + toBeVerified = (AggregatedSampleAPIEntity) root.getSortedList().get(0); + result = mapper.writeValueAsString(toBeVerified); + LOG.info(result); + } catch (Exception ex) { + LOG.error("Test 
aggregator fails", ex); + Assert.fail("Test aggregator fails"); + } + } + + @Test + public void testUnassigned() { + // rack is unassigned + try { + final AggregatedSampleAPIEntity root = new AggregatedSampleAPIEntity(); + boolean counting = true; + List groupbys = Arrays.asList(Aggregator.GROUPBY_ROOT_FIELD_NAME, "rack"); + List sortFieldOrders = new ArrayList(); + sortFieldOrders.add(new AggregateParams.SortFieldOrder("count", false)); + sortFieldOrders.add(new AggregateParams.SortFieldOrder("key", false)); + Aggregator agg = new Aggregator(new AggregatedSampleAPIEntityFactory(), root, groupbys, counting, new ArrayList()); + List list = new ArrayList(); + TestAPIEntity entity = new TestAPIEntity(); + entity.setTags(new HashMap()); + entity.getTags().put("category", "checkHadoopFS"); + entity.getTags().put("rack", "rack123"); + entity.getTags().put("cluster", "cluster1"); + entity.setNumTotalAlerts("123"); + entity.setUsedCapacity("12.5"); + entity.setStatus("live"); + list.add(entity); + + TestAPIEntity entity2 = new TestAPIEntity(); + entity2.setTags(new HashMap()); + entity2.getTags().put("category", "checkHadoopFS"); + entity2.getTags().put("rack", "rack124"); + entity2.getTags().put("cluster", "cluster2"); + entity2.setNumTotalAlerts("35"); + entity2.setUsedCapacity("32.1"); + entity2.setStatus("dead"); + list.add(entity2); + + TestAPIEntity entity3 = new TestAPIEntity(); + entity3.setTags(new HashMap()); + entity3.getTags().put("category", "checkHadoopFS"); + // entity3.getTags().put("rack", "rack124"); + entity3.getTags().put("cluster", "cluster2"); + entity3.setNumTotalAlerts("11"); + entity3.setUsedCapacity("82.11"); + entity3.setStatus("live"); + list.add(entity3); + + TestAPIEntity entity4 = new TestAPIEntity(); + entity4.setTags(new HashMap()); + entity4.getTags().put("category", "diskfailure"); + entity4.getTags().put("rack", "rack124"); + entity4.getTags().put("cluster", "cluster2"); + entity4.setNumTotalAlerts("61"); + 
entity4.setUsedCapacity("253.2"); + entity4.setStatus("dead"); + list.add(entity4); + // long numTotalAlerts = 0; - for(TestAPIEntity e : list){ - agg.accumulate(e); + for (TestAPIEntity e : list) { + agg.accumulate(e); // numTotalAlerts += Long.valueOf(e.getNumTotalAlerts()); - } - - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - String result = null; - AggregatedSampleAPIEntity toBeVerified = (AggregatedSampleAPIEntity)root.getEntityList().get(Aggregator.GROUPBY_ROOT_FIELD_VALUE); - result = mapper.writeValueAsString(toBeVerified); - - Assert.assertEquals(3, toBeVerified.getNumDirectDescendants()); - Assert.assertEquals(4, toBeVerified.getNumTotalDescendants()); + } + + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + String result = null; + AggregatedSampleAPIEntity toBeVerified = (AggregatedSampleAPIEntity) root.getEntityList().get(Aggregator.GROUPBY_ROOT_FIELD_VALUE); + result = mapper.writeValueAsString(toBeVerified); + + Assert.assertEquals(3, toBeVerified.getNumDirectDescendants()); + Assert.assertEquals(4, toBeVerified.getNumTotalDescendants()); // Assert.assertEquals(numTotalAlerts, toBeVerified.getNumTotalAlerts()); - - LOG.info(result); - - PostAggregateSorting.sort(root, sortFieldOrders); - toBeVerified = (AggregatedSampleAPIEntity)root.getSortedList().get(0); - result = mapper.writeValueAsString(toBeVerified); - LOG.info(result); - }catch(Exception ex){ - LOG.error("Test aggregator fails", ex); - Assert.fail("Test aggregator fails"); - } - } + + LOG.info(result); + + PostAggregateSorting.sort(root, sortFieldOrders); + toBeVerified = (AggregatedSampleAPIEntity) root.getSortedList().get(0); + result = mapper.writeValueAsString(toBeVerified); + LOG.info(result); + } catch (Exception ex) { + LOG.error("Test aggregator fails", ex); + Assert.fail("Test aggregator fails"); + } + } } diff --git 
a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestAlertAggService.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestAlertAggService.java index c2d0a265bb..ffb577d6ed 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestAlertAggService.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestAlertAggService.java @@ -16,26 +16,25 @@ */ package org.apache.eagle.query.aggregate.test; +import org.junit.Test; + import java.util.ArrayList; import java.util.List; - -import org.junit.Test; - public class TestAlertAggService { - @Test - public void testCompileAndSplitCondition(){ - List alertTagNameValues = new ArrayList(); - String tagNameValue1 = "cluster=cluster1"; - String tagNameValue2 = "category=checkHadoopFS"; - String tagNameValue3 = "category=highloadDisk"; - String tagNameValue4 = "cluster=dc124"; - String tagNameValue5 = "category=lowloadDisk"; - alertTagNameValues.add(tagNameValue1); - alertTagNameValues.add(tagNameValue2); - alertTagNameValues.add(tagNameValue3); - alertTagNameValues.add(tagNameValue4); - alertTagNameValues.add(tagNameValue5); + @Test + public void testCompileAndSplitCondition() { + List alertTagNameValues = new ArrayList(); + String tagNameValue1 = "cluster=cluster1"; + String tagNameValue2 = "category=checkHadoopFS"; + String tagNameValue3 = "category=highloadDisk"; + String tagNameValue4 = "cluster=dc124"; + String tagNameValue5 = "category=lowloadDisk"; + alertTagNameValues.add(tagNameValue1); + alertTagNameValues.add(tagNameValue2); + alertTagNameValues.add(tagNameValue3); + alertTagNameValues.add(tagNameValue4); + alertTagNameValues.add(tagNameValue5); // AlertAggResource r = new AlertAggResource(); // List> result = r.compileAndSplitConditions(alertTagNameValues); // Assert.assertEquals(result.size(), 3); @@ -51,6 +50,6 @@ public void 
testCompileAndSplitCondition(){ // Assert.assertTrue(result.get(2).contains(tagNameValue5)); // Assert.assertTrue(result.get(2).contains(tagNameValue1)); // Assert.assertTrue(result.get(2).contains(tagNameValue4)); - } + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestBucketQuery.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestBucketQuery.java index e44d73bf25..d312a56fe3 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestBucketQuery.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestBucketQuery.java @@ -29,128 +29,128 @@ import java.util.Map; public class TestBucketQuery { - private static class SampleTaggedLogAPIEntity extends TaggedLogAPIEntity{ - private String description; - - @SuppressWarnings("unused") - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - } - - @SuppressWarnings("unchecked") - @Test - public void testBucketQuery(){ - SampleTaggedLogAPIEntity e1 = new SampleTaggedLogAPIEntity(); - e1.setTags(new HashMap()); - e1.getTags().put("cluster", "cluster1"); - e1.getTags().put("rack", "rack123"); - e1.setDescription("this is description 1"); - - SampleTaggedLogAPIEntity e2 = new SampleTaggedLogAPIEntity(); - e2.setTags(new HashMap()); - e2.getTags().put("cluster", "cluster1"); - e2.getTags().put("rack", "rack123"); - e2.setDescription("this is description 2"); - - List bucketFields = new ArrayList(); - bucketFields.add("cluster"); - int limit = 1; - - BucketQuery query1 = new BucketQuery(bucketFields, limit); - query1.put(e1); - query1.put(e2); - - Map map = query1.get(); - - List o = (List)map.get("cluster1"); - Assert.assertEquals(limit, o.size()); - - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new 
ObjectMapper(factory); - mapper.setFilters(TaggedLogAPIEntity.getFilterProvider()); - try{ - String result = mapper.writeValueAsString(map); - System.out.println(result); - }catch(Exception ex){ - ex.printStackTrace(); - Assert.fail("can not serialize bucket query result"); - } - - limit = 2; - BucketQuery query2 = new BucketQuery(bucketFields, limit); - query2.put(e1); - query2.put(e2); - Map map2 = query2.get(); - o = (List)map2.get("cluster1"); - try{ - String result = mapper.writeValueAsString(map2); - System.out.println(result); - }catch(Exception ex){ - ex.printStackTrace(); - Assert.fail("can not serialize bucket query result"); - } - Assert.assertEquals(limit, o.size()); - - - SampleTaggedLogAPIEntity e3 = new SampleTaggedLogAPIEntity(); - e3.setTags(new HashMap()); - e3.getTags().put("cluster", "cluster1"); - e3.getTags().put("rack", "rack124"); - e3.setDescription("this is description 3"); - bucketFields.add("rack"); - limit = 2; - BucketQuery query3 = new BucketQuery(bucketFields, limit); - query3.put(e1); - query3.put(e2); - query3.put(e3); - Map map3 = query3.get(); - Map o3 = (Map)map3.get("cluster1"); - List o4 = (List)o3.get("rack124"); - Assert.assertEquals(1, o4.size()); - List o5 = (List)o3.get("rack123"); - Assert.assertEquals(o5.size(), 2); - - try{ - String result = mapper.writeValueAsString(map3); - System.out.println(result); - }catch(Exception ex){ - ex.printStackTrace(); - Assert.fail("can not serialize bucket query result"); - } - - - SampleTaggedLogAPIEntity e4 = new SampleTaggedLogAPIEntity(); - e4.setTags(new HashMap()); - e4.getTags().put("cluster", "cluster1"); - // rack is set to null + private static class SampleTaggedLogAPIEntity extends TaggedLogAPIEntity { + private String description; + + @SuppressWarnings("unused") + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + } + + @SuppressWarnings("unchecked") + @Test + public void 
testBucketQuery() { + SampleTaggedLogAPIEntity e1 = new SampleTaggedLogAPIEntity(); + e1.setTags(new HashMap()); + e1.getTags().put("cluster", "cluster1"); + e1.getTags().put("rack", "rack123"); + e1.setDescription("this is description 1"); + + SampleTaggedLogAPIEntity e2 = new SampleTaggedLogAPIEntity(); + e2.setTags(new HashMap()); + e2.getTags().put("cluster", "cluster1"); + e2.getTags().put("rack", "rack123"); + e2.setDescription("this is description 2"); + + List bucketFields = new ArrayList(); + bucketFields.add("cluster"); + int limit = 1; + + BucketQuery query1 = new BucketQuery(bucketFields, limit); + query1.put(e1); + query1.put(e2); + + Map map = query1.get(); + + List o = (List) map.get("cluster1"); + Assert.assertEquals(limit, o.size()); + + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + mapper.setFilters(TaggedLogAPIEntity.getFilterProvider()); + try { + String result = mapper.writeValueAsString(map); + System.out.println(result); + } catch (Exception ex) { + ex.printStackTrace(); + Assert.fail("can not serialize bucket query result"); + } + + limit = 2; + BucketQuery query2 = new BucketQuery(bucketFields, limit); + query2.put(e1); + query2.put(e2); + Map map2 = query2.get(); + o = (List) map2.get("cluster1"); + try { + String result = mapper.writeValueAsString(map2); + System.out.println(result); + } catch (Exception ex) { + ex.printStackTrace(); + Assert.fail("can not serialize bucket query result"); + } + Assert.assertEquals(limit, o.size()); + + + SampleTaggedLogAPIEntity e3 = new SampleTaggedLogAPIEntity(); + e3.setTags(new HashMap()); + e3.getTags().put("cluster", "cluster1"); + e3.getTags().put("rack", "rack124"); + e3.setDescription("this is description 3"); + bucketFields.add("rack"); + limit = 2; + BucketQuery query3 = new BucketQuery(bucketFields, limit); + query3.put(e1); + query3.put(e2); + query3.put(e3); + Map map3 = query3.get(); + Map o3 = (Map) map3.get("cluster1"); + List o4 = (List) 
o3.get("rack124"); + Assert.assertEquals(1, o4.size()); + List o5 = (List) o3.get("rack123"); + Assert.assertEquals(o5.size(), 2); + + try { + String result = mapper.writeValueAsString(map3); + System.out.println(result); + } catch (Exception ex) { + ex.printStackTrace(); + Assert.fail("can not serialize bucket query result"); + } + + + SampleTaggedLogAPIEntity e4 = new SampleTaggedLogAPIEntity(); + e4.setTags(new HashMap()); + e4.getTags().put("cluster", "cluster1"); + // rack is set to null // e4.getTags().put("rack", "rack124"); - e4.setDescription("this is description 3"); - limit = 2; - BucketQuery query4 = new BucketQuery(bucketFields, limit); - query4.put(e1); - query4.put(e2); - query4.put(e3); - query4.put(e4); - Map map4 = query4.get(); - Map o6 = (Map)map4.get("cluster1"); - List o7 = (List)o6.get("rack124"); - Assert.assertEquals(1, o7.size()); - List o8 = (List)o6.get("rack123"); - Assert.assertEquals(o8.size(), 2); - List o9 = (List)o6.get("unassigned"); - Assert.assertEquals(o9.size(), 1); - - try{ - String result = mapper.writeValueAsString(map4); - System.out.println(result); - }catch(Exception ex){ - ex.printStackTrace(); - Assert.fail("can not serialize bucket query result"); - } - } + e4.setDescription("this is description 3"); + limit = 2; + BucketQuery query4 = new BucketQuery(bucketFields, limit); + query4.put(e1); + query4.put(e2); + query4.put(e3); + query4.put(e4); + Map map4 = query4.get(); + Map o6 = (Map) map4.get("cluster1"); + List o7 = (List) o6.get("rack124"); + Assert.assertEquals(1, o7.size()); + List o8 = (List) o6.get("rack123"); + Assert.assertEquals(o8.size(), 2); + List o9 = (List) o6.get("unassigned"); + Assert.assertEquals(o9.size(), 1); + + try { + String result = mapper.writeValueAsString(map4); + System.out.println(result); + } catch (Exception ex) { + ex.printStackTrace(); + Assert.fail("can not serialize bucket query result"); + } + } } diff --git 
a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestBucketQuery2.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestBucketQuery2.java index a35ab72a9a..4e82319340 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestBucketQuery2.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestBucketQuery2.java @@ -29,132 +29,132 @@ import java.util.Map; public class TestBucketQuery2 { - private static class SampleTaggedLogAPIEntity extends TaggedLogAPIEntity{ - private String description; + private static class SampleTaggedLogAPIEntity extends TaggedLogAPIEntity { + private String description; - @SuppressWarnings("unused") - public String getDescription() { - return description; - } + @SuppressWarnings("unused") + public String getDescription() { + return description; + } - public void setDescription(String description) { - this.description = description; - } - } + public void setDescription(String description) { + this.description = description; + } + } - @SuppressWarnings("unchecked") + @SuppressWarnings("unchecked") // @Test - public void testBucketQuery(){ - SampleTaggedLogAPIEntity e1 = new SampleTaggedLogAPIEntity(); - e1.setTags(new HashMap()); - e1.getTags().put("cluster", "cluster1"); - e1.getTags().put("rack", "rack123"); - e1.setDescription("this is description 1"); - - SampleTaggedLogAPIEntity e2 = new SampleTaggedLogAPIEntity(); - e2.setTags(new HashMap()); - e2.getTags().put("cluster", "cluster1"); - e2.getTags().put("rack", "rack123"); - e2.setDescription("this is description 2"); - - List bucketFields = new ArrayList(); - bucketFields.add("cluster"); - int limit = 1; - - BucketQuery query1 = new BucketQuery(bucketFields, limit); - query1.put(e1); - query1.put(e2); - - Map map = query1.get(); - - List o = (List)map.get("cluster1"); - 
Assert.assertEquals(limit, o.size()); - - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - try{ - String result = mapper.writeValueAsString(map); - System.out.println(result); - }catch(Exception ex){ - ex.printStackTrace(); - Assert.fail("can not serialize bucket query result"); - } - - limit = 2; - BucketQuery query2 = new BucketQuery(bucketFields, limit); - query2.put(e1); - query2.put(e2); - Map map2 = query2.get(); - o = (List)map2.get("cluster1"); - try{ - String result = mapper.writeValueAsString(map2); - System.out.println(result); - }catch(Exception ex){ - ex.printStackTrace(); - Assert.fail("can not serialize bucket query result"); - } - Assert.assertEquals(limit, o.size()); - - - SampleTaggedLogAPIEntity e3 = new SampleTaggedLogAPIEntity(); - e3.setTags(new HashMap()); - e3.getTags().put("cluster", "cluster1"); - e3.getTags().put("rack", "rack124"); - e3.setDescription("this is description 3"); - bucketFields.add("rack"); - limit = 2; - BucketQuery query3 = new BucketQuery(bucketFields, limit); - query3.put(e1); - query3.put(e2); - query3.put(e3); - Map map3 = query3.get(); - Map o3 = (Map)map3.get("cluster1"); - List o4 = (List)o3.get("rack124"); - Assert.assertEquals(1, o4.size()); - List o5 = (List)o3.get("rack123"); - Assert.assertEquals(o5.size(), 2); - - try{ - String result = mapper.writeValueAsString(map3); - System.out.println(result); - }catch(Exception ex){ - ex.printStackTrace(); - Assert.fail("can not serialize bucket query result"); - } - - - SampleTaggedLogAPIEntity e4 = new SampleTaggedLogAPIEntity(); - e4.setTags(new HashMap()); - e4.getTags().put("cluster", "cluster1"); - // rack is set to null + public void testBucketQuery() { + SampleTaggedLogAPIEntity e1 = new SampleTaggedLogAPIEntity(); + e1.setTags(new HashMap()); + e1.getTags().put("cluster", "cluster1"); + e1.getTags().put("rack", "rack123"); + e1.setDescription("this is description 1"); + + SampleTaggedLogAPIEntity e2 = new 
SampleTaggedLogAPIEntity(); + e2.setTags(new HashMap()); + e2.getTags().put("cluster", "cluster1"); + e2.getTags().put("rack", "rack123"); + e2.setDescription("this is description 2"); + + List bucketFields = new ArrayList(); + bucketFields.add("cluster"); + int limit = 1; + + BucketQuery query1 = new BucketQuery(bucketFields, limit); + query1.put(e1); + query1.put(e2); + + Map map = query1.get(); + + List o = (List) map.get("cluster1"); + Assert.assertEquals(limit, o.size()); + + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + try { + String result = mapper.writeValueAsString(map); + System.out.println(result); + } catch (Exception ex) { + ex.printStackTrace(); + Assert.fail("can not serialize bucket query result"); + } + + limit = 2; + BucketQuery query2 = new BucketQuery(bucketFields, limit); + query2.put(e1); + query2.put(e2); + Map map2 = query2.get(); + o = (List) map2.get("cluster1"); + try { + String result = mapper.writeValueAsString(map2); + System.out.println(result); + } catch (Exception ex) { + ex.printStackTrace(); + Assert.fail("can not serialize bucket query result"); + } + Assert.assertEquals(limit, o.size()); + + + SampleTaggedLogAPIEntity e3 = new SampleTaggedLogAPIEntity(); + e3.setTags(new HashMap()); + e3.getTags().put("cluster", "cluster1"); + e3.getTags().put("rack", "rack124"); + e3.setDescription("this is description 3"); + bucketFields.add("rack"); + limit = 2; + BucketQuery query3 = new BucketQuery(bucketFields, limit); + query3.put(e1); + query3.put(e2); + query3.put(e3); + Map map3 = query3.get(); + Map o3 = (Map) map3.get("cluster1"); + List o4 = (List) o3.get("rack124"); + Assert.assertEquals(1, o4.size()); + List o5 = (List) o3.get("rack123"); + Assert.assertEquals(o5.size(), 2); + + try { + String result = mapper.writeValueAsString(map3); + System.out.println(result); + } catch (Exception ex) { + ex.printStackTrace(); + Assert.fail("can not serialize bucket query result"); + } + + + 
SampleTaggedLogAPIEntity e4 = new SampleTaggedLogAPIEntity(); + e4.setTags(new HashMap()); + e4.getTags().put("cluster", "cluster1"); + // rack is set to null // e4.getTags().put("rack", "rack124"); - e4.setDescription("this is description 3"); - limit = 2; - BucketQuery query4 = new BucketQuery(bucketFields, limit); - query4.put(e1); - query4.put(e2); - query4.put(e3); - query4.put(e4); - Map map4 = query4.get(); - Map o6 = (Map)map4.get("cluster1"); - List o7 = (List)o6.get("rack124"); - Assert.assertEquals(1, o7.size()); - List o8 = (List)o6.get("rack123"); - Assert.assertEquals(o8.size(), 2); - List o9 = (List)o6.get("unassigned"); - Assert.assertEquals(o9.size(), 1); - - try{ - String result = mapper.writeValueAsString(map4); - System.out.println(result); - }catch(Exception ex){ - ex.printStackTrace(); - Assert.fail("can not serialize bucket query result"); - } - } - - @Test - public void test() { - - } + e4.setDescription("this is description 3"); + limit = 2; + BucketQuery query4 = new BucketQuery(bucketFields, limit); + query4.put(e1); + query4.put(e2); + query4.put(e3); + query4.put(e4); + Map map4 = query4.get(); + Map o6 = (Map) map4.get("cluster1"); + List o7 = (List) o6.get("rack124"); + Assert.assertEquals(1, o7.size()); + List o8 = (List) o6.get("rack123"); + Assert.assertEquals(o8.size(), 2); + List o9 = (List) o6.get("unassigned"); + Assert.assertEquals(o9.size(), 1); + + try { + String result = mapper.writeValueAsString(map4); + System.out.println(result); + } catch (Exception ex) { + ex.printStackTrace(); + Assert.fail("can not serialize bucket query result"); + } + } + + @Test + public void test() { + + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestFlatAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestFlatAggregator.java index b6b943984e..d31c3ca12d 100755 --- 
a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestFlatAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestFlatAggregator.java @@ -16,384 +16,383 @@ */ package org.apache.eagle.query.aggregate.test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.eagle.query.aggregate.timeseries.FlatAggregator; +import org.apache.eagle.log.entity.test.TestEntity; import org.apache.eagle.query.aggregate.AggregateFunctionType; +import org.apache.eagle.query.aggregate.timeseries.FlatAggregator; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.eagle.log.entity.test.TestEntity; +import java.util.*; public class TestFlatAggregator { - private static final Logger LOG = LoggerFactory.getLogger(TestFlatAggregator.class); - @Test - public void testCounting(){ - - } - - @Test - public void testSummary(){ - - } - - @Test - public void testAverage(){ - - } - - @Test - public void testIterativeAggregation(){ - - } - - @SuppressWarnings("serial") - private TestEntity createEntity(final String cluster, final String datacenter, final String rack, int numHosts, long numClusters){ - TestEntity entity = new TestEntity(); - Map tags = new HashMap(){{ - put("cluster", cluster); - put("datacenter", datacenter); - put("rack", rack); - }}; - entity.setTags(tags); - entity.setNumHosts(numHosts); - entity.setNumClusters(numClusters); - return entity; - } - - @Test - public void testZeroGroupbyFieldSingleFunctionForSummary(){ - TestEntity[] entities = new TestEntity[5]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); - entities[3] = createEntity("cluster2", "dc1", "rack125", 
9, 2); - entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); - - FlatAggregator agg = new FlatAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 1); - Assert.assertEquals(result.get(new ArrayList()).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+ - entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new FlatAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 1); - Assert.assertEquals(result.get(new ArrayList()).get(0), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+ - entities[2].getNumClusters()+entities[3].getNumClusters()+entities[4].getNumClusters()), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new FlatAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 1); - Assert.assertEquals(result.get(new ArrayList()).get(0), (double)(5), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - @Test - public void testSingleGroupbyFieldSingleFunctionForSummary(){ - TestEntity[] entities = new TestEntity[5]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 
0); - entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); - entities[4] = createEntity("cluster2", "dc2", "rack126", 15, 2); - - FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new FlatAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts())+entities[3].getNumHosts(), 0.001); - Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double)(entities[4].getNumHosts()), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new FlatAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), 
(double)(entities[3].getNumClusters()+entities[4].getNumClusters()), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new FlatAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters())+entities[3].getNumClusters(), 0.001); - Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double)(entities[4].getNumClusters()), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - - @Test - public void testSingleGroupbyFieldSingleFunctionForCount(){ - TestEntity[] entities = new TestEntity[5]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); - entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); - entities[4] = createEntity("cluster2", "dc2", "rack126", 15, 2); - - FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(3), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(2), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new FlatAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*")); - 
try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(4), 0.001); - Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double)(1), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - @Test - public void testMultipleFieldsSingleFunctionForSummary(){ - TestEntity[] entities = new TestEntity[6]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); - entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); - entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); - entities[5] = createEntity("cluster2", null, "rack126", 1, 3); - - FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(3, result.size()); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), (double)(entities[5].getNumHosts()), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new FlatAggregator(Arrays.asList("cluster", "datacenter", "rack"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result 
= agg.result(); - Assert.assertEquals(5, result.size()); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), (double)(entities[2].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), (double)(entities[3].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), (double)(entities[4].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), (double)(entities[5].getNumHosts()), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - @Test - public void testMultipleFieldsSingleFunctionForCount(){ - TestEntity[] entities = new TestEntity[6]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); - entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); - entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); - entities[5] = createEntity("cluster2", null, "rack126", 1, 3); - - FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(3, result.size()); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), (double)(3), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), (double)(2), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), (double)(1), 0.001); - }catch(Exception ex){ - LOG.error("Can not 
aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new FlatAggregator(Arrays.asList("cluster", "datacenter", "rack"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(5, result.size()); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), (double)(2), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), (double)(1), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - @Test - public void testSingleGroupbyFieldMultipleFunctions(){ - TestEntity[] entities = new TestEntity[5]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); - entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); - entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); - - FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.count), - Arrays.asList("numHosts", "*")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 2); - Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(1), (double)(3), 
0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(1), (double)(2), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new FlatAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count, AggregateFunctionType.sum), Arrays.asList("*", "numHosts")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 1); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(5), 0.001); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts())+entities[4].getNumHosts(), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new FlatAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count, AggregateFunctionType.sum, AggregateFunctionType.sum), - Arrays.asList("*", "numHosts", "numClusters")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 1); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(5), 0.001); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts())+entities[4].getNumHosts(), 0.001); - Assert.assertEquals(result.get(Arrays.asList("dc1")).get(2), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()+entities[3].getNumClusters())+entities[4].getNumClusters(), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not 
aggregate"); - } - } - - @Test - public void testMultipleGroupbyFieldsMultipleFunctions(){ - TestEntity[] entities = new TestEntity[5]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); - entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); - entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); - - FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster", "rack"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.count), - Arrays.asList("numHosts", "*")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - Map, List> result = agg.result(); - Assert.assertEquals(result.size(), 4); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(1), (double)(2), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(0), (double)(entities[2].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(1), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(0), (double)(entities[3].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(1), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(0), (double)(entities[4].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(1), (double)(1), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } + private static final Logger LOG = LoggerFactory.getLogger(TestFlatAggregator.class); + + @Test + public void testCounting() { + + } + + @Test + public void testSummary() { + 
+ } + + @Test + public void testAverage() { + + } + + @Test + public void testIterativeAggregation() { + + } + + @SuppressWarnings("serial") + private TestEntity createEntity(final String cluster, final String datacenter, final String rack, int numHosts, long numClusters) { + TestEntity entity = new TestEntity(); + Map tags = new HashMap() {{ + put("cluster", cluster); + put("datacenter", datacenter); + put("rack", rack); + }}; + entity.setTags(tags); + entity.setNumHosts(numHosts); + entity.setNumClusters(numClusters); + return entity; + } + + @Test + public void testZeroGroupbyFieldSingleFunctionForSummary() { + TestEntity[] entities = new TestEntity[5]; + entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); + + FlatAggregator agg = new FlatAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 1); + Assert.assertEquals(result.get(new ArrayList()).get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + + entities[2].getNumHosts() + entities[3].getNumHosts() + entities[4].getNumHosts()), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new FlatAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 1); + Assert.assertEquals(result.get(new ArrayList()).get(0), (double) (entities[0].getNumClusters() + entities[1].getNumClusters() + + 
entities[2].getNumClusters() + entities[3].getNumClusters() + entities[4].getNumClusters()), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new FlatAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 1); + Assert.assertEquals(result.get(new ArrayList()).get(0), (double) (5), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + @Test + public void testSingleGroupbyFieldSingleFunctionForSummary() { + TestEntity[] entities = new TestEntity[5]; + entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc2", "rack126", 15, 2); + + FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double) (entities[3].getNumHosts() + entities[4].getNumHosts()), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new FlatAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } 
+ Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts()) + entities[3].getNumHosts(), + 0.001); + Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double) (entities[4].getNumHosts()), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new FlatAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double) (entities[0].getNumClusters() + entities[1].getNumClusters() + entities[2].getNumClusters()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double) (entities[3].getNumClusters() + entities[4].getNumClusters()), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new FlatAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numClusters")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double) (entities[0].getNumClusters() + entities[1].getNumClusters() + entities[2].getNumClusters()) + entities[3] + .getNumClusters(), 0.001); + Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double) (entities[4].getNumClusters()), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + + @Test + public void testSingleGroupbyFieldSingleFunctionForCount() { + TestEntity[] entities = new 
TestEntity[5]; + entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc2", "rack126", 15, 2); + + FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double) (3), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double) (2), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new FlatAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double) (4), 0.001); + Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double) (1), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + @Test + public void testMultipleFieldsSingleFunctionForSummary() { + TestEntity[] entities = new TestEntity[6]; + entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); + entities[5] = createEntity("cluster2", null, "rack126", 1, 3); + + FlatAggregator agg = 
new FlatAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(3, result.size()); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), (double) (entities[3].getNumHosts() + entities[4].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), (double) (entities[5].getNumHosts()), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new FlatAggregator(Arrays.asList("cluster", "datacenter", "rack"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(5, result.size()); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), (double) (entities[2].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), (double) (entities[3].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), (double) (entities[4].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), (double) (entities[5].getNumHosts()), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + @Test + public void testMultipleFieldsSingleFunctionForCount() { + 
TestEntity[] entities = new TestEntity[6]; + entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); + entities[5] = createEntity("cluster2", null, "rack126", 1, 3); + + FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(3, result.size()); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), (double) (3), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), (double) (2), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), (double) (1), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new FlatAggregator(Arrays.asList("cluster", "datacenter", "rack"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(5, result.size()); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), (double) (2), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), (double) (1), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), (double) (1), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), (double) (1), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), (double) (1), 0.001); + } catch (Exception ex) { + 
LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + @Test + public void testSingleGroupbyFieldMultipleFunctions() { + TestEntity[] entities = new TestEntity[5]; + entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); + + FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.count), + Arrays.asList("numHosts", "*")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 2); + Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(1), (double) (3), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double) (entities[3].getNumHosts() + entities[4].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(1), (double) (2), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new FlatAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count, AggregateFunctionType.sum), Arrays.asList("*", "numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 1); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double) (5), 0.001); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts() + 
entities[3].getNumHosts()) + + entities[4].getNumHosts(), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new FlatAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.count, AggregateFunctionType.sum, AggregateFunctionType.sum), + Arrays.asList("*", "numHosts", "numClusters")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 1); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double) (5), 0.001); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts() + entities[3].getNumHosts()) + entities[4].getNumHosts(), 0.001); + Assert.assertEquals(result.get(Arrays.asList("dc1")).get(2), (double) (entities[0].getNumClusters() + entities[1].getNumClusters() + entities[2].getNumClusters() + entities[3].getNumClusters()) + entities[4].getNumClusters(), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + @Test + public void testMultipleGroupbyFieldsMultipleFunctions() { + TestEntity[] entities = new TestEntity[5]; + entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); + + FlatAggregator agg = new FlatAggregator(Arrays.asList("cluster", "rack"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.count), + Arrays.asList("numHosts", "*")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + Map, List> result = agg.result(); + Assert.assertEquals(result.size(), 4); + 
Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(1), (double) (2), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(0), (double) (entities[2].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(1), (double) (1), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(0), (double) (entities[3].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(1), (double) (1), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(0), (double) (entities[4].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(1), (double) (1), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestGroupbyFieldComparator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestGroupbyFieldComparator.java index 5fec9505c9..1b84de7cfe 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestGroupbyFieldComparator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestGroupbyFieldComparator.java @@ -16,20 +16,20 @@ */ package org.apache.eagle.query.aggregate.test; -import java.util.Arrays; - import org.apache.eagle.query.aggregate.timeseries.GroupbyFieldsComparator; import org.junit.Assert; import org.junit.Test; +import java.util.Arrays; + public class TestGroupbyFieldComparator { - @Test - public void testStringListCompare(){ - GroupbyFieldsComparator c = new GroupbyFieldsComparator(); 
- Assert.assertTrue(c.compare(Arrays.asList("ab"), Arrays.asList("ac"))<0); - Assert.assertTrue(c.compare(Arrays.asList("xy"), Arrays.asList("cd"))>0); - Assert.assertTrue(c.compare(Arrays.asList("xy"), Arrays.asList("xy"))==0); - Assert.assertTrue(c.compare(Arrays.asList("xy", "ab"), Arrays.asList("xy", "ac"))<0); - } + @Test + public void testStringListCompare() { + GroupbyFieldsComparator c = new GroupbyFieldsComparator(); + Assert.assertTrue(c.compare(Arrays.asList("ab"), Arrays.asList("ac")) < 0); + Assert.assertTrue(c.compare(Arrays.asList("xy"), Arrays.asList("cd")) > 0); + Assert.assertTrue(c.compare(Arrays.asList("xy"), Arrays.asList("xy")) == 0); + Assert.assertTrue(c.compare(Arrays.asList("xy", "ab"), Arrays.asList("xy", "ac")) < 0); + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java index 96b36e9eb4..e5c162ea53 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java @@ -16,316 +16,312 @@ */ package org.apache.eagle.query.aggregate.test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import org.apache.eagle.query.aggregate.timeseries.PostHierarchicalAggregateSort; +import org.apache.eagle.log.entity.test.TestEntity; +import org.apache.eagle.query.aggregate.AggregateFunctionType; import org.apache.eagle.query.aggregate.timeseries.HierarchicalAggregateEntity; import org.apache.eagle.query.aggregate.timeseries.HierarchicalAggregator; -import org.apache.eagle.query.aggregate.AggregateFunctionType; -import org.junit.Assert; - 
+import org.apache.eagle.query.aggregate.timeseries.PostHierarchicalAggregateSort; import org.apache.eagle.query.aggregate.timeseries.SortOption; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.map.ObjectMapper; +import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.eagle.log.entity.test.TestEntity; +import java.util.*; public class TestHierarchicalAggregator { -private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggregator.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggregator.class); + + @SuppressWarnings("serial") + private TestEntity createEntity(final String cluster, final String datacenter, final String rack, int numHosts, long numClusters) { + TestEntity entity = new TestEntity(); + Map tags = new HashMap() {{ + put("cluster", cluster); + put("datacenter", datacenter); + put("rack", rack); + }}; + entity.setTags(tags); + entity.setNumHosts(numHosts); + entity.setNumClusters(numClusters); + return entity; + } + + @SuppressWarnings("serial") + private TestEntity createEntityWithoutDatacenter(final String cluster, final String rack, int numHosts, long numClusters) { + TestEntity entity = new TestEntity(); + Map tags = new HashMap() {{ + put("cluster", cluster); + put("rack", rack); + }}; + entity.setTags(tags); + entity.setNumHosts(numHosts); + entity.setNumClusters(numClusters); + return entity; + } + + private void writeToJson(String message, Object obj) { + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + try { + String result = mapper.writeValueAsString(obj); + LOG.info(message + ":\n" + result); + } catch (Exception ex) { + LOG.error("Can not write json", ex); + Assert.fail("Can not write json"); + } + } + + @Test + public void testZeroGropubyFieldHierarchicalAggregator() { + TestEntity[] entities = new TestEntity[5]; + entities[0] = createEntity("cluster1", 
"dc1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); + HierarchicalAggregator agg = new HierarchicalAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + HierarchicalAggregateEntity result = agg.result(); + writeToJson("After aggregate", result); + Assert.assertEquals(result.getChildren().size(), 0); + Assert.assertEquals(result.getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts() + entities[3].getNumHosts() + entities[4] + .getNumHosts()), 0.0001); + + // test sort by function1 + SortOption so = new SortOption(); + so.setIndex(0); + so.setAscendant(true); + List sortOptions = Arrays.asList(so); + PostHierarchicalAggregateSort.sort(result, sortOptions); + writeToJson("After sort", result); + Assert.assertEquals(null, result.getChildren()); + Assert.assertEquals(0, result.getSortedList().size()); + Assert.assertEquals(result.getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts() + entities[3].getNumHosts() + entities[4] + .getNumHosts()), 0.0001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + @Test + public void testSingleGropubyFieldHierarchicalAggregator() { + TestEntity[] entities = new TestEntity[5]; + entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc2", "rack128", 10, 0); + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); + 
HierarchicalAggregator agg = new HierarchicalAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + HierarchicalAggregateEntity result = agg.result(); + writeToJson("After aggregate", result); + Assert.assertEquals(result.getChildren().size(), 2); + Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts()), 0.0001); + Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double) (entities[3].getNumHosts() + entities[4].getNumHosts()), 0.0001); + + // test sort by function 1 + SortOption so = new SortOption(); + so.setIndex(0); + so.setAscendant(true); + List sortOptions = Arrays.asList(so); + PostHierarchicalAggregateSort.sort(result, sortOptions); + writeToJson("After sort", result); + Assert.assertEquals(null, result.getChildren()); + Assert.assertEquals(2, result.getSortedList().size(), 2); + Iterator> it = result.getSortedList().iterator(); + Assert.assertEquals(true, it.hasNext()); + Map.Entry entry = it.next(); + Assert.assertEquals("cluster2", entry.getKey()); + Assert.assertEquals(entry.getValue().getValues().get(0), (double) (entities[3].getNumHosts() + entities[4].getNumHosts()), 0.0001); + + Assert.assertEquals(true, it.hasNext()); + entry = it.next(); + Assert.assertEquals("cluster1", entry.getKey()); + Assert.assertEquals(entry.getValue().getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts()), 0.0001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new HierarchicalAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + HierarchicalAggregateEntity result = 
agg.result(); + writeToJson("After aggregate", result); + Assert.assertEquals(result.getChildren().size(), 2); + Assert.assertEquals(result.getChildren().get("dc1").getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[3].getNumHosts() + entities[4] + .getNumHosts()), 0.0001); + Assert.assertEquals(result.getChildren().get("dc2").getValues().get(0), (double) (entities[2].getNumHosts()), 0.0001); + + // test sort by function 1 + SortOption so = new SortOption(); + so.setIndex(0); + so.setAscendant(true); + List sortOptions = Arrays.asList(so); + PostHierarchicalAggregateSort.sort(result, sortOptions); + writeToJson("After sort", result); + Assert.assertEquals(null, result.getChildren()); + Assert.assertEquals(2, result.getSortedList().size(), 2); + Iterator> it = result.getSortedList().iterator(); + Assert.assertEquals(true, it.hasNext()); + Map.Entry entry = it.next(); + Assert.assertEquals("dc2", entry.getKey()); + Assert.assertEquals(entry.getValue().getValues().get(0), (double) (entities[2].getNumHosts()), 0.0001); + + Assert.assertEquals(true, it.hasNext()); + entry = it.next(); + Assert.assertEquals("dc1", entry.getKey()); + Assert.assertEquals(entry.getValue().getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[3].getNumHosts() + entities[4].getNumHosts()), 0.0001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + agg = new HierarchicalAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.sum), Arrays.asList("numHosts", "numClusters")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + HierarchicalAggregateEntity result = agg.result(); + writeToJson("After aggregate", result); + Assert.assertEquals(result.getChildren().size(), 2); + Assert.assertEquals(2, result.getChildren().get("cluster1").getValues().size()); + 
Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts()), 0.0001); + Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(1), (double) (entities[0].getNumClusters() + entities[1].getNumClusters() + entities[2].getNumClusters()), 0.0001); + Assert.assertEquals(2, result.getChildren().get("cluster2").getValues().size()); + Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double) (entities[3].getNumHosts() + entities[4].getNumHosts()), 0.0001); + Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(1), (double) (entities[3].getNumClusters() + entities[4].getNumClusters()), 0.0001); + + // test sort by function 2 + SortOption so = new SortOption(); + so.setIndex(1); + so.setAscendant(true); + List sortOptions = Arrays.asList(so); + PostHierarchicalAggregateSort.sort(result, sortOptions); + writeToJson("After sort", result); + Assert.assertEquals(null, result.getChildren()); + Assert.assertEquals(2, result.getSortedList().size(), 2); + Iterator> it = result.getSortedList().iterator(); + Assert.assertEquals(true, it.hasNext()); + Map.Entry entry = it.next(); + Assert.assertEquals("cluster1", entry.getKey()); + Assert.assertEquals(entry.getValue().getValues().get(1), (double) (entities[0].getNumClusters() + entities[1].getNumClusters() + entities[2].getNumClusters()), 0.0001); + + Assert.assertEquals(true, it.hasNext()); + entry = it.next(); + Assert.assertEquals("cluster2", entry.getKey()); + Assert.assertEquals(entry.getValue().getValues().get(1), (double) (entities[3].getNumClusters() + entities[4].getNumClusters()), 0.0001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } + + + @Test + public void testMultipleGropubyFieldsHierarchicalAggregator() { + TestEntity[] entities = new TestEntity[5]; + entities[0] = 
createEntity("cluster1", "dc1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc2", "rack128", 10, 0); + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); + HierarchicalAggregator agg = new HierarchicalAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + HierarchicalAggregateEntity result = agg.result(); + writeToJson("After aggregate", result); + Assert.assertEquals(2, result.getChildren().size()); + Assert.assertEquals(66.0, (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts() + entities[3].getNumHosts() + entities[4].getNumHosts()), 0.0001); + Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts()), 0.0001); + Assert.assertEquals(2, result.getChildren().get("cluster1").getChildren().size()); + Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc1").getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts()), 0.0001); + Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc2").getValues().get(0), (double) (entities[2].getNumHosts()), 0.0001); + + Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double) (entities[3].getNumHosts() + entities[4].getNumHosts()), 0.0001); + Assert.assertEquals(1, result.getChildren().get("cluster2").getChildren().size()); + Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("dc1").getValues().get(0), (double) (entities[3].getNumHosts() + entities[4].getNumHosts()), 0.0001); + + // test sort by function 2 + SortOption so = new SortOption(); + so.setIndex(0); + 
so.setAscendant(true); + List sortOptions = Arrays.asList(so); + PostHierarchicalAggregateSort.sort(result, sortOptions); + writeToJson("After sort", result); + Assert.assertEquals(null, result.getChildren()); + Assert.assertEquals(2, result.getSortedList().size()); + Iterator> it = result.getSortedList().iterator(); + Assert.assertEquals(true, it.hasNext()); + Map.Entry entry = it.next(); + Assert.assertEquals("cluster2", entry.getKey()); + Assert.assertEquals(entry.getValue().getValues().get(0), (double) (entities[3].getNumHosts() + entities[4].getNumHosts()), 0.0001); + + Assert.assertEquals(true, it.hasNext()); + entry = it.next(); + Assert.assertEquals("cluster1", entry.getKey()); + Assert.assertEquals(entry.getValue().getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts()), 0.0001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } - @SuppressWarnings("serial") - private TestEntity createEntity(final String cluster, final String datacenter, final String rack, int numHosts, long numClusters){ - TestEntity entity = new TestEntity(); - Map tags = new HashMap(){{ - put("cluster", cluster); - put("datacenter", datacenter); - put("rack", rack); - }}; - entity.setTags(tags); - entity.setNumHosts(numHosts); - entity.setNumClusters(numClusters); - return entity; - } - - @SuppressWarnings("serial") - private TestEntity createEntityWithoutDatacenter(final String cluster, final String rack, int numHosts, long numClusters){ - TestEntity entity = new TestEntity(); - Map tags = new HashMap(){{ - put("cluster", cluster); - put("rack", rack); - }}; - entity.setTags(tags); - entity.setNumHosts(numHosts); - entity.setNumClusters(numClusters); - return entity; - } + @Test + public void testUnassigned() { + TestEntity[] entities = new TestEntity[5]; + entities[0] = createEntityWithoutDatacenter("cluster1", "rack123", 12, 2); + entities[1] = createEntity("cluster1", 
"dc1", "rack123", 20, 1); + entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); + entities[3] = createEntityWithoutDatacenter("cluster2", "rack125", 9, 2); + entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); + HierarchicalAggregator agg = new HierarchicalAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + HierarchicalAggregateEntity result = agg.result(); + writeToJson("After aggregate", result); + Assert.assertEquals(result.getChildren().size(), 2); + Assert.assertEquals(result.getChildren().get("dc1").getValues().get(0), (double) (entities[1].getNumHosts() + entities[2].getNumHosts()) + entities[4].getNumHosts(), 0.0001); + Assert.assertEquals(result.getChildren().get("unassigned").getValues().get(0), (double) (entities[0].getNumHosts() + entities[3].getNumHosts()), 0.0001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } - private void writeToJson(String message, Object obj){ - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - try{ - String result = mapper.writeValueAsString(obj); - LOG.info(message + ":\n" + result); - }catch(Exception ex){ - LOG.error("Can not write json", ex); - Assert.fail("Can not write json"); - } - } - - @Test - public void testZeroGropubyFieldHierarchicalAggregator(){ - TestEntity[] entities = new TestEntity[5]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); - entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); - entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); - HierarchicalAggregator agg = new HierarchicalAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - 
try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - HierarchicalAggregateEntity result = agg.result(); - writeToJson("After aggregate", result); - Assert.assertEquals(result.getChildren().size(), 0); - Assert.assertEquals(result.getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); + agg = new HierarchicalAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); + try { + for (TestEntity e : entities) { + agg.accumulate(e); + } + HierarchicalAggregateEntity result = agg.result(); + writeToJson("After aggregate", result); + Assert.assertEquals(result.getChildren().size(), 2); + Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double) (entities[0].getNumHosts() + entities[1].getNumHosts() + entities[2].getNumHosts()), 0.0001); + Assert.assertEquals(2, result.getChildren().get("cluster1").getChildren().size()); + Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc1").getValues().get(0), (double) (entities[1].getNumHosts() + entities[2].getNumHosts()), 0.0001); + Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("unassigned").getValues().get(0), (double) (entities[0].getNumHosts()), 0.0001); - // test sort by function1 - SortOption so = new SortOption(); - so.setIndex(0); - so.setAscendant(true); - List sortOptions = Arrays.asList(so); - PostHierarchicalAggregateSort.sort(result, sortOptions); - writeToJson("After sort" ,result); - Assert.assertEquals(null, result.getChildren()); - Assert.assertEquals(0, result.getSortedList().size()); - Assert.assertEquals(result.getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - 
Assert.fail("Can not aggregate"); - } - } - - @Test - public void testSingleGropubyFieldHierarchicalAggregator(){ - TestEntity[] entities = new TestEntity[5]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc2", "rack128", 10, 0); - entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); - entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); - HierarchicalAggregator agg = new HierarchicalAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - HierarchicalAggregateEntity result = agg.result(); - writeToJson("After aggregate" ,result); - Assert.assertEquals(result.getChildren().size(), 2); - Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001); - Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); - - // test sort by function 1 - SortOption so = new SortOption(); - so.setIndex(0); - so.setAscendant(true); - List sortOptions = Arrays.asList(so); - PostHierarchicalAggregateSort.sort(result, sortOptions); - writeToJson("After sort" ,result); - Assert.assertEquals(null, result.getChildren()); - Assert.assertEquals(2, result.getSortedList().size(), 2); - Iterator> it = result.getSortedList().iterator(); - Assert.assertEquals(true, it.hasNext()); - Map.Entry entry = it.next(); - Assert.assertEquals("cluster2", entry.getKey()); - Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); - - Assert.assertEquals(true, it.hasNext()); - entry = it.next(); - Assert.assertEquals("cluster1", entry.getKey()); - 
Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new HierarchicalAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - HierarchicalAggregateEntity result = agg.result(); - writeToJson("After aggregate" , result); - Assert.assertEquals(result.getChildren().size(), 2); - Assert.assertEquals(result.getChildren().get("dc1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); - Assert.assertEquals(result.getChildren().get("dc2").getValues().get(0), (double)(entities[2].getNumHosts()), 0.0001); - - // test sort by function 1 - SortOption so = new SortOption(); - so.setIndex(0); - so.setAscendant(true); - List sortOptions = Arrays.asList(so); - PostHierarchicalAggregateSort.sort(result, sortOptions); - writeToJson("After sort" ,result); - Assert.assertEquals(null, result.getChildren()); - Assert.assertEquals(2, result.getSortedList().size(), 2); - Iterator> it = result.getSortedList().iterator(); - Assert.assertEquals(true, it.hasNext()); - Map.Entry entry = it.next(); - Assert.assertEquals("dc2", entry.getKey()); - Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[2].getNumHosts()), 0.0001); - - Assert.assertEquals(true, it.hasNext()); - entry = it.next(); - Assert.assertEquals("dc1", entry.getKey()); - Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new 
HierarchicalAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.sum), Arrays.asList("numHosts", "numClusters")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - HierarchicalAggregateEntity result = agg.result(); - writeToJson("After aggregate" , result); - Assert.assertEquals(result.getChildren().size(), 2); - Assert.assertEquals(2, result.getChildren().get("cluster1").getValues().size()); - Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001); - Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(1), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()), 0.0001); - Assert.assertEquals(2, result.getChildren().get("cluster2").getValues().size()); - Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); - Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(1), (double)(entities[3].getNumClusters()+entities[4].getNumClusters()), 0.0001); - - // test sort by function 2 - SortOption so = new SortOption(); - so.setIndex(1); - so.setAscendant(true); - List sortOptions = Arrays.asList(so); - PostHierarchicalAggregateSort.sort(result, sortOptions); - writeToJson("After sort" ,result); - Assert.assertEquals(null, result.getChildren()); - Assert.assertEquals(2, result.getSortedList().size(), 2); - Iterator> it = result.getSortedList().iterator(); - Assert.assertEquals(true, it.hasNext()); - Map.Entry entry = it.next(); - Assert.assertEquals("cluster1", entry.getKey()); - Assert.assertEquals(entry.getValue().getValues().get(1), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()), 0.0001); - - Assert.assertEquals(true, it.hasNext()); - entry = it.next(); - 
Assert.assertEquals("cluster2", entry.getKey()); - Assert.assertEquals(entry.getValue().getValues().get(1), (double)(entities[3].getNumClusters()+entities[4].getNumClusters()), 0.0001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - - @Test - public void testMultipleGropubyFieldsHierarchicalAggregator(){ - TestEntity[] entities = new TestEntity[5]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc2", "rack128", 10, 0); - entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2); - entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); - HierarchicalAggregator agg = new HierarchicalAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - HierarchicalAggregateEntity result = agg.result(); - writeToJson("After aggregate", result); - Assert.assertEquals(2, result.getChildren().size()); - Assert.assertEquals(66.0, (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); - Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001); - Assert.assertEquals(2, result.getChildren().get("cluster1").getChildren().size()); - Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()), 0.0001); - Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc2").getValues().get(0), (double)(entities[2].getNumHosts()), 0.0001); - - Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), 
(double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); - Assert.assertEquals(1, result.getChildren().get("cluster2").getChildren().size()); - Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("dc1").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); - - // test sort by function 2 - SortOption so = new SortOption(); - so.setIndex(0); - so.setAscendant(true); - List sortOptions = Arrays.asList(so); - PostHierarchicalAggregateSort.sort(result, sortOptions); - writeToJson("After sort" ,result); - Assert.assertEquals(null, result.getChildren()); - Assert.assertEquals(2, result.getSortedList().size()); - Iterator> it = result.getSortedList().iterator(); - Assert.assertEquals(true, it.hasNext()); - Map.Entry entry = it.next(); - Assert.assertEquals("cluster2", entry.getKey()); - Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001); - - Assert.assertEquals(true, it.hasNext()); - entry = it.next(); - Assert.assertEquals("cluster1", entry.getKey()); - Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } - - @Test - public void testUnassigned(){ - TestEntity[] entities = new TestEntity[5]; - entities[0] = createEntityWithoutDatacenter("cluster1", "rack123", 12, 2); - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1); - entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0); - entities[3] = createEntityWithoutDatacenter("cluster2", "rack125", 9, 2); - entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2); - HierarchicalAggregator agg = new HierarchicalAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - try{ - for(TestEntity e 
: entities){ - agg.accumulate(e); - } - HierarchicalAggregateEntity result = agg.result(); - writeToJson("After aggregate", result); - Assert.assertEquals(result.getChildren().size(), 2); - Assert.assertEquals(result.getChildren().get("dc1").getValues().get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts())+entities[4].getNumHosts(), 0.0001); - Assert.assertEquals(result.getChildren().get("unassigned").getValues().get(0), (double)(entities[0].getNumHosts()+entities[3].getNumHosts()), 0.0001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - agg = new HierarchicalAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts")); - try{ - for(TestEntity e : entities){ - agg.accumulate(e); - } - HierarchicalAggregateEntity result = agg.result(); - writeToJson("After aggregate", result); - Assert.assertEquals(result.getChildren().size(), 2); - Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001); - Assert.assertEquals(2, result.getChildren().get("cluster1").getChildren().size()); - Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc1").getValues().get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001); - Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("unassigned").getValues().get(0), (double)(entities[0].getNumHosts()), 0.0001); - - Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("dc1").getValues().get(0), (double)(entities[4].getNumHosts()), 0.0001); - Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("unassigned").getValues().get(0), (double)(entities[3].getNumHosts()), 0.0001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } + 
Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("dc1").getValues().get(0), (double) (entities[4].getNumHosts()), 0.0001); + Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("unassigned").getValues().get(0), (double) (entities[3].getNumHosts()), 0.0001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestListQueryCompiler.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestListQueryCompiler.java index 10f96dcb9b..6527f50936 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestListQueryCompiler.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestListQueryCompiler.java @@ -19,14 +19,14 @@ package org.apache.eagle.query.aggregate.test; import org.apache.eagle.query.ListQueryCompiler; -import org.apache.eagle.query.parser.EagleQueryParseException; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TestListQueryCompiler { - private final static Logger LOG = LoggerFactory.getLogger(TestListQueryCompiler.class); + private static final Logger LOG = LoggerFactory.getLogger(TestListQueryCompiler.class); + @Test public void test() throws Exception { try { diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java index 6850d2c64c..7d1ea35dd9 100644 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java +++ 
b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java @@ -16,128 +16,123 @@ */ package org.apache.eagle.query.aggregate.test; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - import org.apache.eagle.query.aggregate.timeseries.PostFlatAggregateSort; - +import org.apache.eagle.query.aggregate.timeseries.SortOption; import org.apache.log4j.Logger; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.map.ObjectMapper; import org.junit.Assert; import org.junit.Test; -import org.apache.eagle.query.aggregate.timeseries.SortOption; +import java.util.*; public class TestPostFlatAggregateSort { - private static final Logger logger = Logger.getLogger(TestPostFlatAggregateSort.class); - @Test - public void testSort(){ - final String aggField1Value1 = "field1value1"; - final String aggField1Value2 = "field1value2"; - final String aggField2Value1 = "field2value1"; - final String aggField2Value2 = "field2value2"; - final Double d1 = new Double(1); - final Double d2 = new Double(2); - final Double d3 = new Double(3); - final Double d4 = new Double(4); - @SuppressWarnings("serial") - Map, List> result = new HashMap, List>(){{ - put(Arrays.asList(aggField1Value1, aggField2Value1), Arrays.asList(d2, d3)); - put(Arrays.asList(aggField1Value2, aggField2Value2), Arrays.asList(d1, d4)); - }}; - - // sort by function1 - SortOption so = new SortOption(); - so.setIndex(0); - so.setAscendant(true); - List sortOptions = Arrays.asList(so); - List, List>> set = - PostFlatAggregateSort.sort(result, sortOptions, 0); - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - Assert.assertEquals(2, set.size()); - Iterator, List>> it = set.iterator(); - Map.Entry, List> e = it.next(); - Assert.assertTrue(e.getKey().get(0).equals(aggField1Value2)); - 
Assert.assertTrue(e.getValue().get(0).equals(d1)); - e = it.next(); - Assert.assertTrue(e.getKey().get(0).equals(aggField1Value1)); - Assert.assertTrue(e.getValue().get(0).equals(d2)); - try{ - String value = mapper.writeValueAsString(set); - logger.info(value); - }catch(Exception ex){ - logger.error("fail with mapping", ex); - Assert.fail("fail with mapping"); - } - - - // sort by function2 - so = new SortOption(); - so.setIndex(1); - so.setAscendant(true); - sortOptions = Arrays.asList(so); - set = PostFlatAggregateSort.sort(result, sortOptions, 0); - factory = new JsonFactory(); - mapper = new ObjectMapper(factory); - Assert.assertEquals(2, set.size()); - it = set.iterator(); - e = it.next(); - Assert.assertTrue(e.getKey().get(0).equals(aggField1Value1)); - Assert.assertTrue(e.getValue().get(0).equals(d2)); - e = it.next(); - Assert.assertTrue(e.getKey().get(0).equals(aggField1Value2)); - Assert.assertTrue(e.getValue().get(0).equals(d1)); - try{ - String value = mapper.writeValueAsString(set); - logger.info(value); - }catch(Exception ex){ - logger.error("fail with mapping", ex); - Assert.fail("fail with mapping"); - } - } - - @Test - public void testDefaultSort(){ - final String aggField1Value1 = "xyz"; - final String aggField1Value2 = "xyz"; - final String aggField2Value1 = "abd"; - final String aggField2Value2 = "abc"; - final Double d1 = new Double(1); - final Double d2 = new Double(1); - @SuppressWarnings("serial") - Map, List> result = new HashMap, List>(){{ - put(Arrays.asList(aggField1Value1, aggField2Value1), Arrays.asList(d2)); - put(Arrays.asList(aggField1Value2, aggField2Value2), Arrays.asList(d1)); - }}; - - // sort by function1 - SortOption so = new SortOption(); - so.setIndex(0); - so.setAscendant(true); - List sortOptions = Arrays.asList(so); - List, List>> set = - PostFlatAggregateSort.sort(result, sortOptions, 0); - JsonFactory factory = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(factory); - Assert.assertEquals(2, set.size()); - 
Iterator, List>> it = set.iterator(); - Map.Entry, List> e = it.next(); - Assert.assertTrue(e.getKey().get(0).equals(aggField1Value2)); - Assert.assertTrue(e.getValue().get(0).equals(d1)); - e = it.next(); - Assert.assertTrue(e.getKey().get(0).equals(aggField1Value1)); - Assert.assertTrue(e.getValue().get(0).equals(d2)); - try{ - String value = mapper.writeValueAsString(set); - logger.info(value); - }catch(Exception ex){ - logger.error("fail with mapping", ex); - Assert.fail("fail with mapping"); - } - } + private static final Logger logger = Logger.getLogger(TestPostFlatAggregateSort.class); + + @Test + public void testSort() { + final String aggField1Value1 = "field1value1"; + final String aggField1Value2 = "field1value2"; + final String aggField2Value1 = "field2value1"; + final String aggField2Value2 = "field2value2"; + final Double d1 = new Double(1); + final Double d2 = new Double(2); + final Double d3 = new Double(3); + final Double d4 = new Double(4); + @SuppressWarnings("serial") + Map, List> result = new HashMap, List>() {{ + put(Arrays.asList(aggField1Value1, aggField2Value1), Arrays.asList(d2, d3)); + put(Arrays.asList(aggField1Value2, aggField2Value2), Arrays.asList(d1, d4)); + }}; + + // sort by function1 + SortOption so = new SortOption(); + so.setIndex(0); + so.setAscendant(true); + List sortOptions = Arrays.asList(so); + List, List>> set = + PostFlatAggregateSort.sort(result, sortOptions, 0); + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + Assert.assertEquals(2, set.size()); + Iterator, List>> it = set.iterator(); + Map.Entry, List> e = it.next(); + Assert.assertTrue(e.getKey().get(0).equals(aggField1Value2)); + Assert.assertTrue(e.getValue().get(0).equals(d1)); + e = it.next(); + Assert.assertTrue(e.getKey().get(0).equals(aggField1Value1)); + Assert.assertTrue(e.getValue().get(0).equals(d2)); + try { + String value = mapper.writeValueAsString(set); + logger.info(value); + } catch (Exception ex) { + 
logger.error("fail with mapping", ex); + Assert.fail("fail with mapping"); + } + + + // sort by function2 + so = new SortOption(); + so.setIndex(1); + so.setAscendant(true); + sortOptions = Arrays.asList(so); + set = PostFlatAggregateSort.sort(result, sortOptions, 0); + factory = new JsonFactory(); + mapper = new ObjectMapper(factory); + Assert.assertEquals(2, set.size()); + it = set.iterator(); + e = it.next(); + Assert.assertTrue(e.getKey().get(0).equals(aggField1Value1)); + Assert.assertTrue(e.getValue().get(0).equals(d2)); + e = it.next(); + Assert.assertTrue(e.getKey().get(0).equals(aggField1Value2)); + Assert.assertTrue(e.getValue().get(0).equals(d1)); + try { + String value = mapper.writeValueAsString(set); + logger.info(value); + } catch (Exception ex) { + logger.error("fail with mapping", ex); + Assert.fail("fail with mapping"); + } + } + + @Test + public void testDefaultSort() { + final String aggField1Value1 = "xyz"; + final String aggField1Value2 = "xyz"; + final String aggField2Value1 = "abd"; + final String aggField2Value2 = "abc"; + final Double d1 = new Double(1); + final Double d2 = new Double(1); + @SuppressWarnings("serial") + Map, List> result = new HashMap, List>() {{ + put(Arrays.asList(aggField1Value1, aggField2Value1), Arrays.asList(d2)); + put(Arrays.asList(aggField1Value2, aggField2Value2), Arrays.asList(d1)); + }}; + + // sort by function1 + SortOption so = new SortOption(); + so.setIndex(0); + so.setAscendant(true); + List sortOptions = Arrays.asList(so); + List, List>> set = + PostFlatAggregateSort.sort(result, sortOptions, 0); + JsonFactory factory = new JsonFactory(); + ObjectMapper mapper = new ObjectMapper(factory); + Assert.assertEquals(2, set.size()); + Iterator, List>> it = set.iterator(); + Map.Entry, List> e = it.next(); + Assert.assertTrue(e.getKey().get(0).equals(aggField1Value2)); + Assert.assertTrue(e.getValue().get(0).equals(d1)); + e = it.next(); + Assert.assertTrue(e.getKey().get(0).equals(aggField1Value1)); + 
Assert.assertTrue(e.getValue().get(0).equals(d2)); + try { + String value = mapper.writeValueAsString(set); + logger.info(value); + } catch (Exception ex) { + logger.error("fail with mapping", ex); + Assert.fail("fail with mapping"); + } + } } diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java index b72bdb7e7e..752e973f62 100755 --- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java +++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java @@ -16,160 +16,156 @@ */ package org.apache.eagle.query.aggregate.test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.eagle.query.aggregate.timeseries.TimeSeriesAggregator; +import org.apache.eagle.log.entity.test.TestEntity; import org.apache.eagle.query.aggregate.AggregateFunctionType; +import org.apache.eagle.query.aggregate.timeseries.TimeSeriesAggregator; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.eagle.log.entity.test.TestEntity; +import java.util.*; public class TestTimeSeriesAggregator { - private static final Logger LOG = LoggerFactory.getLogger(TestFlatAggregator.class); - @SuppressWarnings("serial") - private TestEntity createEntity(final String cluster, final String datacenter, final String rack, int numHosts, long numClusters, long timestamp){ - TestEntity entity = new TestEntity(); - Map tags = new HashMap(){{ - put("cluster", cluster); - put("datacenter", datacenter); - put("rack", rack); - }}; - entity.setTags(tags); - entity.setNumHosts(numHosts); - entity.setNumClusters(numClusters); - 
entity.setTimestamp(timestamp); - return entity; - } - - @Test - public void testTimeSeriesAggregator(){ - TestEntity[] entities = new TestEntity[8]; - entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2, 1386120000*1000); // bucket 0 - entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1, 1386121060*1000); // bucket 17 - entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0, 1386121070*1000); // bucket 17 - entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2, 1386122122*1000); // bucket 35 - entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 5, 1386123210*1000); // bucket 53 - entities[5] = createEntity("cluster2", "dc1", "rack234", 25, 1, 1386123480*1000); // bucket 58 - entities[6] = createEntity("cluster2", "dc1", "rack234", 12, 0, 1386123481*1000); // bucket 58 - entities[7] = createEntity("cluster1", "dc1", "rack123", 3, 2, 1386123482*1000); // bucket 58 - - TimeSeriesAggregator tsAgg = new TimeSeriesAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), - 1386120000*1000, 1386123600*1000, 60*1000); - try{ - for(TestEntity e : entities){ - tsAgg.accumulate(e); - } - Map, List> result = tsAgg.result(); - Assert.assertEquals(result.size(), 6); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "0")).get(0), (double)(entities[0].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "17")).get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "35")).get(0), (double)(entities[3].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "53")).get(0), (double)(entities[4].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "58")).get(0), (double)(entities[5].getNumHosts()+entities[6].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "58")).get(0), 
(double)(entities[7].getNumHosts()), 0.001); - - Map, List> tsResult = tsAgg.getMetric(); - Assert.assertEquals(tsResult.size(), 2); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0).length, 60); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[0], (double)(entities[0].getNumHosts()), 0.001); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[17], (double)(entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[35], (double)(entities[3].getNumHosts()), 0.001); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[53], (double)(entities[4].getNumHosts()), 0.001); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[58], (double)(entities[5].getNumHosts()+entities[6].getNumHosts()), 0.001); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[58], (double)(entities[7].getNumHosts()), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - tsAgg = new TimeSeriesAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), - 1386120000*1000, 1386123600*1000, 60*1000); - try{ - for(TestEntity e : entities){ - tsAgg.accumulate(e); - } - Map, List> result = tsAgg.result(); - Assert.assertEquals(result.size(), 5); - Assert.assertEquals(result.get(Arrays.asList("0")).get(0), (double)(entities[0].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("17")).get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("35")).get(0), (double)(entities[3].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("53")).get(0), (double)(entities[4].getNumHosts()), 0.001); - Assert.assertEquals(result.get(Arrays.asList("58")).get(0), (double)(entities[5].getNumHosts()+entities[6].getNumHosts()+entities[7].getNumHosts()), 
0.001); - - Map, List> tsResult = tsAgg.getMetric(); - Assert.assertEquals(tsResult.size(), 1); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0).length, 60); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[0], (double)(entities[0].getNumHosts()), 0.001); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[17], (double)(entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[35], (double)(entities[3].getNumHosts()), 0.001); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[53], (double)(entities[4].getNumHosts()), 0.001); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[58], (double)(entities[5].getNumHosts()+entities[6].getNumHosts()+entities[7].getNumHosts()), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - tsAgg = new TimeSeriesAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), - 1386120000*1000, 1386123600*1000, 60*1000); - try{ - for(TestEntity e : entities){ - tsAgg.accumulate(e); - } - Map, List> result = tsAgg.result(); - Assert.assertEquals(result.size(), 6); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "0")).get(0), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "17")).get(0), (double)(2), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "35")).get(0), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "53")).get(0), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster2", "58")).get(0), (double)(2), 0.001); - Assert.assertEquals(result.get(Arrays.asList("cluster1", "58")).get(0), (double)(1), 0.001); - - Map, List> tsResult = tsAgg.getMetric(); - Assert.assertEquals(tsResult.size(), 2); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0).length, 60); - 
Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[0], (double)(1), 0.001); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[17], (double)(2), 0.001); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[35], (double)(1), 0.001); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[53], (double)(1), 0.001); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[58], (double)(2), 0.001); - Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[58], (double)(1), 0.001); - }catch(Exception ex){ - LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - - tsAgg = new TimeSeriesAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), - 1386120000*1000, 1386123600*1000, 60*1000); - try{ - for(TestEntity e : entities){ - tsAgg.accumulate(e); - } - Map, List> result = tsAgg.result(); - Assert.assertEquals(result.size(), 5); - Assert.assertEquals(result.get(Arrays.asList("0")).get(0), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("17")).get(0), (double)(2), 0.001); - Assert.assertEquals(result.get(Arrays.asList("35")).get(0), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("53")).get(0), (double)(1), 0.001); - Assert.assertEquals(result.get(Arrays.asList("58")).get(0), (double)(3), 0.001); - - Map, List> tsResult = tsAgg.getMetric(); - Assert.assertEquals(tsResult.size(), 1); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0).length, 60); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[0], (double)(1), 0.001); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[17], (double)(2), 0.001); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[35], (double)(1), 0.001); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[53], (double)(1), 0.001); - Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[58], (double)(3), 0.001); - }catch(Exception ex){ 
- LOG.error("Can not aggregate", ex); - Assert.fail("Can not aggregate"); - } - } + private static final Logger LOG = LoggerFactory.getLogger(TestFlatAggregator.class); + + @SuppressWarnings("serial") + private TestEntity createEntity(final String cluster, final String datacenter, final String rack, int numHosts, long numClusters, long timestamp) { + TestEntity entity = new TestEntity(); + Map tags = new HashMap() {{ + put("cluster", cluster); + put("datacenter", datacenter); + put("rack", rack); + }}; + entity.setTags(tags); + entity.setNumHosts(numHosts); + entity.setNumClusters(numClusters); + entity.setTimestamp(timestamp); + return entity; + } + + @Test + public void testTimeSeriesAggregator() { + TestEntity[] entities = new TestEntity[8]; + entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2, 1386120000 * 1000); // bucket 0 + entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1, 1386121060 * 1000); // bucket 17 + entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0, 1386121070 * 1000); // bucket 17 + entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2, 1386122122 * 1000); // bucket 35 + entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 5, 1386123210 * 1000); // bucket 53 + entities[5] = createEntity("cluster2", "dc1", "rack234", 25, 1, 1386123480 * 1000); // bucket 58 + entities[6] = createEntity("cluster2", "dc1", "rack234", 12, 0, 1386123481 * 1000); // bucket 58 + entities[7] = createEntity("cluster1", "dc1", "rack123", 3, 2, 1386123482 * 1000); // bucket 58 + + TimeSeriesAggregator tsAgg = new TimeSeriesAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), + 1386120000 * 1000, 1386123600 * 1000, 60 * 1000); + try { + for (TestEntity e : entities) { + tsAgg.accumulate(e); + } + Map, List> result = tsAgg.result(); + Assert.assertEquals(result.size(), 6); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "0")).get(0), (double) 
(entities[0].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "17")).get(0), (double) (entities[1].getNumHosts() + entities[2].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "35")).get(0), (double) (entities[3].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "53")).get(0), (double) (entities[4].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "58")).get(0), (double) (entities[5].getNumHosts() + entities[6].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "58")).get(0), (double) (entities[7].getNumHosts()), 0.001); + + Map, List> tsResult = tsAgg.getMetric(); + Assert.assertEquals(tsResult.size(), 2); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0).length, 60); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[0], (double) (entities[0].getNumHosts()), 0.001); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[17], (double) (entities[1].getNumHosts() + entities[2].getNumHosts()), 0.001); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[35], (double) (entities[3].getNumHosts()), 0.001); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[53], (double) (entities[4].getNumHosts()), 0.001); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[58], (double) (entities[5].getNumHosts() + entities[6].getNumHosts()), 0.001); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[58], (double) (entities[7].getNumHosts()), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + tsAgg = new TimeSeriesAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), + 1386120000 * 1000, 1386123600 * 1000, 60 * 1000); + try { + for (TestEntity e : entities) { + tsAgg.accumulate(e); + } + Map, List> 
result = tsAgg.result(); + Assert.assertEquals(result.size(), 5); + Assert.assertEquals(result.get(Arrays.asList("0")).get(0), (double) (entities[0].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("17")).get(0), (double) (entities[1].getNumHosts() + entities[2].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("35")).get(0), (double) (entities[3].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("53")).get(0), (double) (entities[4].getNumHosts()), 0.001); + Assert.assertEquals(result.get(Arrays.asList("58")).get(0), (double) (entities[5].getNumHosts() + entities[6].getNumHosts() + entities[7].getNumHosts()), 0.001); + + Map, List> tsResult = tsAgg.getMetric(); + Assert.assertEquals(tsResult.size(), 1); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0).length, 60); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[0], (double) (entities[0].getNumHosts()), 0.001); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[17], (double) (entities[1].getNumHosts() + entities[2].getNumHosts()), 0.001); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[35], (double) (entities[3].getNumHosts()), 0.001); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[53], (double) (entities[4].getNumHosts()), 0.001); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[58], (double) (entities[5].getNumHosts() + entities[6].getNumHosts() + entities[7].getNumHosts()), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + tsAgg = new TimeSeriesAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), + 1386120000 * 1000, 1386123600 * 1000, 60 * 1000); + try { + for (TestEntity e : entities) { + tsAgg.accumulate(e); + } + Map, List> result = tsAgg.result(); + Assert.assertEquals(result.size(), 6); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "0")).get(0), (double) 
(1), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "17")).get(0), (double) (2), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "35")).get(0), (double) (1), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "53")).get(0), (double) (1), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster2", "58")).get(0), (double) (2), 0.001); + Assert.assertEquals(result.get(Arrays.asList("cluster1", "58")).get(0), (double) (1), 0.001); + + Map, List> tsResult = tsAgg.getMetric(); + Assert.assertEquals(tsResult.size(), 2); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0).length, 60); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[0], (double) (1), 0.001); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[17], (double) (2), 0.001); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[35], (double) (1), 0.001); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[53], (double) (1), 0.001); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[58], (double) (2), 0.001); + Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[58], (double) (1), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + + tsAgg = new TimeSeriesAggregator(new ArrayList(), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), + 1386120000 * 1000, 1386123600 * 1000, 60 * 1000); + try { + for (TestEntity e : entities) { + tsAgg.accumulate(e); + } + Map, List> result = tsAgg.result(); + Assert.assertEquals(result.size(), 5); + Assert.assertEquals(result.get(Arrays.asList("0")).get(0), (double) (1), 0.001); + Assert.assertEquals(result.get(Arrays.asList("17")).get(0), (double) (2), 0.001); + Assert.assertEquals(result.get(Arrays.asList("35")).get(0), (double) (1), 0.001); + Assert.assertEquals(result.get(Arrays.asList("53")).get(0), (double) (1), 0.001); + 
Assert.assertEquals(result.get(Arrays.asList("58")).get(0), (double) (3), 0.001); + + Map, List> tsResult = tsAgg.getMetric(); + Assert.assertEquals(tsResult.size(), 1); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0).length, 60); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[0], (double) (1), 0.001); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[17], (double) (2), 0.001); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[35], (double) (1), 0.001); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[53], (double) (1), 0.001); + Assert.assertEquals(tsResult.get(new ArrayList()).get(0)[58], (double) (3), 0.001); + } catch (Exception ex) { + LOG.error("Can not aggregate", ex); + Assert.fail("Can not aggregate"); + } + } } diff --git a/eagle-core/eagle-query/eagle-service-base/pom.xml b/eagle-core/eagle-query/eagle-service-base/pom.xml index a3fc2e7e46..dd520336da 100755 --- a/eagle-core/eagle-query/eagle-service-base/pom.xml +++ b/eagle-core/eagle-query/eagle-service-base/pom.xml @@ -70,8 +70,21 @@ test - - + + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + true + true + + + + diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/EagleExceptionWrapper.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/EagleExceptionWrapper.java index 33aabab63b..49ec5ed6ed 100644 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/EagleExceptionWrapper.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/EagleExceptionWrapper.java @@ -17,26 +17,28 @@ package org.apache.eagle.service.common; public class EagleExceptionWrapper { - private final static int MAX_DEPTH = 10; - - public static String wrap(Exception ex){ - return wrap(ex, EagleExceptionWrapper.MAX_DEPTH); - } - - public static String wrap(Exception ex, int maxdepth){ - int d = 
maxdepth; - if(d <= 0) - d = EagleExceptionWrapper.MAX_DEPTH; - int index = 0; - StringBuffer sb = new StringBuffer(); - sb.append(ex); - sb.append(System.getProperty("line.separator")); - for(StackTraceElement element : ex.getStackTrace()){ - sb.append(element.toString()); - sb.append(System.getProperty("line.separator")); - if(++index >= d) - break; - } - return sb.toString(); - } + private static final int MAX_DEPTH = 10; + + public static String wrap(Exception ex) { + return wrap(ex, EagleExceptionWrapper.MAX_DEPTH); + } + + public static String wrap(Exception ex, int maxdepth) { + int d = maxdepth; + if (d <= 0) { + d = EagleExceptionWrapper.MAX_DEPTH; + } + int index = 0; + StringBuffer sb = new StringBuffer(); + sb.append(ex); + sb.append(System.getProperty("line.separator")); + for (StackTraceElement element : ex.getStackTrace()) { + sb.append(element.toString()); + sb.append(System.getProperty("line.separator")); + if (++index >= d) { + break; + } + } + return sb.toString(); + } } diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/SplitFullScanEntityReader.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/SplitFullScanEntityReader.java index ae9ecef849..aa20aab3e0 100755 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/SplitFullScanEntityReader.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/SplitFullScanEntityReader.java @@ -16,6 +16,18 @@ */ package org.apache.eagle.service.common; +import org.apache.eagle.common.DateTimeUtil; +import org.apache.eagle.common.EagleBase64Wrapper; +import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; +import org.apache.eagle.log.entity.GenericEntityBatchReader; +import org.apache.eagle.log.entity.RowkeyBuilder; +import org.apache.eagle.log.entity.SearchCondition; +import org.apache.eagle.log.entity.meta.EntityDefinition; 
+import org.apache.eagle.log.entity.meta.EntityDefinitionManager; +import org.apache.eagle.query.ListQueryCompiler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -25,265 +37,251 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; -import org.apache.eagle.log.entity.GenericEntityBatchReader; -import org.apache.eagle.log.entity.RowkeyBuilder; -import org.apache.eagle.log.entity.SearchCondition; -import org.apache.eagle.log.entity.meta.EntityDefinition; -import org.apache.eagle.log.entity.meta.EntityDefinitionManager; -import org.apache.eagle.query.ListQueryCompiler; -import org.apache.eagle.common.DateTimeUtil; -import org.apache.eagle.common.EagleBase64Wrapper; - /** - * Support stream based entity read. Internally it splits entity fetching to multiple threads to improve + * Support stream based entity read. Internally it splits entity fetching to multiple threads to improve * the performance. However, it doesn't support multi-threading for client to read entities from result set. 
- * */ -public class SplitFullScanEntityReader { - - // class members - public static final int DEFAULT_BUFFER_SIZE = 10 * 1000; - public static final int MAX_WRITE_TIME_OUT_IN_SECONDS = 60; - private static final Logger LOG = LoggerFactory.getLogger(SplitFullScanEntityReader.class); - private static final TaggedLogAPIEntity COMPLETED_ENTITY = new TaggedLogAPIEntity(); - - // instance members - private final int splits; - private final String query; - private final long startTime; - private final long endTime; - private final String startRowkey; - private final int pageSize; - private final int bufferSize; - - public SplitFullScanEntityReader(String query, - String startTime, String endTime, - int splits, String startRowkey, int pageSize) { - this( - query, - DateTimeUtil.humanDateToSecondsWithoutException(startTime) * 1000, - DateTimeUtil.humanDateToSecondsWithoutException(endTime) * 1000, - splits, - startRowkey, - pageSize - ); - } - - public SplitFullScanEntityReader(String query, long startTime, long endTime, - int splits, String startRowkey, int pageSize) { - this(query, startTime, endTime, splits, startRowkey, pageSize, - DEFAULT_BUFFER_SIZE); - } - - public SplitFullScanEntityReader(String query, long startTime, long endTime, - int splits, String startRowkey, int pageSize, int bufferSize) { - this.query = query; - this.startTime = startTime; - this.endTime = endTime; - this.splits = splits; - this.startRowkey = startRowkey; - this.pageSize = pageSize; - this.bufferSize = bufferSize; - } - - public EntityResultSet read() throws Exception { - final EntityResultSet resultSet = new EntityResultSet(new ArrayBlockingQueue(bufferSize)); - final List readers = createSplitThreads(); - - final int size = readers.size(); - if (size > 0) { - final AtomicInteger threadCount = new AtomicInteger(size); - final AtomicInteger entityCount = new AtomicInteger(0); - for (GenericEntityBatchReader reader : readers) { - final EntityFetchThread thread = new 
EntityFetchThread(reader, threadCount, entityCount, resultSet); - thread.start(); - } - } else { - resultSet.getQueue().add(COMPLETED_ENTITY); - } - return resultSet; - } - - protected List createSplitThreads() throws Exception { - - final List readers = new ArrayList(); - final ListQueryCompiler comp = new ListQueryCompiler(query); - final EntityDefinition entityDef = EntityDefinitionManager.getEntityByServiceName(comp.serviceName()); - if (entityDef == null) { - throw new IllegalArgumentException("Invalid entity name: " + comp.serviceName()); - } - - // TODO: For now we don't support one query to query multiple partitions. In future - // if partition is defined for the entity, internally We need to spawn multiple - // queries and send one query for each search condition for each partition - final List partitionValues = comp.getQueryPartitionValues(); - partitionConstraintValidate(partitionValues, query); - - long lastTimestamp = Long.MAX_VALUE; - if (startRowkey != null) { - final byte[] lastRowkey = EagleBase64Wrapper.decode(startRowkey); - lastTimestamp = RowkeyBuilder.getTimestamp(lastRowkey, entityDef); - } - - final long duration = (endTime - startTime) / splits; - for (int i = 0; i < splits; ++i) { - - final long slotStartTime = startTime + (i * duration); - if (slotStartTime > lastTimestamp) { - // ignore this slot - continue; - } - final long slotEndTime = startTime + ((i + 1) * duration); - final SearchCondition condition = new SearchCondition(); - final String slotStartTimeString = DateTimeUtil.secondsToHumanDate(slotStartTime / 1000); - final String slotEndTimeString = DateTimeUtil.secondsToHumanDate(slotEndTime / 1000); - condition.setStartTime(slotStartTimeString); - condition.setEndTime(slotEndTimeString); - - condition.setFilter(comp.filter()); - condition.setQueryExpression(comp.getQueryExpression()); - if (partitionValues != null) { - condition.setPartitionValues(Arrays.asList(partitionValues.get(0))); - } - // Should be careful to the 
startRowkey setting. Only set startRowkey when - // lastTimestamp is within the slot time range. - if (startRowkey != null && lastTimestamp >= startTime && lastTimestamp < endTime) { - condition.setStartRowkey(startRowkey); - } - condition.setPageSize(pageSize); - - if (comp.hasAgg()) { - List groupbyFields = comp.groupbyFields(); - List outputFields = new ArrayList(); - if(groupbyFields != null){ - outputFields.addAll(groupbyFields); - } - outputFields.addAll(comp.aggregateFields()); - condition.setOutputFields(outputFields); - } else { - condition.setOutputFields(comp.outputFields()); - } - readers.add(new GenericEntityBatchReader(comp.serviceName(), condition)); - } - return readers; - } - - - private static void partitionConstraintValidate(List partitionValues, String query) { - if (partitionValues != null && partitionValues.size() > 1) { - final String[] values = partitionValues.get(0); - for (int i = 1; i < partitionValues.size(); ++i) { - final String[] tmpValues = partitionValues.get(i); - for (int j = 0; j < values.length; ++j) { - if (values[j] == null || (!values[j].equals(tmpValues[j]))) { - final String errMsg = "One query for multiple partitions is NOT allowed for now! 
Query: " + query; - LOG.error(errMsg); - throw new IllegalArgumentException(errMsg); - } - } - } - } - } - - - @SuppressWarnings("unchecked") - public static class EntityResultSet { - private static final long DEFAULT_TIMEOUT_IN_MS = 1000; - - private boolean fetchCompleted = false; - private final BlockingQueue queue; - private volatile Exception ex = null; - - public EntityResultSet(BlockingQueue queue) { - this.queue = queue; - } - - public boolean hasMoreData() { - return queue.size() > 0 || (!fetchCompleted); - } - - public ENTITY next(long timeout, TimeUnit unit) throws InterruptedException { - if (fetchCompleted) { - return null; - } - final TaggedLogAPIEntity entity = queue.poll(timeout, unit); - if (COMPLETED_ENTITY.equals(entity)) { - fetchCompleted = true; - return null; - } - return (ENTITY)entity; - } - - public ENTITY next() throws Exception { - TaggedLogAPIEntity entity = null; - while (!fetchCompleted) { - try { - entity = queue.poll(DEFAULT_TIMEOUT_IN_MS, TimeUnit.MILLISECONDS); - if (COMPLETED_ENTITY.equals(entity)) { - fetchCompleted = true; - if (ex != null) { - throw ex; - } - return null; - } - if (entity != null) { - return (ENTITY)entity; - } - } catch (InterruptedException ex) { - // Just ignore - } - } - return null; - } - - void setException(Exception ex) { - this.ex = ex; - } - - BlockingQueue getQueue() { - return queue; - } - } - - private static class EntityFetchThread extends Thread { - - private final GenericEntityBatchReader reader; - private final AtomicInteger threadCount; - private final AtomicInteger entityCount; - private final EntityResultSet resultSet; - - private EntityFetchThread(GenericEntityBatchReader reader, AtomicInteger threadCount, AtomicInteger entityCount, EntityResultSet resultSet) { - this.reader = reader; - this.threadCount = threadCount; - this.entityCount = entityCount; - this.resultSet = resultSet; - } - - @Override - public void run() { - try { - final List entities = reader.read(); - 
entityCount.addAndGet(entities.size()); - for (ENTITY entity : entities) { - if (!resultSet.getQueue().offer(entity, MAX_WRITE_TIME_OUT_IN_SECONDS, TimeUnit.SECONDS)) { - resultSet.setException(new IOException("Write entity to queue timeout")); - resultSet.getQueue().add(COMPLETED_ENTITY); - } - } - } catch (Exception ex) { - resultSet.setException(ex); - resultSet.getQueue().add(COMPLETED_ENTITY); - } finally { - final int count = threadCount.decrementAndGet(); - if (count == 0) { - resultSet.getQueue().add(COMPLETED_ENTITY); - LOG.info("Total fetched " + entityCount.get() + " entities"); - } - } - } - } +public class SplitFullScanEntityReader { + + // class members + public static final int DEFAULT_BUFFER_SIZE = 10 * 1000; + public static final int MAX_WRITE_TIME_OUT_IN_SECONDS = 60; + private static final Logger LOG = LoggerFactory.getLogger(SplitFullScanEntityReader.class); + private static final TaggedLogAPIEntity COMPLETED_ENTITY = new TaggedLogAPIEntity(); + + // instance members + private final int splits; + private final String query; + private final long startTime; + private final long endTime; + private final String startRowkey; + private final int pageSize; + private final int bufferSize; + + public SplitFullScanEntityReader(String query, + String startTime, String endTime, + int splits, String startRowkey, int pageSize) { + this( + query, + DateTimeUtil.humanDateToSecondsWithoutException(startTime) * 1000, + DateTimeUtil.humanDateToSecondsWithoutException(endTime) * 1000, + splits, + startRowkey, + pageSize + ); + } + + public SplitFullScanEntityReader(String query, long startTime, long endTime, + int splits, String startRowkey, int pageSize) { + this(query, startTime, endTime, splits, startRowkey, pageSize, + DEFAULT_BUFFER_SIZE); + } + + public SplitFullScanEntityReader(String query, long startTime, long endTime, + int splits, String startRowkey, int pageSize, int bufferSize) { + this.query = query; + this.startTime = startTime; + this.endTime = 
endTime; + this.splits = splits; + this.startRowkey = startRowkey; + this.pageSize = pageSize; + this.bufferSize = bufferSize; + } + + public EntityResultSet read() throws Exception { + final EntityResultSet resultSet = new EntityResultSet(new ArrayBlockingQueue(bufferSize)); + final List readers = createSplitThreads(); + + final int size = readers.size(); + if (size > 0) { + final AtomicInteger threadCount = new AtomicInteger(size); + final AtomicInteger entityCount = new AtomicInteger(0); + for (GenericEntityBatchReader reader : readers) { + final EntityFetchThread thread = new EntityFetchThread(reader, threadCount, entityCount, resultSet); + thread.start(); + } + } else { + resultSet.getQueue().add(COMPLETED_ENTITY); + } + return resultSet; + } + + protected List createSplitThreads() throws Exception { + + final List readers = new ArrayList(); + final ListQueryCompiler comp = new ListQueryCompiler(query); + final EntityDefinition entityDef = EntityDefinitionManager.getEntityByServiceName(comp.serviceName()); + if (entityDef == null) { + throw new IllegalArgumentException("Invalid entity name: " + comp.serviceName()); + } + + // TODO: For now we don't support one query to query multiple partitions. 
In future + // if partition is defined for the entity, internally We need to spawn multiple + // queries and send one query for each search condition for each partition + final List partitionValues = comp.getQueryPartitionValues(); + partitionConstraintValidate(partitionValues, query); + + long lastTimestamp = Long.MAX_VALUE; + if (startRowkey != null) { + final byte[] lastRowkey = EagleBase64Wrapper.decode(startRowkey); + lastTimestamp = RowkeyBuilder.getTimestamp(lastRowkey, entityDef); + } + + final long duration = (endTime - startTime) / splits; + for (int i = 0; i < splits; ++i) { + + final long slotStartTime = startTime + (i * duration); + if (slotStartTime > lastTimestamp) { + // ignore this slot + continue; + } + final long slotEndTime = startTime + ((i + 1) * duration); + final SearchCondition condition = new SearchCondition(); + final String slotStartTimeString = DateTimeUtil.secondsToHumanDate(slotStartTime / 1000); + final String slotEndTimeString = DateTimeUtil.secondsToHumanDate(slotEndTime / 1000); + condition.setStartTime(slotStartTimeString); + condition.setEndTime(slotEndTimeString); + + condition.setFilter(comp.filter()); + condition.setQueryExpression(comp.getQueryExpression()); + if (partitionValues != null) { + condition.setPartitionValues(Arrays.asList(partitionValues.get(0))); + } + // Should be careful to the startRowkey setting. Only set startRowkey when + // lastTimestamp is within the slot time range. 
+ if (startRowkey != null && lastTimestamp >= startTime && lastTimestamp < endTime) { + condition.setStartRowkey(startRowkey); + } + condition.setPageSize(pageSize); + + if (comp.hasAgg()) { + List groupbyFields = comp.groupbyFields(); + List outputFields = new ArrayList(); + if (groupbyFields != null) { + outputFields.addAll(groupbyFields); + } + outputFields.addAll(comp.aggregateFields()); + condition.setOutputFields(outputFields); + } else { + condition.setOutputFields(comp.outputFields()); + } + readers.add(new GenericEntityBatchReader(comp.serviceName(), condition)); + } + return readers; + } + + + private static void partitionConstraintValidate(List partitionValues, String query) { + if (partitionValues != null && partitionValues.size() > 1) { + final String[] values = partitionValues.get(0); + for (int i = 1; i < partitionValues.size(); ++i) { + final String[] tmpValues = partitionValues.get(i); + for (int j = 0; j < values.length; ++j) { + if (values[j] == null || (!values[j].equals(tmpValues[j]))) { + final String errMsg = "One query for multiple partitions is NOT allowed for now! 
Query: " + query; + LOG.error(errMsg); + throw new IllegalArgumentException(errMsg); + } + } + } + } + } + + + @SuppressWarnings("unchecked") + public static class EntityResultSet { + private static final long DEFAULT_TIMEOUT_IN_MS = 1000; + + private boolean fetchCompleted = false; + private final BlockingQueue queue; + private volatile Exception ex = null; + + public EntityResultSet(BlockingQueue queue) { + this.queue = queue; + } + + public boolean hasMoreData() { + return queue.size() > 0 || (!fetchCompleted); + } + + public T next(long timeout, TimeUnit unit) throws InterruptedException { + if (fetchCompleted) { + return null; + } + final TaggedLogAPIEntity entity = queue.poll(timeout, unit); + if (COMPLETED_ENTITY.equals(entity)) { + fetchCompleted = true; + return null; + } + return (T) entity; + } + + public T next() throws Exception { + TaggedLogAPIEntity entity = null; + while (!fetchCompleted) { + try { + entity = queue.poll(DEFAULT_TIMEOUT_IN_MS, TimeUnit.MILLISECONDS); + if (COMPLETED_ENTITY.equals(entity)) { + fetchCompleted = true; + if (ex != null) { + throw ex; + } + return null; + } + if (entity != null) { + return (T) entity; + } + } catch (InterruptedException ex) { + // Just ignore + } + } + return null; + } + + void setException(Exception ex) { + this.ex = ex; + } + + BlockingQueue getQueue() { + return queue; + } + } + + private static class EntityFetchThread extends Thread { + + private final GenericEntityBatchReader reader; + private final AtomicInteger threadCount; + private final AtomicInteger entityCount; + private final EntityResultSet resultSet; + + private EntityFetchThread(GenericEntityBatchReader reader, AtomicInteger threadCount, AtomicInteger entityCount, EntityResultSet resultSet) { + this.reader = reader; + this.threadCount = threadCount; + this.entityCount = entityCount; + this.resultSet = resultSet; + } + + @Override + public void run() { + try { + final List entities = reader.read(); + entityCount.addAndGet(entities.size()); 
+ for (T entity : entities) { + if (!resultSet.getQueue().offer(entity, MAX_WRITE_TIME_OUT_IN_SECONDS, TimeUnit.SECONDS)) { + resultSet.setException(new IOException("Write entity to queue timeout")); + resultSet.getQueue().add(COMPLETED_ENTITY); + } + } + } catch (Exception ex) { + resultSet.setException(ex); + resultSet.getQueue().add(COMPLETED_ENTITY); + } finally { + final int count = threadCount.decrementAndGet(); + if (count == 0) { + resultSet.getQueue().add(COMPLETED_ENTITY); + LOG.info("Total fetched " + entityCount.get() + " entities"); + } + } + } + } } diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericEntityServiceResource.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericEntityServiceResource.java index a0d8143d68..6f374dabf7 100644 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericEntityServiceResource.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericEntityServiceResource.java @@ -16,8 +16,10 @@ */ package org.apache.eagle.service.generic; +import com.sun.jersey.api.json.JSONWithPadding; import com.sun.jersey.core.header.FormDataContentDisposition; import com.sun.jersey.multipart.FormDataParam; +import org.apache.commons.lang.time.StopWatch; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity; import org.apache.eagle.log.entity.meta.EntityDefinition; @@ -28,8 +30,6 @@ import org.apache.eagle.storage.operation.*; import org.apache.eagle.storage.result.ModifyResult; import org.apache.eagle.storage.result.QueryResult; -import com.sun.jersey.api.json.JSONWithPadding; -import org.apache.commons.lang.time.StopWatch; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.type.TypeFactory; import org.slf4j.Logger; @@ -46,25 +46,24 @@ import 
java.util.List; import java.util.Map; -/** - * @since 3/18/15 - */ + @Path(GenericEntityServiceResource.ROOT_PATH) @SuppressWarnings("unchecked") public class GenericEntityServiceResource { - public final static String ROOT_PATH = "/entities"; - public final static String JSONP_PATH = "jsonp"; - public final static String DELETE_ENTITIES_PATH = "delete"; - public final static String ROWKEY_PATH = "rowkey"; + public static final String ROOT_PATH = "/entities"; + public static final String JSONP_PATH = "jsonp"; + public static final String DELETE_ENTITIES_PATH = "delete"; + public static final String ROWKEY_PATH = "rowkey"; - public final static String FIRST_TIMESTAMP = "firstTimestamp"; - public final static String LAST_TIMESTAMP = "lastTimestamp"; - public final static String ELAPSEDMS = "elapsedms"; - public final static String TOTAL_RESULTS = "totalResults"; + public static final String FIRST_TIMESTAMP = "firstTimestamp"; + public static final String LAST_TIMESTAMP = "lastTimestamp"; + public static final String ELAPSEDMS = "elapsedms"; + public static final String TOTAL_RESULTS = "totalResults"; - private final static Logger LOG = LoggerFactory.getLogger(GenericEntityServiceResource.class); + private static final Logger LOG = LoggerFactory.getLogger(GenericEntityServiceResource.class); - private List unmarshalEntitiesByServie(InputStream inputStream,EntityDefinition entityDefinition) throws IllegalAccessException, InstantiationException, IOException { + private List unmarshalEntitiesByServie(InputStream inputStream, EntityDefinition entityDefinition) throws IllegalAccessException, InstantiationException, + IOException { ObjectMapper objectMapper = new ObjectMapper(); return objectMapper.readValue(inputStream, TypeFactory.defaultInstance().constructCollectionType(LinkedList.class, entityDefinition.getEntityClass())); } @@ -77,33 +76,33 @@ private List unmarshalAsStringlist(InputStream inputStream) throws Illeg public GenericServiceAPIResponseEntity 
updateDatabase(Statement> statement) { GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity<>(); - Map meta = new HashMap<>(); + Map meta = new HashMap<>(); StopWatch stopWatch = new StopWatch(); try { stopWatch.start(); DataStorage dataStorage = DataStorageManager.getDataStorageByEagleConfig(); - if(dataStorage == null){ + if (dataStorage == null) { LOG.error("Data storage is null"); throw new IllegalDataStorageException("Data storage is null"); } ModifyResult result = statement.execute(dataStorage); - if(result.isSuccess()) { - List keys =result.getIdentifiers(); - if(keys != null) { + if (result.isSuccess()) { + List keys = result.getIdentifiers(); + if (keys != null) { response.setObj(keys, String.class); meta.put(TOTAL_RESULTS, keys.size()); } else { meta.put(TOTAL_RESULTS, 0); } - meta.put(ELAPSEDMS,stopWatch.getTime()); + meta.put(ELAPSEDMS, stopWatch.getTime()); response.setMeta(meta); response.setSuccess(true); } } catch (Exception e) { LOG.error(e.getMessage(), e); response.setException(e); - }finally { + } finally { stopWatch.stop(); } return response; @@ -119,82 +118,82 @@ public GenericServiceAPIResponseEntity updateEntities(List response = new GenericServiceAPIResponseEntity(); - Map meta = new HashMap<>(); + Map meta = new HashMap<>(); StopWatch stopWatch = new StopWatch(); try { stopWatch.start(); EntityDefinition entityDefinition = EntityDefinitionManager.getEntityByServiceName(serviceName); - if(entityDefinition == null){ - throw new IllegalArgumentException("entity definition of service "+serviceName+" not found"); + if (entityDefinition == null) { + throw new IllegalArgumentException("entity definition of service " + serviceName + " not found"); } List entities = unmarshalEntitiesByServie(inputStream, entityDefinition); DataStorage dataStorage = DataStorageManager.getDataStorageByEagleConfig(); - CreateStatement createStatement = new CreateStatement(entities,entityDefinition); + CreateStatement createStatement = new 
CreateStatement(entities, entityDefinition); ModifyResult result = createStatement.execute(dataStorage); - if(result.isSuccess()) { - List keys =result.getIdentifiers(); - if(keys != null) { + if (result.isSuccess()) { + List keys = result.getIdentifiers(); + if (keys != null) { response.setObj(keys, String.class); response.setObj(keys, String.class); - meta.put(TOTAL_RESULTS,keys.size()); - }else{ - meta.put(TOTAL_RESULTS,0); + meta.put(TOTAL_RESULTS, keys.size()); + } else { + meta.put(TOTAL_RESULTS, 0); } - meta.put(ELAPSEDMS,stopWatch.getTime()); + meta.put(ELAPSEDMS, stopWatch.getTime()); response.setMeta(meta); response.setSuccess(true); } } catch (Exception e) { LOG.error(e.getMessage(), e); response.setException(e); - }finally { + } finally { stopWatch.stop(); } return response; } @POST - @Consumes({MediaType.MULTIPART_FORM_DATA}) + @Consumes( {MediaType.MULTIPART_FORM_DATA}) @Produces(MediaType.APPLICATION_JSON) public GenericServiceAPIResponseEntity create(@FormDataParam("file") InputStream fileInputStream, @FormDataParam("file") FormDataContentDisposition cdh, @QueryParam("serviceName") String serviceName) { GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity(); - Map meta = new HashMap<>(); + Map meta = new HashMap<>(); StopWatch stopWatch = new StopWatch(); try { stopWatch.start(); EntityDefinition entityDefinition = EntityDefinitionManager.getEntityByServiceName(serviceName); - if(entityDefinition == null){ - throw new IllegalArgumentException("entity definition of service "+serviceName+" not found"); + if (entityDefinition == null) { + throw new IllegalArgumentException("entity definition of service " + serviceName + " not found"); } List entities = unmarshalEntitiesByServie(fileInputStream, entityDefinition); DataStorage dataStorage = DataStorageManager.getDataStorageByEagleConfig(); - CreateStatement createStatement = new CreateStatement(entities,entityDefinition); + CreateStatement createStatement = new 
CreateStatement(entities, entityDefinition); ModifyResult result = createStatement.execute(dataStorage); - if(result.isSuccess()) { - List keys =result.getIdentifiers(); - if(keys != null) { + if (result.isSuccess()) { + List keys = result.getIdentifiers(); + if (keys != null) { response.setObj(keys, String.class); response.setObj(keys, String.class); - meta.put(TOTAL_RESULTS,keys.size()); - }else{ - meta.put(TOTAL_RESULTS,0); + meta.put(TOTAL_RESULTS, keys.size()); + } else { + meta.put(TOTAL_RESULTS, 0); } - meta.put(ELAPSEDMS,stopWatch.getTime()); + meta.put(ELAPSEDMS, stopWatch.getTime()); response.setMeta(meta); response.setSuccess(true); } } catch (Exception e) { LOG.error(e.getMessage(), e); response.setException(e); - }finally { + } finally { stopWatch.stop(); } return response; @@ -204,33 +203,33 @@ public GenericServiceAPIResponseEntity create(@FormDataParam("file") InputStream @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public GenericServiceAPIResponseEntity update(InputStream inputStream, - @QueryParam("serviceName") String serviceName){ + @QueryParam("serviceName") String serviceName) { GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity(); DataStorage dataStorage; - Map meta = new HashMap<>(); + Map meta = new HashMap<>(); StopWatch stopWatch = new StopWatch(); try { stopWatch.start(); EntityDefinition entityDefinition = EntityDefinitionManager.getEntityByServiceName(serviceName); - if(entityDefinition == null){ - throw new IllegalArgumentException("entity definition of service "+serviceName+" not found"); + if (entityDefinition == null) { + throw new IllegalArgumentException("entity definition of service " + serviceName + " not found"); } List entities = unmarshalEntitiesByServie(inputStream, entityDefinition); dataStorage = DataStorageManager.getDataStorageByEagleConfig(); - UpdateStatement updateStatement = new UpdateStatement(entities,entityDefinition); + UpdateStatement updateStatement = 
new UpdateStatement(entities, entityDefinition); ModifyResult result = updateStatement.execute(dataStorage); - if(result.isSuccess()) { - List keys =result.getIdentifiers(); - if(keys != null) { + if (result.isSuccess()) { + List keys = result.getIdentifiers(); + if (keys != null) { response.setObj(keys, String.class); - meta.put(TOTAL_RESULTS,keys.size()); - }else{ - meta.put(TOTAL_RESULTS,0); + meta.put(TOTAL_RESULTS, keys.size()); + } else { + meta.put(TOTAL_RESULTS, 0); } - meta.put(ELAPSEDMS,stopWatch.getTime()); + meta.put(ELAPSEDMS, stopWatch.getTime()); response.setMeta(meta); response.setSuccess(true); } @@ -244,37 +243,37 @@ public GenericServiceAPIResponseEntity update(InputStream inputStream, } @PUT - @Consumes({MediaType.MULTIPART_FORM_DATA}) + @Consumes( {MediaType.MULTIPART_FORM_DATA}) @Produces(MediaType.APPLICATION_JSON) public GenericServiceAPIResponseEntity update(@FormDataParam("file") InputStream fileInputStream, @FormDataParam("file") FormDataContentDisposition cdh, - @QueryParam("serviceName") String serviceName){ + @QueryParam("serviceName") String serviceName) { GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity(); DataStorage dataStorage; - Map meta = new HashMap<>(); + Map meta = new HashMap<>(); StopWatch stopWatch = new StopWatch(); try { stopWatch.start(); EntityDefinition entityDefinition = EntityDefinitionManager.getEntityByServiceName(serviceName); - if(entityDefinition == null){ - throw new IllegalArgumentException("entity definition of service "+serviceName+" not found"); + if (entityDefinition == null) { + throw new IllegalArgumentException("entity definition of service " + serviceName + " not found"); } List entities = unmarshalEntitiesByServie(fileInputStream, entityDefinition); dataStorage = DataStorageManager.getDataStorageByEagleConfig(); - UpdateStatement updateStatement = new UpdateStatement(entities,entityDefinition); + UpdateStatement updateStatement = new UpdateStatement(entities, 
entityDefinition); ModifyResult result = updateStatement.execute(dataStorage); - if(result.isSuccess()) { - List keys =result.getIdentifiers(); - if(keys != null) { + if (result.isSuccess()) { + List keys = result.getIdentifiers(); + if (keys != null) { response.setObj(keys, String.class); - meta.put(TOTAL_RESULTS,keys.size()); - }else{ - meta.put(TOTAL_RESULTS,0); + meta.put(TOTAL_RESULTS, keys.size()); + } else { + meta.put(TOTAL_RESULTS, 0); } - meta.put(ELAPSEDMS,stopWatch.getTime()); + meta.put(ELAPSEDMS, stopWatch.getTime()); response.setMeta(meta); response.setSuccess(true); } @@ -288,36 +287,39 @@ public GenericServiceAPIResponseEntity update(@FormDataParam("file") InputStream } - /** - * @param value rowkey value + * search. + * @param value rowkey value * @param serviceName entity service name * @return GenericServiceAPIResponseEntity */ @GET @Path(ROWKEY_PATH) @Produces(MediaType.APPLICATION_JSON) - public GenericServiceAPIResponseEntity search(@QueryParam("value") String value,@QueryParam("serviceName") String serviceName){ + public GenericServiceAPIResponseEntity search(@QueryParam("value") String value, @QueryParam("serviceName") String serviceName) { GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity(); - Map meta = new HashMap<>(); + Map meta = new HashMap<>(); DataStorage dataStorage; StopWatch stopWatch = null; try { - if(serviceName == null) throw new IllegalArgumentException("serviceName is null"); - RowkeyQueryStatement queryStatement = new RowkeyQueryStatement(value,serviceName); + if (serviceName == null) { + throw new IllegalArgumentException("serviceName is null"); + } + stopWatch = new StopWatch(); stopWatch.start(); dataStorage = DataStorageManager.getDataStorageByEagleConfig(); - if(dataStorage==null){ + if (dataStorage == null) { LOG.error("Data storage is null"); throw new IllegalDataStorageException("data storage is null"); } + RowkeyQueryStatement queryStatement = new RowkeyQueryStatement(value, 
serviceName); QueryResult result = queryStatement.execute(dataStorage); - if(result.isSuccess()){ + if (result.isSuccess()) { meta.put(FIRST_TIMESTAMP, result.getFirstTimestamp()); meta.put(LAST_TIMESTAMP, result.getLastTimestamp()); meta.put(TOTAL_RESULTS, result.getSize()); - meta.put(ELAPSEDMS,stopWatch.getTime()); + meta.put(ELAPSEDMS, stopWatch.getTime()); response.setObj(result.getData()); response.setType(result.getEntityType()); response.setSuccess(true); @@ -326,14 +328,17 @@ public GenericServiceAPIResponseEntity search(@QueryParam("value") String value, } } catch (Exception e) { response.setException(e); - LOG.error(e.getMessage(),e); - }finally { - if(stopWatch!=null) stopWatch.stop(); + LOG.error(e.getMessage(), e); + } finally { + if (stopWatch != null) { + stopWatch.stop(); + } } return response; } /** + * search with serviceName. * @param serviceName entity service name * @return GenericServiceAPIResponseEntity */ @@ -341,31 +346,33 @@ public GenericServiceAPIResponseEntity search(@QueryParam("value") String value, @Path(ROWKEY_PATH) @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) - public GenericServiceAPIResponseEntity search(InputStream inputStream,@QueryParam("serviceName") String serviceName){ + public GenericServiceAPIResponseEntity search(InputStream inputStream, @QueryParam("serviceName") String serviceName) { GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity(); - Map meta = new HashMap<>(); + Map meta = new HashMap<>(); DataStorage dataStorage; StopWatch stopWatch = null; try { - if(serviceName == null) throw new IllegalArgumentException("serviceName is null"); + if (serviceName == null) { + throw new IllegalArgumentException("serviceName is null"); + } final List values = unmarshalAsStringlist(inputStream); - final RowkeyQueryStatement queryStatement = new RowkeyQueryStatement(values,serviceName); + final RowkeyQueryStatement queryStatement = new RowkeyQueryStatement(values, 
serviceName); stopWatch = new StopWatch(); stopWatch.start(); dataStorage = DataStorageManager.getDataStorageByEagleConfig(); - if(dataStorage==null){ + if (dataStorage == null) { LOG.error("Data storage is null"); throw new IllegalDataStorageException("Data storage is null"); } QueryResult result = queryStatement.execute(dataStorage); - if(result.isSuccess()){ + if (result.isSuccess()) { meta.put(FIRST_TIMESTAMP, result.getFirstTimestamp()); meta.put(LAST_TIMESTAMP, result.getLastTimestamp()); meta.put(TOTAL_RESULTS, result.getSize()); - meta.put(ELAPSEDMS,stopWatch.getTime()); + meta.put(ELAPSEDMS, stopWatch.getTime()); response.setObj(result.getData()); response.setType(result.getEntityType()); response.setSuccess(true); @@ -374,16 +381,18 @@ public GenericServiceAPIResponseEntity search(InputStream inputStream,@QueryPara } } catch (Exception e) { response.setException(e); - LOG.error(e.getMessage(),e); - }finally { - if(stopWatch!=null) stopWatch.stop(); + LOG.error(e.getMessage(), e); + } finally { + if (stopWatch != null) { + stopWatch.stop(); + } } return response; } /** - * + * search. 
* @param query * @param startTime * @param endTime @@ -403,50 +412,50 @@ public GenericServiceAPIResponseEntity search(InputStream inputStream,@QueryPara @Produces(MediaType.APPLICATION_JSON) @SuppressWarnings("unchecked") public GenericServiceAPIResponseEntity search(@QueryParam("query") String query, - @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, - @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, - @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, - @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, - @QueryParam("filterIfMissing") boolean filterIfMissing, - @QueryParam("parallel") int parallel, - @QueryParam("metricName") String metricName, - @QueryParam("verbose") Boolean verbose){ + @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, + @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, + @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, + @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, + @QueryParam("filterIfMissing") boolean filterIfMissing, + @QueryParam("parallel") int parallel, + @QueryParam("metricName") String metricName, + @QueryParam("verbose") Boolean verbose) { RawQuery rawQuery = RawQuery.build() - .query(query) - .startTime(startTime) - .endTime(endTime) - .pageSize(pageSize) - .startRowkey(startRowkey) - .treeAgg(treeAgg) - .timeSeries(timeSeries) - .intervalMin(intervalmin) - .top(top) - .filerIfMissing(filterIfMissing) - .parallel(parallel) - .metricName(metricName) - .verbose(verbose) - .done(); + .query(query) + .startTime(startTime) + .endTime(endTime) + .pageSize(pageSize) + .startRowkey(startRowkey) + .treeAgg(treeAgg) + .timeSeries(timeSeries) + .intervalMin(intervalmin) + .top(top) + .filerIfMissing(filterIfMissing) + .parallel(parallel) + .metricName(metricName) + .verbose(verbose) + 
.done(); QueryStatement queryStatement = new QueryStatement(rawQuery); GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity(); - Map meta = new HashMap<>(); + Map meta = new HashMap<>(); DataStorage dataStorage; StopWatch stopWatch = new StopWatch(); try { stopWatch.start(); dataStorage = DataStorageManager.getDataStorageByEagleConfig(); - if(dataStorage==null){ + if (dataStorage == null) { LOG.error("Data storage is null"); throw new IllegalDataStorageException("data storage is null"); } QueryResult result = queryStatement.execute(dataStorage); - if(result.isSuccess()){ + if (result.isSuccess()) { meta.put(FIRST_TIMESTAMP, result.getFirstTimestamp()); meta.put(LAST_TIMESTAMP, result.getLastTimestamp()); meta.put(TOTAL_RESULTS, result.getSize()); - meta.put(ELAPSEDMS,stopWatch.getTime()); + meta.put(ELAPSEDMS, stopWatch.getTime()); response.setObj(result.getData()); response.setType(result.getEntityType()); @@ -456,19 +465,19 @@ public GenericServiceAPIResponseEntity search(@QueryParam("query") String query, } } catch (Exception e) { response.setException(e); - LOG.error(e.getMessage(),e); + LOG.error(e.getMessage(), e); throw new WebApplicationException(e, - Response.status(Response.Status.BAD_REQUEST) + Response.status(Response.Status.BAD_REQUEST) .type(MediaType.APPLICATION_JSON_TYPE) .entity(response).build()); - }finally { + } finally { stopWatch.stop(); } return response; } /** - * + * search with jsonp. 
* @param query * @param startTime * @param endTime @@ -497,15 +506,16 @@ public JSONWithPadding searchWithJsonp(@QueryParam("query") String query, @QueryParam("parallel") int parallel, @QueryParam("metricName") String metricName, @QueryParam("verbose") Boolean verbose, - @QueryParam("callback") String callback){ + @QueryParam("callback") String callback) { GenericServiceAPIResponseEntity result = search(query, startTime, endTime, pageSize, startRowkey, treeAgg, timeSeries, intervalmin, top, filterIfMissing, parallel, metricName, verbose); - return new JSONWithPadding(new GenericEntity(result){}, callback); + return new JSONWithPadding(new GenericEntity(result) { + }, callback); } /** * TODO * - * Delete by query + *

    Delete by query. * * @return */ @@ -513,65 +523,64 @@ public JSONWithPadding searchWithJsonp(@QueryParam("query") String query, @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public GenericServiceAPIResponseEntity deleteByQuery(@QueryParam("query") String query, - @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, - @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, - @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, - @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, - @QueryParam("filterIfMissing") boolean filterIfMissing, - @QueryParam("parallel") int parallel, - @QueryParam("metricName") String metricName, - @QueryParam("verbose") Boolean verbose){ + @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, + @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, + @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, + @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, + @QueryParam("filterIfMissing") boolean filterIfMissing, + @QueryParam("parallel") int parallel, + @QueryParam("metricName") String metricName, + @QueryParam("verbose") Boolean verbose) { RawQuery rawQuery = RawQuery.build() - .query(query) - .startTime(startTime) - .endTime(endTime) - .pageSize(pageSize) - .startRowkey(startRowkey) - .treeAgg(treeAgg) - .timeSeries(timeSeries) - .intervalMin(intervalmin) - .top(top) - .filerIfMissing(filterIfMissing) - .parallel(parallel) - .metricName(metricName) - .verbose(verbose) - .done(); + .query(query) + .startTime(startTime) + .endTime(endTime) + .pageSize(pageSize) + .startRowkey(startRowkey) + .treeAgg(treeAgg) + .timeSeries(timeSeries) + .intervalMin(intervalmin) + .top(top) + .filerIfMissing(filterIfMissing) + .parallel(parallel) + .metricName(metricName) + .verbose(verbose) + .done(); 
GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity(); - Map meta = new HashMap(); + Map meta = new HashMap(); DataStorage dataStorage = null; StopWatch stopWatch = new StopWatch(); try { stopWatch.start(); dataStorage = DataStorageManager.getDataStorageByEagleConfig(); - if(dataStorage==null){ + if (dataStorage == null) { LOG.error("Data storage is null"); throw new IllegalDataStorageException("Data storage is null"); } DeleteStatement deleteStatement = new DeleteStatement(rawQuery); ModifyResult deleteResult = deleteStatement.execute(dataStorage); - if(deleteResult.isSuccess()){ + if (deleteResult.isSuccess()) { meta.put(ELAPSEDMS, stopWatch.getTime()); - response.setObj(deleteResult.getIdentifiers(),String.class); + response.setObj(deleteResult.getIdentifiers(), String.class); response.setSuccess(true); response.setMeta(meta); } return response; } catch (Exception e) { response.setException(e); - LOG.error(e.getMessage(),e); - }finally { + LOG.error(e.getMessage(), e); + } finally { stopWatch.stop(); } return response; } /** - * * Delete by entity lists * - * Use "POST /entities/delete" instead of "DELETE /entities" to walk around jersey DELETE issue for request with body + *

    Use "POST /entities/delete" instead of "DELETE /entities" to work around jersey DELETE issue for request with body. * * @param inputStream * @param serviceName * @return */ @@ -582,13 +591,15 @@ public GenericServiceAPIResponseEntity deleteByQuery(@QueryParam("query") String @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public GenericServiceAPIResponseEntity deleteEntities(InputStream inputStream, - @QueryParam("serviceName") String serviceName, - @QueryParam("byId") Boolean deleteById){ + @QueryParam("serviceName") String serviceName, + @QueryParam("byId") Boolean deleteById) { GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity(); DataStorage dataStorage = null; - Map meta = new HashMap(); + Map meta = new HashMap(); - if(deleteById == null) deleteById = false; + if (deleteById == null) { + deleteById = false; + } StopWatch stopWatch = new StopWatch(); @@ -597,12 +608,12 @@ public GenericServiceAPIResponseEntity deleteEntities(InputStream inputStream, dataStorage = DataStorageManager.getDataStorageByEagleConfig(); DeleteStatement statement = new DeleteStatement(serviceName); - if(deleteById) { - LOG.info("Deleting "+serviceName+" by ids"); + if (deleteById) { + LOG.info("Deleting " + serviceName + " by ids"); List deleteIds = unmarshalAsStringlist(inputStream); statement.setIds(deleteIds); - }else { - LOG.info("Deleting "+serviceName+" by entities"); + } else { + LOG.info("Deleting " + serviceName + " by entities"); EntityDefinition entityDefinition = EntityDefinitionManager.getEntityByServiceName(serviceName); if (entityDefinition == null) { throw new IllegalArgumentException("Entity definition of service " + serviceName + " not found"); @@ -627,7 +638,7 @@ public GenericServiceAPIResponseEntity deleteEntities(InputStream inputStream, } catch (Exception e) { LOG.error(e.getMessage(), e); response.setException(e); - }finally { + } finally { stopWatch.stop(); } return response; diff --git 
a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericObjectMapperProvider.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericObjectMapperProvider.java index 35b04d1ee8..2f62a92424 100755 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericObjectMapperProvider.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericObjectMapperProvider.java @@ -30,16 +30,18 @@ @Produces(MediaType.APPLICATION_JSON) @Singleton public class GenericObjectMapperProvider implements ContextResolver { - private final static ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + @Override public ObjectMapper getContext(Class clazz) { return OBJECT_MAPPER; } - public static void setFilter(FilterProvider filter){ + + public static void setFilter(FilterProvider filter) { OBJECT_MAPPER.setFilters(filter); } - static{ + static { setFilter(TaggedLogAPIEntity.getFilterProvider()); // set more filter here } diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/ListQueryResource.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/ListQueryResource.java index b14dc226df..92d1e46a39 100755 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/ListQueryResource.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/ListQueryResource.java @@ -16,6 +16,10 @@ */ package org.apache.eagle.service.generic; +import com.sun.jersey.api.json.JSONWithPadding; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.time.StopWatch; +import org.apache.eagle.common.DateTimeUtil; import org.apache.eagle.common.config.EagleConfigFactory; import 
org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.*; @@ -23,13 +27,9 @@ import org.apache.eagle.log.entity.meta.EntityDefinitionManager; import org.apache.eagle.query.GenericQuery; import org.apache.eagle.query.ListQueryCompiler; +import org.apache.eagle.query.aggregate.timeseries.*; import org.apache.eagle.service.common.EagleExceptionWrapper; import org.apache.eagle.storage.hbase.query.GenericQueryBuilder; -import org.apache.eagle.common.DateTimeUtil; -import com.sun.jersey.api.json.JSONWithPadding; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.time.StopWatch; -import org.apache.eagle.query.aggregate.timeseries.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,462 +43,495 @@ @Path("list") public class ListQueryResource { - private static final Logger LOG = LoggerFactory.getLogger(ListQueryResource.class); + private static final Logger LOG = LoggerFactory.getLogger(ListQueryResource.class); + + /** + * Support old interface without verbose parameter. + */ + public ListQueryAPIResponseEntity listQuery(@QueryParam("query") String query, + @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, + @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, + @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, + @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, + @QueryParam("filterIfMissing") boolean filterIfMissing, + @QueryParam("parallel") int parallel, + @QueryParam("metricName") String metricName) { + return listQuery(query, startTime, endTime, pageSize, startRowkey, treeAgg, timeSeries, intervalmin, top, filterIfMissing, parallel, metricName, true); + } + + /** + * TODO refactor the code structure, now it's messy. 
+ * + * @param query + * @param startTime + * @param endTime + * @param pageSize + * @param startRowkey + * @param treeAgg + * @param timeSeries + * @param intervalmin + * @return + */ + @GET + @Produces( {MediaType.APPLICATION_JSON}) + public ListQueryAPIResponseEntity listQuery(@QueryParam("query") String query, + @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, + @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, + @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, + @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, + @QueryParam("filterIfMissing") boolean filterIfMissing, + @QueryParam("parallel") int parallel, + @QueryParam("metricName") String metricName, + @QueryParam("verbose") Boolean verbose) { + if (!EagleConfigFactory.load().isCoprocessorEnabled()) { + return listQueryWithoutCoprocessor(query, startTime, endTime, pageSize, startRowkey, treeAgg, timeSeries, intervalmin, top, filterIfMissing, parallel, metricName, verbose); + } + + StopWatch watch = new StopWatch(); + watch.start(); + ListQueryAPIResponseEntity result = new ListQueryAPIResponseEntity(); + try { + validateQueryParameters(startRowkey, pageSize); + + // 1. Compile query to parse parameters and HBase Filter + ListQueryCompiler comp = new ListQueryCompiler(query, filterIfMissing); + + SearchCondition condition = new SearchCondition(); + condition.setOutputVerbose(verbose == null || verbose); + condition.setOutputAlias(comp.getOutputAlias()); + condition.setFilter(comp.filter()); + condition.setQueryExpression(comp.getQueryExpression()); + if (comp.sortOptions() == null && top > 0) { + LOG.warn("Parameter \"top\" is only used for sort query! 
Ignore top parameter this time since it's not a sort query"); + } - /** - * Support old interface without verbose parameter - */ - public ListQueryAPIResponseEntity listQuery(@QueryParam("query") String query, - @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, - @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, - @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, - @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, - @QueryParam("filterIfMissing") boolean filterIfMissing, - @QueryParam("parallel") int parallel, - @QueryParam("metricName") String metricName){ - return listQuery(query, startTime, endTime, pageSize, startRowkey, treeAgg, timeSeries, intervalmin, top, filterIfMissing, parallel, metricName,true); - } + // 2. Initialize partition values if set + // TODO: For now we don't support one query to query multiple partitions. In future + // if partition is defined for the entity, internally We need to spawn multiple + // queries and send one query for each search condition for each partition + final List partitionValues = comp.getQueryPartitionValues(); + if (partitionValues != null) { + condition.setPartitionValues(Arrays.asList(partitionValues.get(0))); + } - /** - * TODO refactor the code structure, now it's messy - * @param query - * @param startTime - * @param endTime - * @param pageSize - * @param startRowkey - * @param treeAgg - * @param timeSeries - * @param intervalmin - * @return - */ - @GET - @Produces({MediaType.APPLICATION_JSON}) - public ListQueryAPIResponseEntity listQuery(@QueryParam("query") String query, - @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, - @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, - @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, - @QueryParam("intervalmin") long intervalmin, 
@QueryParam("top") int top, - @QueryParam("filterIfMissing") boolean filterIfMissing, - @QueryParam("parallel") int parallel, - @QueryParam("metricName") String metricName, - @QueryParam("verbose") Boolean verbose) { - if(!EagleConfigFactory.load().isCoprocessorEnabled()) - return listQueryWithoutCoprocessor(query,startTime,endTime,pageSize,startRowkey,treeAgg,timeSeries,intervalmin,top,filterIfMissing,parallel,metricName,verbose); + // 3. Set time range if it's timeseries service + String serviceName = comp.serviceName(); + EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName); + if (ed.isTimeSeries()) { + // TODO check timestamp exists for timeseries or topology data + condition.setStartTime(startTime); + condition.setEndTime(endTime); + } - StopWatch watch = new StopWatch(); - watch.start(); - ListQueryAPIResponseEntity result = new ListQueryAPIResponseEntity(); - try{ - validateQueryParameters(startRowkey, pageSize); + // 4. Set HBase start scanning rowkey if given + condition.setStartRowkey(startRowkey); - // 1. Compile query to parse parameters and HBase Filter - ListQueryCompiler comp = new ListQueryCompiler(query, filterIfMissing); - String serviceName = comp.serviceName(); - - SearchCondition condition = new SearchCondition(); - condition.setOutputVerbose(verbose == null || verbose); - condition.setOutputAlias(comp.getOutputAlias()); - condition.setFilter(comp.filter()); - condition.setQueryExpression(comp.getQueryExpression()); - if(comp.sortOptions() == null && top > 0) { - LOG.warn("Parameter \"top\" is only used for sort query! Ignore top parameter this time since it's not a sort query"); - } + // 5. Set page size + condition.setPageSize(pageSize); - // 2. Initialize partition values if set - // TODO: For now we don't support one query to query multiple partitions. 
In future - // if partition is defined for the entity, internally We need to spawn multiple - // queries and send one query for each search condition for each partition - final List partitionValues = comp.getQueryPartitionValues(); - if (partitionValues != null) { - condition.setPartitionValues(Arrays.asList(partitionValues.get(0))); - } + // 6. Generate output,group-by,aggregated fields + List outputFields = comp.outputFields(); + List groupbyFields = comp.groupbyFields(); + List aggregateFields = comp.aggregateFields(); - // 3. Set time range if it's timeseries service - EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName); - if(ed.isTimeSeries()){ - // TODO check timestamp exists for timeseries or topology data - condition.setStartTime(startTime); - condition.setEndTime(endTime); - } + // Start to generate output fields list { + condition.setOutputAll(comp.isOutputAll()); + if (outputFields == null) { + outputFields = new ArrayList(); + } + if (comp.hasAgg()) { + if (groupbyFields != null) { + outputFields.addAll(groupbyFields); + } + if (aggregateFields != null) { + outputFields.addAll(aggregateFields); + } + if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName) && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) { + outputFields.add(GenericMetricEntity.VALUE_FIELD); + } + } + Set filterFields = comp.getFilterFields(); + if (filterFields != null) { + outputFields.addAll(filterFields); + } + condition.setOutputFields(outputFields); + if (comp.isOutputAll()) { + LOG.info("Output fields: ALL"); + } else { + LOG.info("Output fields: " + StringUtils.join(outputFields, ",")); + } + // } END - // 4. Set HBase start scanning rowkey if given - condition.setStartRowkey(startRowkey); + // 7. 
Build GenericQuery + GenericQuery reader = GenericQueryBuilder + .select(outputFields) + .from(serviceName, metricName).where(condition) + .groupBy( + comp.hasAgg(), + groupbyFields, + comp.aggregateFunctionTypes(), + aggregateFields) + .timeSeries(timeSeries, intervalmin) + .treeAgg(treeAgg) + .orderBy(comp.sortOptions(), comp.sortFunctions(), comp.sortFields()).top(top) + .parallel(parallel) + .build(); - // 5. Set page size - condition.setPageSize(pageSize); + // 8. Fill response object + List entities = reader.result(); + result.setObj(entities); + result.setTotalResults(entities.size()); + result.setSuccess(true); + result.setLastTimestamp(reader.getLastTimestamp()); + result.setFirstTimestamp(reader.getFirstTimeStamp()); + } catch (Exception ex) { + LOG.error("Fail executing list query", ex); + result.setException(EagleExceptionWrapper.wrap(ex)); + result.setSuccess(false); + return result; + } finally { + watch.stop(); + result.setElapsedms(watch.getTime()); + } + LOG.info("Query done " + watch.getTime() + " ms"); + return result; + } - // 6. Generate output,group-by,aggregated fields - List outputFields = comp.outputFields(); - List groupbyFields = comp.groupbyFields(); - List aggregateFields = comp.aggregateFields(); - Set filterFields = comp.getFilterFields(); + /** + * TODO remove the legacy deprecated implementation of listQueryWithoutCoprocessor. 
+ * + * @param query + * @param startTime + * @param endTime + * @param pageSize + * @param startRowkey + * @param treeAgg + * @param timeSeries + * @param intervalmin + * @return + * @see #listQuery(String, String, String, int, String, boolean, boolean, long, int, boolean, int, String, Boolean) + */ + @GET + @Path("/legacy") + @Produces( {MediaType.APPLICATION_JSON}) + @Deprecated + public ListQueryAPIResponseEntity listQueryWithoutCoprocessor(@QueryParam("query") String query, + @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, + @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, + @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, + @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, + @QueryParam("filterIfMissing") boolean filterIfMissing, + @QueryParam("parallel") int parallel, + @QueryParam("metricName") String metricName, + @QueryParam("verbose") Boolean verbose) { + StopWatch watch = new StopWatch(); + watch.start(); + ListQueryAPIResponseEntity result = new ListQueryAPIResponseEntity(); + try { + validateQueryParameters(startRowkey, pageSize); + ListQueryCompiler comp = new ListQueryCompiler(query, filterIfMissing); - // Start to generate output fields list { - condition.setOutputAll(comp.isOutputAll()); - if(outputFields == null) outputFields = new ArrayList(); - if(comp.hasAgg()){ - if(groupbyFields != null) outputFields.addAll(groupbyFields); - if(aggregateFields != null) outputFields.addAll(aggregateFields); - if(GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName) && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)){ - outputFields.add(GenericMetricEntity.VALUE_FIELD); - } - } - if(filterFields!=null) outputFields.addAll(filterFields); - condition.setOutputFields(outputFields); - if(comp.isOutputAll()){ - LOG.info("Output fields: ALL"); - }else{ - LOG.info("Output fields: " + StringUtils.join(outputFields, ",")); - 
} - // } END + SearchCondition condition = new SearchCondition(); + condition.setFilter(comp.filter()); + condition.setQueryExpression(comp.getQueryExpression()); + if (comp.sortOptions() == null && top > 0) { + LOG.warn("Parameter \"top\" is only used for sort query! Ignore top parameter this time since it's not a sort query"); + } - // 7. Build GenericQuery - GenericQuery reader = GenericQueryBuilder - .select(outputFields) - .from(serviceName, metricName).where(condition) - .groupBy( - comp.hasAgg(), - groupbyFields, - comp.aggregateFunctionTypes(), - aggregateFields) - .timeSeries(timeSeries, intervalmin) - .treeAgg(treeAgg) - .orderBy(comp.sortOptions(),comp.sortFunctions(),comp.sortFields()).top(top) - .parallel(parallel) - .build(); - - // 8. Fill response object - List entities = reader.result(); - result.setObj(entities); - result.setTotalResults(entities.size()); - result.setSuccess(true); - result.setLastTimestamp(reader.getLastTimestamp()); - result.setFirstTimestamp(reader.getFirstTimeStamp()); - }catch(Exception ex){ - LOG.error("Fail executing list query", ex); - result.setException(EagleExceptionWrapper.wrap(ex)); - result.setSuccess(false); - return result; - }finally{ - watch.stop(); - result.setElapsedms(watch.getTime()); - } - LOG.info("Query done " + watch.getTime() + " ms"); - return result; - } - - /** - * TODO remove the legacy deprecated implementation of listQueryWithoutCoprocessor - * - * @see #listQuery(String, String, String, int, String, boolean, boolean, long, int, boolean, int, String,Boolean) - * - * @param query - * @param startTime - * @param endTime - * @param pageSize - * @param startRowkey - * @param treeAgg - * @param timeSeries - * @param intervalmin - * @return - */ - @GET - @Path("/legacy") - @Produces({MediaType.APPLICATION_JSON}) - @Deprecated - public ListQueryAPIResponseEntity listQueryWithoutCoprocessor(@QueryParam("query") String query, - @QueryParam("startTime") String startTime, @QueryParam("endTime") String 
endTime, - @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, - @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, - @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, - @QueryParam("filterIfMissing") boolean filterIfMissing, - @QueryParam("parallel") int parallel, - @QueryParam("metricName") String metricName, - @QueryParam("verbose") Boolean verbose) { - StopWatch watch = new StopWatch(); - watch.start(); - ListQueryAPIResponseEntity result = new ListQueryAPIResponseEntity(); - try{ - validateQueryParameters(startRowkey, pageSize); - ListQueryCompiler comp = new ListQueryCompiler(query, filterIfMissing); - String serviceName = comp.serviceName(); + // TODO: For now we don't support one query to query multiple partitions. In future + // if partition is defined for the entity, internally We need to spawn multiple + // queries and send one query for each search condition for each partition + final List partitionValues = comp.getQueryPartitionValues(); + if (partitionValues != null) { + condition.setPartitionValues(Arrays.asList(partitionValues.get(0))); + } + String serviceName = comp.serviceName(); + EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName); + if (ed.isTimeSeries()) { + // TODO check timestamp exists for timeseries or topology data + condition.setStartTime(startTime); + condition.setEndTime(endTime); + } + condition.setOutputVerbose(verbose == null || verbose); + condition.setOutputAlias(comp.getOutputAlias()); + condition.setOutputAll(comp.isOutputAll()); + condition.setStartRowkey(startRowkey); + condition.setPageSize(pageSize); - SearchCondition condition = new SearchCondition(); - condition.setFilter(comp.filter()); - condition.setQueryExpression(comp.getQueryExpression()); - if(comp.sortOptions() == null && top > 0) { - LOG.warn("Parameter \"top\" is only used for sort query! 
Ignore top parameter this time since it's not a sort query"); - } + List outputFields = comp.outputFields(); + if (outputFields == null) { + outputFields = new ArrayList(); + } - // TODO: For now we don't support one query to query multiple partitions. In future - // if partition is defined for the entity, internally We need to spawn multiple - // queries and send one query for each search condition for each partition - final List partitionValues = comp.getQueryPartitionValues(); - if (partitionValues != null) { - condition.setPartitionValues(Arrays.asList(partitionValues.get(0))); - } - EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName); - if(ed.isTimeSeries()){ - // TODO check timestamp exists for timeseries or topology data - condition.setStartTime(startTime); - condition.setEndTime(endTime); - } - condition.setOutputVerbose(verbose==null || verbose ); - condition.setOutputAlias(comp.getOutputAlias()); - condition.setOutputAll(comp.isOutputAll()); - condition.setStartRowkey(startRowkey); - condition.setPageSize(pageSize); + /** + * TODO ugly logic, waiting for refactoring. 
+ */ + if (!comp.hasAgg() && !serviceName.equals(GenericMetricEntity.GENERIC_METRIC_SERVICE)) { // pure list query + // List outputFields = comp.outputFields(); + Set filterFields = comp.getFilterFields(); + if (filterFields != null) { + outputFields.addAll(filterFields); + } + condition.setOutputFields(outputFields); + if (condition.isOutputAll()) { + LOG.info("Output: ALL"); + } else { + LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); + } + GenericEntityBatchReader reader = new GenericEntityBatchReader(serviceName, condition); + List entityList = reader.read(); + result.setObj(entityList); + result.setTotalResults(entityList.size()); + result.setSuccess(true); + result.setLastTimestamp(reader.getLastTimestamp()); + result.setFirstTimestamp(reader.getFirstTimestamp()); + } else if (!comp.hasAgg() && serviceName.equals(GenericMetricEntity.GENERIC_METRIC_SERVICE)) { + // validate metric name + if (metricName == null || metricName.isEmpty()) { + throw new IllegalArgumentException("metricName should not be empty for metric list query"); + } + // List outputFields = comp.outputFields(); + Set filterFields = comp.getFilterFields(); + if (filterFields != null) { + outputFields.addAll(filterFields); + } + condition.setOutputFields(outputFields); + if (condition.isOutputAll()) { + LOG.info("Output: ALL"); + } else { + LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); + } + GenericMetricEntityBatchReader reader = new GenericMetricEntityBatchReader(metricName, condition); + List entityList = reader.read(); + result.setObj(entityList); + result.setTotalResults(entityList.size()); + result.setSuccess(true); + result.setLastTimestamp(reader.getLastTimestamp()); + result.setFirstTimestamp(reader.getFirstTimestamp()); + } else if (!treeAgg && !timeSeries && parallel <= 0) { // non time-series based aggregate query, not hierarchical + List groupbyFields = comp.groupbyFields(); + List aggregateFields = comp.aggregateFields(); + 
Set filterFields = comp.getFilterFields(); + // List outputFields = new ArrayList(); + if (groupbyFields != null) { + outputFields.addAll(groupbyFields); + } + if (filterFields != null) { + outputFields.addAll(filterFields); + } + outputFields.addAll(aggregateFields); - List outputFields = comp.outputFields(); - if(outputFields == null) outputFields = new ArrayList(); + if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName) && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) { + outputFields.add(GenericMetricEntity.VALUE_FIELD); + } - /** - * TODO ugly logic, waiting for refactoring - */ - if(!comp.hasAgg() && !serviceName.equals(GenericMetricEntity.GENERIC_METRIC_SERVICE)){ // pure list query -// List outputFields = comp.outputFields(); - Set filterFields = comp.getFilterFields(); - if(filterFields != null) outputFields.addAll(filterFields); - condition.setOutputFields(outputFields); - if(condition.isOutputAll()){ - LOG.info("Output: ALL"); - }else{ - LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); - } - GenericEntityBatchReader reader = new GenericEntityBatchReader(serviceName, condition); - List entityList = reader.read(); - result.setObj(entityList); - result.setTotalResults(entityList.size()); - result.setSuccess(true); - result.setLastTimestamp(reader.getLastTimestamp()); - result.setFirstTimestamp(reader.getFirstTimestamp()); - }else if(!comp.hasAgg() && serviceName.equals(GenericMetricEntity.GENERIC_METRIC_SERVICE)){ - // validate metric name - if(metricName == null || metricName.isEmpty()){ - throw new IllegalArgumentException("metricName should not be empty for metric list query"); - } -// List outputFields = comp.outputFields(); - Set filterFields = comp.getFilterFields(); - if(filterFields != null) outputFields.addAll(filterFields); - condition.setOutputFields(outputFields); - if(condition.isOutputAll()){ - LOG.info("Output: ALL"); - }else{ - LOG.info("Output: " + 
StringUtils.join(condition.getOutputFields(), ", ")); - } - GenericMetricEntityBatchReader reader = new GenericMetricEntityBatchReader(metricName, condition); - List entityList = reader.read(); - result.setObj(entityList); - result.setTotalResults(entityList.size()); - result.setSuccess(true); - result.setLastTimestamp(reader.getLastTimestamp()); - result.setFirstTimestamp(reader.getFirstTimestamp()); - } - else if(!treeAgg && !timeSeries && parallel <= 0 ){ // non time-series based aggregate query, not hierarchical - List groupbyFields = comp.groupbyFields(); - List aggregateFields = comp.aggregateFields(); - Set filterFields = comp.getFilterFields(); -// List outputFields = new ArrayList(); - if(groupbyFields != null) outputFields.addAll(groupbyFields); - if(filterFields != null) outputFields.addAll(filterFields); - outputFields.addAll(aggregateFields); + StreamReader reader = null; + if (ed.getMetricDefinition() == null) { + reader = new GenericEntityStreamReader(serviceName, condition); + } else { // metric aggregation need metric reader + reader = new GenericMetricEntityDecompactionStreamReader(metricName, condition); + } + condition.setOutputFields(outputFields); + if (condition.isOutputAll()) { + LOG.info("Output: ALL"); + } else { + LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); + } + FlatAggregator agg = new FlatAggregator(groupbyFields, comp.aggregateFunctionTypes(), comp.aggregateFields()); + reader.register(agg); + reader.readAsStream(); + ArrayList, List>> obj = new ArrayList, List>>(); + obj.addAll(agg.result().entrySet()); + if (comp.sortOptions() == null) { + result.setObj(obj); + } else { // has sort options + result.setObj(PostFlatAggregateSort.sort(agg.result(), comp.sortOptions(), top)); + } + result.setTotalResults(0); + result.setSuccess(true); + result.setLastTimestamp(reader.getLastTimestamp()); + result.setFirstTimestamp(reader.getFirstTimestamp()); + } else if (!treeAgg && !timeSeries && parallel > 0) { // TODO 
ugly branch, let us refactor + List groupbyFields = comp.groupbyFields(); + List aggregateFields = comp.aggregateFields(); + Set filterFields = comp.getFilterFields(); + // List outputFields = new ArrayList(); + if (groupbyFields != null) { + outputFields.addAll(groupbyFields); + } + if (filterFields != null) { + outputFields.addAll(filterFields); + } + outputFields.addAll(aggregateFields); + if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName) && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) { + outputFields.add(GenericMetricEntity.VALUE_FIELD); + } + condition.setOutputFields(outputFields); + if (condition.isOutputAll()) { + LOG.info("Output: ALL"); + } else { + LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); + } + FlatAggregator agg = new FlatAggregator(groupbyFields, comp.aggregateFunctionTypes(), comp.aggregateFields()); + EntityCreationListener listener = EntityCreationListenerFactory.synchronizedEntityCreationListener(agg); + StreamReader reader = new GenericEntityStreamReaderMT(serviceName, condition, parallel); + reader.register(listener); + reader.readAsStream(); + ArrayList, List>> obj = new ArrayList, List>>(); + obj.addAll(agg.result().entrySet()); + if (comp.sortOptions() == null) { + result.setObj(obj); + } else { // has sort options + result.setObj(PostFlatAggregateSort.sort(agg.result(), comp.sortOptions(), top)); + } + result.setTotalResults(0); + result.setSuccess(true); + result.setLastTimestamp(reader.getLastTimestamp()); + result.setFirstTimestamp(reader.getFirstTimestamp()); + } else if (!treeAgg && timeSeries) { // time-series based aggregate query, not hierarchical + List groupbyFields = comp.groupbyFields(); + List sortFields = comp.sortFields(); + Set filterFields = comp.getFilterFields(); + // List outputFields = new ArrayList(); + if (groupbyFields != null) { + outputFields.addAll(groupbyFields); + } + if (filterFields != null) { + outputFields.addAll(filterFields); + } + if 
(sortFields != null) { + outputFields.addAll(sortFields); + } + List aggregateFields = comp.aggregateFields(); + outputFields.addAll(aggregateFields); + if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName) && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) { + outputFields.add(GenericMetricEntity.VALUE_FIELD); + } + StreamReader reader = null; + if (ed.getMetricDefinition() == null) { + if (parallel <= 0) { // TODO ugly quick win + reader = new GenericEntityStreamReader(serviceName, condition); + } else { + reader = new GenericEntityStreamReaderMT(serviceName, condition, parallel); + } + } else { // metric aggregation need metric reader + reader = new GenericMetricEntityDecompactionStreamReader(metricName, condition); + if (!outputFields.contains(GenericMetricEntity.VALUE_FIELD)) { + outputFields.add(GenericMetricEntity.VALUE_FIELD); + } + } + condition.setOutputFields(outputFields); + if (condition.isOutputAll()) { + LOG.info("Output: ALL"); + } else { + LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); + } + TimeSeriesAggregator tsAgg = new TimeSeriesAggregator(groupbyFields, comp.aggregateFunctionTypes(), aggregateFields, + DateTimeUtil.humanDateToDate(condition.getStartTime()).getTime(), DateTimeUtil.humanDateToDate(condition.getEndTime()).getTime(), intervalmin * 60 * 1000); + if (parallel <= 0) { + reader.register(tsAgg); + } else { + EntityCreationListener listener = EntityCreationListenerFactory.synchronizedEntityCreationListener(tsAgg); + reader.register(listener); + } + // for sorting + FlatAggregator sortAgg = null; + if (comp.sortOptions() != null) { + sortAgg = new FlatAggregator(groupbyFields, comp.sortFunctions(), comp.sortFields()); + if (parallel <= 0) { + reader.register(sortAgg); + } else { + EntityCreationListener listener = EntityCreationListenerFactory.synchronizedEntityCreationListener(sortAgg); + reader.register(listener); + } + } + reader.readAsStream(); + ArrayList, List>> obj = new 
ArrayList, List>>(); + obj.addAll(tsAgg.getMetric().entrySet()); + if (comp.sortOptions() == null) { + result.setObj(obj); + } else { // has sort options + result.setObj(TimeSeriesPostFlatAggregateSort.sort(sortAgg.result(), tsAgg.getMetric(), comp.sortOptions(), top)); + } + result.setTotalResults(0); + result.setSuccess(true); + result.setLastTimestamp(reader.getLastTimestamp()); + result.setFirstTimestamp(reader.getFirstTimestamp()); + } else { // use hierarchical aggregate mode + List groupbyFields = comp.groupbyFields(); + List aggregateFields = comp.aggregateFields(); + Set filterFields = comp.getFilterFields(); + // List outputFields = new ArrayList(); + if (groupbyFields != null) { + outputFields.addAll(groupbyFields); + } + if (filterFields != null) { + outputFields.addAll(filterFields); + } + outputFields.addAll(aggregateFields); + if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName) && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) { + outputFields.add(GenericMetricEntity.VALUE_FIELD); + } + condition.setOutputFields(outputFields); + if (condition.isOutputAll()) { + LOG.info("Output: ALL"); + } else { + LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); + } + GenericEntityStreamReader reader = new GenericEntityStreamReader(serviceName, condition); + HierarchicalAggregator agg = new HierarchicalAggregator(groupbyFields, comp.aggregateFunctionTypes(), comp.aggregateFields()); + reader.register(agg); + reader.readAsStream(); + if (comp.sortOptions() == null) { + result.setObj(agg.result()); + } else { // has sort options + result.setObj(PostHierarchicalAggregateSort.sort(agg.result(), comp.sortOptions())); + } + result.setTotalResults(0); + result.setSuccess(true); + result.setLastTimestamp(reader.getLastTimestamp()); + result.setFirstTimestamp(reader.getFirstTimestamp()); + } + } catch (Exception ex) { + LOG.error("Fail executing list query: " + query, ex); + 
result.setException(EagleExceptionWrapper.wrap(ex)); + result.setSuccess(false); + return result; + } finally { + watch.stop(); + result.setElapsedms(watch.getTime()); + } + LOG.info("Query done " + watch.getTime() + " ms"); + return result; + } - if(GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName) && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)){ - outputFields.add(GenericMetricEntity.VALUE_FIELD); - } - FlatAggregator agg = new FlatAggregator(groupbyFields, comp.aggregateFunctionTypes(), comp.aggregateFields()); - StreamReader reader = null; - if(ed.getMetricDefinition() == null){ - reader = new GenericEntityStreamReader(serviceName, condition); - }else{ // metric aggregation need metric reader - reader = new GenericMetricEntityDecompactionStreamReader(metricName, condition); - } - condition.setOutputFields(outputFields); - if(condition.isOutputAll()){ - LOG.info("Output: ALL"); - }else{ - LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); - } - reader.register(agg); - reader.readAsStream(); - ArrayList, List>> obj = new ArrayList, List>>(); - obj.addAll(agg.result().entrySet()); - if(comp.sortOptions() == null){ - result.setObj(obj); - }else{ // has sort options - result.setObj(PostFlatAggregateSort.sort(agg.result(), comp.sortOptions(), top)); - } - result.setTotalResults(0); - result.setSuccess(true); - result.setLastTimestamp(reader.getLastTimestamp()); - result.setFirstTimestamp(reader.getFirstTimestamp()); - }else if(!treeAgg && !timeSeries && parallel > 0){ // TODO ugly branch, let us refactor - List groupbyFields = comp.groupbyFields(); - List aggregateFields = comp.aggregateFields(); - Set filterFields = comp.getFilterFields(); -// List outputFields = new ArrayList(); - if(groupbyFields != null) outputFields.addAll(groupbyFields); - if(filterFields != null) outputFields.addAll(filterFields); - outputFields.addAll(aggregateFields); - if(GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName) && 
!outputFields.contains(GenericMetricEntity.VALUE_FIELD)){ - outputFields.add(GenericMetricEntity.VALUE_FIELD); - } - condition.setOutputFields(outputFields); - if(condition.isOutputAll()){ - LOG.info("Output: ALL"); - }else{ - LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); - } - FlatAggregator agg = new FlatAggregator(groupbyFields, comp.aggregateFunctionTypes(), comp.aggregateFields()); - EntityCreationListener listener = EntityCreationListenerFactory.synchronizedEntityCreationListener(agg); - StreamReader reader = new GenericEntityStreamReaderMT(serviceName, condition, parallel); - reader.register(listener); - reader.readAsStream(); - ArrayList, List>> obj = new ArrayList, List>>(); - obj.addAll(agg.result().entrySet()); - if(comp.sortOptions() == null){ - result.setObj(obj); - }else{ // has sort options - result.setObj(PostFlatAggregateSort.sort(agg.result(), comp.sortOptions(), top)); - } - result.setTotalResults(0); - result.setSuccess(true); - result.setLastTimestamp(reader.getLastTimestamp()); - result.setFirstTimestamp(reader.getFirstTimestamp()); - }else if(!treeAgg && timeSeries){ // time-series based aggregate query, not hierarchical - List groupbyFields = comp.groupbyFields(); - List sortFields = comp.sortFields(); - List aggregateFields = comp.aggregateFields(); - Set filterFields = comp.getFilterFields(); -// List outputFields = new ArrayList(); - if(groupbyFields != null) outputFields.addAll(groupbyFields); - if(filterFields != null) outputFields.addAll(filterFields); - if (sortFields != null) outputFields.addAll(sortFields); - outputFields.addAll(aggregateFields); - if(GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName) && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)){ - outputFields.add(GenericMetricEntity.VALUE_FIELD); - } - StreamReader reader = null; - if(ed.getMetricDefinition() == null){ - if(parallel <= 0){ // TODO ugly quick win - reader = new GenericEntityStreamReader(serviceName, 
condition); - }else{ - reader = new GenericEntityStreamReaderMT(serviceName, condition, parallel); - } - }else{ // metric aggregation need metric reader - reader = new GenericMetricEntityDecompactionStreamReader(metricName, condition); - if(!outputFields.contains(GenericMetricEntity.VALUE_FIELD)){ - outputFields.add(GenericMetricEntity.VALUE_FIELD); - } - } - condition.setOutputFields(outputFields); - if(condition.isOutputAll()){ - LOG.info("Output: ALL"); - }else{ - LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); - } - TimeSeriesAggregator tsAgg = new TimeSeriesAggregator(groupbyFields, comp.aggregateFunctionTypes(), aggregateFields, - DateTimeUtil.humanDateToDate(condition.getStartTime()).getTime(), DateTimeUtil.humanDateToDate(condition.getEndTime()).getTime(), intervalmin*60*1000); - if(parallel <= 0){ - reader.register(tsAgg); - }else{ - EntityCreationListener listener = EntityCreationListenerFactory.synchronizedEntityCreationListener(tsAgg); - reader.register(listener); - } - // for sorting - FlatAggregator sortAgg = null; - if (comp.sortOptions() != null) { - sortAgg = new FlatAggregator(groupbyFields, comp.sortFunctions(), comp.sortFields()); - if(parallel <= 0){ - reader.register(sortAgg); - }else{ - EntityCreationListener listener = EntityCreationListenerFactory.synchronizedEntityCreationListener(sortAgg); - reader.register(listener); - } - } - reader.readAsStream(); - ArrayList, List>> obj = new ArrayList, List>>(); - obj.addAll(tsAgg.getMetric().entrySet()); - if(comp.sortOptions() == null){ - result.setObj(obj); - }else{ // has sort options - result.setObj(TimeSeriesPostFlatAggregateSort.sort(sortAgg.result(), tsAgg.getMetric(), comp.sortOptions(), top)); - } - result.setTotalResults(0); - result.setSuccess(true); - result.setLastTimestamp(reader.getLastTimestamp()); - result.setFirstTimestamp(reader.getFirstTimestamp()); - } - else{ // use hierarchical aggregate mode - List groupbyFields = comp.groupbyFields(); - List 
aggregateFields = comp.aggregateFields(); - Set filterFields = comp.getFilterFields(); -// List outputFields = new ArrayList(); - if(groupbyFields != null) outputFields.addAll(groupbyFields); - if(filterFields != null) outputFields.addAll(filterFields); - outputFields.addAll(aggregateFields); - if(GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName) && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)){ - outputFields.add(GenericMetricEntity.VALUE_FIELD); - } - condition.setOutputFields(outputFields); - if(condition.isOutputAll()){ - LOG.info("Output: ALL"); - }else{ - LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); - } - GenericEntityStreamReader reader = new GenericEntityStreamReader(serviceName, condition); - HierarchicalAggregator agg = new HierarchicalAggregator(groupbyFields, comp.aggregateFunctionTypes(), comp.aggregateFields()); - reader.register(agg); - reader.readAsStream(); - if(comp.sortOptions() == null){ - result.setObj(agg.result()); - }else{ // has sort options - result.setObj(PostHierarchicalAggregateSort.sort(agg.result(), comp.sortOptions())); - } - result.setTotalResults(0); - result.setSuccess(true); - result.setLastTimestamp(reader.getLastTimestamp()); - result.setFirstTimestamp(reader.getFirstTimestamp()); - } - }catch(Exception ex){ - LOG.error("Fail executing list query: " + query, ex); - result.setException(EagleExceptionWrapper.wrap(ex)); - result.setSuccess(false); - return result; - }finally{ - watch.stop(); - result.setElapsedms(watch.getTime()); - } - LOG.info("Query done " + watch.getTime() + " ms"); - return result; - } + @GET + @Path("/jsonp") + @Produces( {"application/x-javascript", "application/json", "application/xml"}) + public JSONWithPadding listQueryWithJsonp(@QueryParam("query") String query, + @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, + @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, + 
@QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, + @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, + @QueryParam("filterIfMissing") boolean filterIfMissing, + @QueryParam("parallel") int parallel, + @QueryParam("update") String callback, + @QueryParam("verbose") boolean verbose) { + ListQueryAPIResponseEntity result = listQuery(query, startTime, endTime, pageSize, startRowkey, treeAgg, timeSeries, intervalmin, top, filterIfMissing, parallel, null, verbose); + return new JSONWithPadding(new GenericEntity(result) { + }, callback); + } + private void validateQueryParameters(String startRowkey, int pageSize) { + if (pageSize <= 0) { + throw new IllegalArgumentException("Positive pageSize value should be always provided. " + + "The list query format is:\n" + "eagle-service/rest/list?query=&pageSize=10&startRowkey=xyz&startTime=xxx&endTime=xxx"); + } - @GET - @Path("/jsonp") - @Produces({"application/x-javascript", "application/json", "application/xml"}) - public JSONWithPadding listQueryWithJsonp(@QueryParam("query") String query, - @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime, - @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey, - @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries, - @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top, - @QueryParam("filterIfMissing") boolean filterIfMissing, - @QueryParam("parallel") int parallel, - @QueryParam("update") String callback, - @QueryParam("verbose") boolean verbose) { - ListQueryAPIResponseEntity result = listQuery(query, startTime, endTime, pageSize, startRowkey, treeAgg, timeSeries, intervalmin, top, filterIfMissing, parallel, null,verbose); - return new JSONWithPadding(new GenericEntity(result){}, callback); - } - - private void validateQueryParameters(String startRowkey, int pageSize){ - if(pageSize <= 0){ - throw new 
IllegalArgumentException("Positive pageSize value should be always provided. The list query format is:\n" + "eagle-service/rest/list?query=&pageSize=10&startRowkey=xyz&startTime=xxx&endTime=xxx"); - } - - if(startRowkey != null && startRowkey.equals("null")){ - LOG.warn("startRowkey being null string is not same to startRowkey == null"); - } - return; - } + if (startRowkey != null && startRowkey.equals("null")) { + LOG.warn("startRowkey being null string is not same to startRowkey == null"); + } + return; + } } diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/MetadataResource.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/MetadataResource.java index d5ab87fd9f..2c5e1e7b4c 100755 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/MetadataResource.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/MetadataResource.java @@ -16,15 +16,14 @@ */ package org.apache.eagle.service.generic; -import org.apache.eagle.log.entity.meta.EntityDefinition; -import org.apache.eagle.log.entity.meta.EntityDefinitionManager; -import org.apache.eagle.log.entity.meta.IndexDefinition; -import org.apache.eagle.log.entity.meta.MetricDefinition; import com.sun.jersey.api.model.AbstractResource; import com.sun.jersey.api.model.AbstractResourceMethod; import com.sun.jersey.api.model.AbstractSubResourceMethod; import com.sun.jersey.server.impl.modelapi.annotation.IntrospectionModeller; - +import org.apache.eagle.log.entity.meta.EntityDefinition; +import org.apache.eagle.log.entity.meta.EntityDefinitionManager; +import org.apache.eagle.log.entity.meta.IndexDefinition; +import org.apache.eagle.log.entity.meta.MetricDefinition; import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.node.ArrayNode; import org.codehaus.jackson.node.JsonNodeFactory; @@ -38,161 +37,157 @@ import 
javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; - import java.util.Map; -/** - * @since : 7/3/14,2014 - */ @Path(MetadataResource.PATH_META) public class MetadataResource { - final static String PATH_META = "meta"; - final static String PATH_RESOURCE = "resource"; - final static String PATH_SERVICE = "service"; - - @GET - @Produces(MediaType.APPLICATION_JSON) - public Response index(@Context Application application, - @Context HttpServletRequest request){ - String basePath = request.getRequestURL().toString(); - ObjectNode root = JsonNodeFactory.instance.objectNode(); - - root.put(PATH_RESOURCE,joinUri(basePath,PATH_RESOURCE)); - root.put(PATH_SERVICE,joinUri(basePath,PATH_SERVICE)); - return Response.ok().entity(root).build(); - } - - @GET - @Path(PATH_RESOURCE) - @Produces(MediaType.APPLICATION_JSON) - public Response listAllResourcesRoutes(@Context Application application, - @Context HttpServletRequest request){ - String basePath = request.getRequestURL().toString(); - basePath = basePath.substring(0,basePath.length() - PATH_META.length() - PATH_RESOURCE.length() -1); - ObjectNode root = JsonNodeFactory.instance.objectNode(); - root.put("base",basePath); - ArrayNode resources = JsonNodeFactory.instance.arrayNode(); - root.put( "resources", resources ); - - for ( Class aClass : application.getClasses()){ - if ( isAnnotatedResourceClass(aClass)){ - AbstractResource resource = IntrospectionModeller.createResource(aClass); - ObjectNode resourceNode = JsonNodeFactory.instance.objectNode(); - String uriPrefix = resource.getPath().getValue(); - - for ( AbstractSubResourceMethod srm : resource.getSubResourceMethods() ) { - String uri = uriPrefix + "/" + srm.getPath().getValue(); - addTo( resourceNode, uri, srm, joinUri(basePath, uri) ); - } - - for ( AbstractResourceMethod srm : resource.getResourceMethods() ) { - addTo( resourceNode, uriPrefix, srm, joinUri( basePath, uriPrefix ) ); - } - resources.add( resourceNode ); - } 
- } - - return Response.ok().entity( root ).build(); - } - - private String joinUri(String basePath, String uriPrefix) { - if(basePath.endsWith("/") && uriPrefix.startsWith("/")){ - return basePath.substring(0,basePath.length()-2)+uriPrefix; - }else if(basePath.endsWith("/") || uriPrefix.startsWith("/")){ - return basePath+ uriPrefix; - } - return basePath+"/"+uriPrefix; - } - - private void addTo( ObjectNode resourceNode, String uriPrefix, AbstractResourceMethod srm, String path ){ - if(resourceNode.get( uriPrefix ) == null){ - ObjectNode inner = JsonNodeFactory.instance.objectNode(); - inner.put("path", path); - inner.put("methods", JsonNodeFactory.instance.arrayNode()); - resourceNode.put( uriPrefix, inner ); - } - - ((ArrayNode) resourceNode.get( uriPrefix ).get("methods")).add( srm.getHttpMethod() ); - } - - @SuppressWarnings({ "rawtypes", "unchecked" }) - private boolean isAnnotatedResourceClass( Class rc ){ - if ( rc.isAnnotationPresent( Path.class ) ) { - return true; - } - - for ( Class i : rc.getInterfaces() ) { - if ( i.isAnnotationPresent( Path.class ) ) { - return true; - } - } - - return false; - } - - @GET - @Path(PATH_SERVICE) - @Produces(MediaType.APPLICATION_JSON) - public Response listAllEntities(@Context Application application, - @Context HttpServletRequest request) throws Exception { - Map entities = EntityDefinitionManager.entities(); - ObjectNode root = JsonNodeFactory.instance.objectNode(); - - ArrayNode services = JsonNodeFactory.instance.arrayNode(); - - for(Map.Entry entry : entities.entrySet()){ -// ObjectNode serviceNode = JsonNodeFactory.instance.objectNode(); -// serviceNode.put(entry.getKey(),entityDefationitionAsJson(entry.getValue())); - services.add(entityDefationitionAsJson(entry.getValue())); - } - root.put("count",entities.keySet().size()); - root.put("services",services); - return Response.ok().entity(root).build(); - } - - private JsonNode entityDefationitionAsJson(EntityDefinition def) { - ObjectNode node = 
JsonNodeFactory.instance.objectNode(); - node.put("service",def.getService()); - node.put("entityClass",def.getEntityClass().getName()); - node.put("table",def.getTable()); - node.put("columnFamily",def.getColumnFamily()); - node.put("prefix",def.getPrefix()); - if(def.getPartitions()!=null){ - node.put("partitions",arrayNode(def.getPartitions())); - } - node.put("isTimeSeries",def.isTimeSeries()); - - MetricDefinition mdf = def.getMetricDefinition(); - if(mdf!=null){ - node.put("interval", mdf.getInterval()); - } - - IndexDefinition[] indexDef = def.getIndexes(); - if(indexDef!=null){ - ArrayNode indexDefArray = JsonNodeFactory.instance.arrayNode(); - for(IndexDefinition idef : indexDef){ - ObjectNode idn = JsonNodeFactory.instance.objectNode(); - idn.put("indexPrefix",idef.getIndexPrefix()); - - if(idef.getIndex()!=null){ - ObjectNode index = JsonNodeFactory.instance.objectNode(); - index.put("name",idef.getIndex().name()); - index.put("columns",arrayNode(idef.getIndex().columns())); - idn.put("index",index); - } - - indexDefArray.add(idn); - } - node.put("indexs",indexDefArray); - } - return node; - } - - private ArrayNode arrayNode(String[] values){ - ArrayNode an = JsonNodeFactory.instance.arrayNode(); - for(String v:values){ - an.add(v); - } - return an; - } + static final String PATH_META = "meta"; + static final String PATH_RESOURCE = "resource"; + static final String PATH_SERVICE = "service"; + + @GET + @Produces(MediaType.APPLICATION_JSON) + public Response index(@Context Application application, + @Context HttpServletRequest request) { + String basePath = request.getRequestURL().toString(); + ObjectNode root = JsonNodeFactory.instance.objectNode(); + + root.put(PATH_RESOURCE, joinUri(basePath, PATH_RESOURCE)); + root.put(PATH_SERVICE, joinUri(basePath, PATH_SERVICE)); + return Response.ok().entity(root).build(); + } + + @GET + @Path(PATH_RESOURCE) + @Produces(MediaType.APPLICATION_JSON) + public Response listAllResourcesRoutes(@Context Application 
application, + @Context HttpServletRequest request) { + String basePath = request.getRequestURL().toString(); + basePath = basePath.substring(0, basePath.length() - PATH_META.length() - PATH_RESOURCE.length() - 1); + ObjectNode root = JsonNodeFactory.instance.objectNode(); + root.put("base", basePath); + ArrayNode resources = JsonNodeFactory.instance.arrayNode(); + root.put("resources", resources); + + for (Class aClass : application.getClasses()) { + if (isAnnotatedResourceClass(aClass)) { + AbstractResource resource = IntrospectionModeller.createResource(aClass); + ObjectNode resourceNode = JsonNodeFactory.instance.objectNode(); + String uriPrefix = resource.getPath().getValue(); + + for (AbstractSubResourceMethod srm : resource.getSubResourceMethods()) { + String uri = uriPrefix + "/" + srm.getPath().getValue(); + addTo(resourceNode, uri, srm, joinUri(basePath, uri)); + } + + for (AbstractResourceMethod srm : resource.getResourceMethods()) { + addTo(resourceNode, uriPrefix, srm, joinUri(basePath, uriPrefix)); + } + resources.add(resourceNode); + } + } + + return Response.ok().entity(root).build(); + } + + private String joinUri(String basePath, String uriPrefix) { + if (basePath.endsWith("/") && uriPrefix.startsWith("/")) { + return basePath.substring(0, basePath.length() - 2) + uriPrefix; + } else if (basePath.endsWith("/") || uriPrefix.startsWith("/")) { + return basePath + uriPrefix; + } + return basePath + "/" + uriPrefix; + } + + private void addTo(ObjectNode resourceNode, String uriPrefix, AbstractResourceMethod srm, String path) { + if (resourceNode.get(uriPrefix) == null) { + ObjectNode inner = JsonNodeFactory.instance.objectNode(); + inner.put("path", path); + inner.put("methods", JsonNodeFactory.instance.arrayNode()); + resourceNode.put(uriPrefix, inner); + } + + ((ArrayNode) resourceNode.get(uriPrefix).get("methods")).add(srm.getHttpMethod()); + } + + @SuppressWarnings( {"rawtypes", "unchecked"}) + private boolean isAnnotatedResourceClass(Class rc) { 
+ if (rc.isAnnotationPresent(Path.class)) { + return true; + } + + for (Class i : rc.getInterfaces()) { + if (i.isAnnotationPresent(Path.class)) { + return true; + } + } + + return false; + } + + @GET + @Path(PATH_SERVICE) + @Produces(MediaType.APPLICATION_JSON) + public Response listAllEntities(@Context Application application, + @Context HttpServletRequest request) throws Exception { + Map entities = EntityDefinitionManager.entities(); + ObjectNode root = JsonNodeFactory.instance.objectNode(); + + ArrayNode services = JsonNodeFactory.instance.arrayNode(); + + for (Map.Entry entry : entities.entrySet()) { + // ObjectNode serviceNode = JsonNodeFactory.instance.objectNode(); + // serviceNode.put(entry.getKey(),entityDefationitionAsJson(entry.getValue())); + services.add(entityDefationitionAsJson(entry.getValue())); + } + root.put("count", entities.keySet().size()); + root.put("services", services); + return Response.ok().entity(root).build(); + } + + private JsonNode entityDefationitionAsJson(EntityDefinition def) { + ObjectNode node = JsonNodeFactory.instance.objectNode(); + node.put("service", def.getService()); + node.put("entityClass", def.getEntityClass().getName()); + node.put("table", def.getTable()); + node.put("columnFamily", def.getColumnFamily()); + node.put("prefix", def.getPrefix()); + if (def.getPartitions() != null) { + node.put("partitions", arrayNode(def.getPartitions())); + } + node.put("isTimeSeries", def.isTimeSeries()); + + MetricDefinition mdf = def.getMetricDefinition(); + if (mdf != null) { + node.put("interval", mdf.getInterval()); + } + + IndexDefinition[] indexDef = def.getIndexes(); + if (indexDef != null) { + ArrayNode indexDefArray = JsonNodeFactory.instance.arrayNode(); + for (IndexDefinition idef : indexDef) { + ObjectNode idn = JsonNodeFactory.instance.objectNode(); + idn.put("indexPrefix", idef.getIndexPrefix()); + + if (idef.getIndex() != null) { + ObjectNode index = JsonNodeFactory.instance.objectNode(); + index.put("name", 
idef.getIndex().name()); + index.put("columns", arrayNode(idef.getIndex().columns())); + idn.put("index", index); + } + + indexDefArray.add(idn); + } + node.put("indexs", indexDefArray); + } + return node; + } + + private ArrayNode arrayNode(String[] values) { + ArrayNode an = JsonNodeFactory.instance.arrayNode(); + for (String v : values) { + an.add(v); + } + return an; + } } diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/metric/EagleMetricResource.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/metric/EagleMetricResource.java index f3ff4206d3..8e8d2139ce 100644 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/metric/EagleMetricResource.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/metric/EagleMetricResource.java @@ -16,43 +16,41 @@ */ package org.apache.eagle.service.metric; -import java.util.List; +import org.apache.eagle.log.entity.GenericCreateAPIResponseEntity; +import org.apache.eagle.log.entity.GenericEntityWriter; +import org.apache.eagle.log.entity.GenericMetricEntity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.apache.eagle.log.entity.GenericCreateAPIResponseEntity; -import org.apache.eagle.log.entity.GenericEntityWriter; -import org.apache.eagle.log.entity.GenericMetricEntity; +import java.util.List; @Path(EagleMetricResource.METRIC_URL_PATH) public class EagleMetricResource { - private static final Logger LOG = LoggerFactory.getLogger(EagleMetricResource.class); - public static final String METRIC_URL_PATH = "/metric"; - - @POST - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public GenericCreateAPIResponseEntity 
createGenericMetricEntity(List entities) { - GenericCreateAPIResponseEntity result = new GenericCreateAPIResponseEntity(); - try{ - GenericEntityWriter writer = new GenericEntityWriter(GenericMetricEntity.GENERIC_METRIC_SERVICE); - List rowkeys = null; - rowkeys = writer.write(entities); - result.setEncodedRowkeys(rowkeys); - result.setSuccess(true); - return result; - }catch(Exception ex){ - LOG.error("Fail writing Generic Metric entity", ex); - result.setSuccess(false); - result.setException(ex.getMessage()); - return result; - } - } + private static final Logger LOG = LoggerFactory.getLogger(EagleMetricResource.class); + public static final String METRIC_URL_PATH = "/metric"; + + @POST + @Consumes(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON) + public GenericCreateAPIResponseEntity createGenericMetricEntity(List entities) { + GenericCreateAPIResponseEntity result = new GenericCreateAPIResponseEntity(); + try { + GenericEntityWriter writer = new GenericEntityWriter(GenericMetricEntity.GENERIC_METRIC_SERVICE); + List rowkeys = null; + rowkeys = writer.write(entities); + result.setEncodedRowkeys(rowkeys); + result.setSuccess(true); + return result; + } catch (Exception ex) { + LOG.error("Fail writing Generic Metric entity", ex); + result.setSuccess(false); + result.setException(ex.getMessage()); + return result; + } + } } diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/rowkey/RowKeyQueryResource.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/rowkey/RowKeyQueryResource.java index 9480695e06..cb05e7500a 100644 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/rowkey/RowKeyQueryResource.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/rowkey/RowKeyQueryResource.java @@ -15,22 +15,11 @@ * limitations under the License. 
*/ /** - * + * */ package org.apache.eagle.service.rowkey; -import java.io.IOException; - -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; - -import org.apache.eagle.service.common.EagleExceptionWrapper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import org.apache.eagle.common.EagleBase64Wrapper; import org.apache.eagle.log.base.taggedlog.NoSuchRowException; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.HBaseInternalLogHelper; @@ -39,50 +28,56 @@ import org.apache.eagle.log.entity.index.RowKeyLogReader; import org.apache.eagle.log.entity.meta.EntityDefinition; import org.apache.eagle.log.entity.meta.EntityDefinitionManager; -import org.apache.eagle.common.EagleBase64Wrapper; +import org.apache.eagle.service.common.EagleExceptionWrapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; +import java.io.IOException; + -/** - * @since Jan 26, 2015 - */ @Path("/rowkeyquery") public class RowKeyQueryResource { - private static final Logger LOG = LoggerFactory.getLogger(RowKeyQueryResource.class); + private static final Logger LOG = LoggerFactory.getLogger(RowKeyQueryResource.class); - @GET - @Produces({MediaType.APPLICATION_JSON}) - public RowkeyQueryAPIResponseEntity getEntityByRowkey(@QueryParam("query") String query, @QueryParam("rowkey") String rowkey){ - RowkeyQueryAPIResponseEntity result = new RowkeyQueryAPIResponseEntity(); - RowKeyLogReader reader = null; + @GET + @Produces( {MediaType.APPLICATION_JSON}) + public RowkeyQueryAPIResponseEntity getEntityByRowkey(@QueryParam("query") String query, @QueryParam("rowkey") String rowkey) { + RowkeyQueryAPIResponseEntity result = new RowkeyQueryAPIResponseEntity(); + RowKeyLogReader reader = 
null; - try { - EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(query); - reader = new RowKeyLogReader(ed, EagleBase64Wrapper.decode(rowkey)); - reader.open(); - InternalLog log = reader.read(); - TaggedLogAPIEntity entity; - entity = HBaseInternalLogHelper.buildEntity(log, ed); - result.setObj(entity); - result.setSuccess(true); - return result; - } - catch(NoSuchRowException ex){ - LOG.error("rowkey " + ex.getMessage() + " does not exist!", ex); - result.setSuccess(false); - result.setException(EagleExceptionWrapper.wrap(ex)); - return result; - } - catch(Exception ex){ - LOG.error("Cannot read alert by rowkey", ex); - result.setSuccess(false); - result.setException(EagleExceptionWrapper.wrap(ex)); - return result; - } - finally{ - try { - if(reader != null) - reader.close(); - } catch (IOException e) { - } - } - } + try { + EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(query); + reader = new RowKeyLogReader(ed, EagleBase64Wrapper.decode(rowkey)); + reader.open(); + InternalLog log = reader.read(); + TaggedLogAPIEntity entity; + entity = HBaseInternalLogHelper.buildEntity(log, ed); + result.setObj(entity); + result.setSuccess(true); + return result; + } catch (NoSuchRowException ex) { + LOG.error("rowkey " + ex.getMessage() + " does not exist!", ex); + result.setSuccess(false); + result.setException(EagleExceptionWrapper.wrap(ex)); + return result; + } catch (Exception ex) { + LOG.error("Cannot read alert by rowkey", ex); + result.setSuccess(false); + result.setException(EagleExceptionWrapper.wrap(ex)); + return result; + } finally { + try { + if (reader != null) { + reader.close(); + } + } catch (IOException e) { + LOG.warn("cloese reader error"); + } + } + } } diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/rowkey/RowkeyResource.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/rowkey/RowkeyResource.java index d4a6a42317..99dd267f8b 
100644 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/rowkey/RowkeyResource.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/rowkey/RowkeyResource.java @@ -16,118 +16,115 @@ */ package org.apache.eagle.service.rowkey; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; - -import org.apache.eagle.service.common.EagleExceptionWrapper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import org.apache.eagle.common.ByteUtil; +import org.apache.eagle.common.DateTimeUtil; +import org.apache.eagle.common.EagleBase64Wrapper; +import org.apache.eagle.common.service.POSTResultEntityBase; import org.apache.eagle.log.base.taggedlog.RowkeyAPIEntity; import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity; import org.apache.eagle.log.entity.InternalLog; import org.apache.eagle.log.entity.old.GenericDeleter; import org.apache.eagle.log.entity.old.HBaseLogByRowkeyReader; -import org.apache.eagle.common.ByteUtil; -import org.apache.eagle.common.DateTimeUtil; -import org.apache.eagle.common.EagleBase64Wrapper; -import org.apache.eagle.common.service.POSTResultEntityBase; +import org.apache.eagle.service.common.EagleExceptionWrapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.ws.rs.*; +import javax.ws.rs.core.MediaType; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; @Deprecated @Path("rowkey") public class RowkeyResource { - private static final Logger LOG = LoggerFactory.getLogger(RowkeyResource.class); - - @GET - @Produces(MediaType.APPLICATION_JSON) - public RowkeyAPIEntity inspectRowkey(@QueryParam("table") String table, @QueryParam("cf") 
String columnFamily, - @QueryParam("key") String key, @QueryParam("all") String all, @QueryParam("field") List fields){ - RowkeyAPIEntity entity = new RowkeyAPIEntity(); - byte[] row = null; - boolean includingAllQualifiers = false; - if(all != null && all.equals("true")) - includingAllQualifiers = true; - HBaseLogByRowkeyReader getter = new HBaseLogByRowkeyReader(table, columnFamily, includingAllQualifiers, fields); - InternalLog log = null; - try{ - getter.open(); - row = EagleBase64Wrapper.decode(key); - log = getter.get(row); - }catch(Exception ex){ - LOG.error("Cannot get rowkey", ex); - entity.setSuccess(false); - entity.setException(EagleExceptionWrapper.wrap(ex)); - return entity; - }finally{ - try{ - getter.close(); - }catch(Exception ex){} - } - - Map fieldNameValueMap = new TreeMap(); - entity.setFieldNameValueMap(fieldNameValueMap); - // populate qualifiers - Map qualifierValues = log.getQualifierValues(); - for(Map.Entry qualifier : qualifierValues.entrySet()){ - if(qualifier.getValue() != null){ - fieldNameValueMap.put(qualifier.getKey(), new String(qualifier.getValue())); - } - } - - // decode rowkey - // the first integer is prefix hashcode - entity.setPrefixHashCode(ByteUtil.bytesToInt(row, 0)); - long ts = Long.MAX_VALUE-ByteUtil.bytesToLong(row, 4); - entity.setTimestamp(ts); - entity.setHumanTime(DateTimeUtil.millisecondsToHumanDateWithMilliseconds(ts)); - int offset = 4+8; - int len = row.length; - Map tagNameHashValueHashMap = new HashMap(); - // TODO boundary check please - while(offset < len){ - int tagNameHash = ByteUtil.bytesToInt(row, offset); - offset += 4; - int tagValueHash = ByteUtil.bytesToInt(row, offset); - offset += 4; - tagNameHashValueHashMap.put(tagNameHash, tagValueHash); - } - - entity.setSuccess(true); - return entity; - } - - /** - * for entities, the only required field is encodedRowkey - * @param table - * @param columnFamily - * @param entities - */ - @DELETE - @Consumes(MediaType.APPLICATION_JSON) - 
@Produces(MediaType.APPLICATION_JSON) - public POSTResultEntityBase deleteEntityByEncodedRowkey(@QueryParam("table") String table, @QueryParam("cf") String columnFamily, - List entities){ - GenericDeleter deleter = new GenericDeleter(table, columnFamily); - POSTResultEntityBase result = new POSTResultEntityBase(); - try{ - deleter.delete(entities); - }catch(Exception ex){ - LOG.error("Fail deleting entity " + table + ":" + columnFamily, ex); - result.setSuccess(false); - result.setException(ex.getMessage()); - return result; - } - result.setSuccess(true); - return result; - } + private static final Logger LOG = LoggerFactory.getLogger(RowkeyResource.class); + + @GET + @Produces(MediaType.APPLICATION_JSON) + public RowkeyAPIEntity inspectRowkey(@QueryParam("table") String table, @QueryParam("cf") String columnFamily, + @QueryParam("key") String key, @QueryParam("all") String all, @QueryParam("field") List fields) { + RowkeyAPIEntity entity = new RowkeyAPIEntity(); + byte[] row = null; + boolean includingAllQualifiers = false; + if (all != null && all.equals("true")) { + includingAllQualifiers = true; + } + HBaseLogByRowkeyReader getter = new HBaseLogByRowkeyReader(table, columnFamily, includingAllQualifiers, fields); + InternalLog log = null; + try { + getter.open(); + row = EagleBase64Wrapper.decode(key); + log = getter.get(row); + } catch (Exception ex) { + LOG.error("Cannot get rowkey", ex); + entity.setSuccess(false); + entity.setException(EagleExceptionWrapper.wrap(ex)); + return entity; + } finally { + try { + getter.close(); + } catch (Exception ex) { + LOG.warn("cloese reader error"); + } + } + + Map fieldNameValueMap = new TreeMap(); + entity.setFieldNameValueMap(fieldNameValueMap); + // populate qualifiers + Map qualifierValues = log.getQualifierValues(); + for (Map.Entry qualifier : qualifierValues.entrySet()) { + if (qualifier.getValue() != null) { + fieldNameValueMap.put(qualifier.getKey(), new String(qualifier.getValue())); + } + } + + // decode rowkey 
+ // the first integer is prefix hashcode + entity.setPrefixHashCode(ByteUtil.bytesToInt(row, 0)); + long ts = Long.MAX_VALUE - ByteUtil.bytesToLong(row, 4); + entity.setTimestamp(ts); + entity.setHumanTime(DateTimeUtil.millisecondsToHumanDateWithMilliseconds(ts)); + int offset = 4 + 8; + int len = row.length; + Map tagNameHashValueHashMap = new HashMap(); + // TODO boundary check please + while (offset < len) { + int tagNameHash = ByteUtil.bytesToInt(row, offset); + offset += 4; + int tagValueHash = ByteUtil.bytesToInt(row, offset); + offset += 4; + tagNameHashValueHashMap.put(tagNameHash, tagValueHash); + } + + entity.setSuccess(true); + return entity; + } + + /** + * for entities, the only required field is encodedRowkey. + * + * @param table + * @param columnFamily + * @param entities + */ + @DELETE + @Consumes(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON) + public POSTResultEntityBase deleteEntityByEncodedRowkey(@QueryParam("table") String table, @QueryParam("cf") String columnFamily, + List entities) { + GenericDeleter deleter = new GenericDeleter(table, columnFamily); + POSTResultEntityBase result = new POSTResultEntityBase(); + try { + deleter.delete(entities); + } catch (Exception ex) { + LOG.error("Fail deleting entity " + table + ":" + columnFamily, ex); + result.setSuccess(false); + result.setException(ex.getMessage()); + return result; + } + result.setSuccess(true); + return result; + } } diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/EagleServiceSelfCheckAPIEntity.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/EagleServiceSelfCheckAPIEntity.java index 0e52a2e7bc..8e8d052464 100644 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/EagleServiceSelfCheckAPIEntity.java +++ 
b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/EagleServiceSelfCheckAPIEntity.java @@ -17,31 +17,37 @@ package org.apache.eagle.service.selfcheck; /** - * expose internal configuration or metrics + * expose internal configuration or metrics. */ //@XmlRootElement //@XmlAccessorType(XmlAccessType.FIELD) //@XmlType(propOrder = {"env", "hbaseZookeeperQuorum", "hbaseZookeeperClientPort"}) public class EagleServiceSelfCheckAPIEntity { - private String env; - private String hbaseZookeeperQuorum; - private String hbaseZookeeperClientPort; - public String getEnv() { - return env; - } - public void setEnv(String env) { - this.env = env; - } - public String getHbaseZookeeperQuorum() { - return hbaseZookeeperQuorum; - } - public void setHbaseZookeeperQuorum(String hbaseZookeeperQuorum) { - this.hbaseZookeeperQuorum = hbaseZookeeperQuorum; - } - public String getHbaseZookeeperClientPort() { - return hbaseZookeeperClientPort; - } - public void setHbaseZookeeperClientPort(String hbaseZookeeperClientPort) { - this.hbaseZookeeperClientPort = hbaseZookeeperClientPort; - } + private String env; + private String hbaseZookeeperQuorum; + private String hbaseZookeeperClientPort; + + public String getEnv() { + return env; + } + + public void setEnv(String env) { + this.env = env; + } + + public String getHbaseZookeeperQuorum() { + return hbaseZookeeperQuorum; + } + + public void setHbaseZookeeperQuorum(String hbaseZookeeperQuorum) { + this.hbaseZookeeperQuorum = hbaseZookeeperQuorum; + } + + public String getHbaseZookeeperClientPort() { + return hbaseZookeeperClientPort; + } + + public void setHbaseZookeeperClientPort(String hbaseZookeeperClientPort) { + this.hbaseZookeeperClientPort = hbaseZookeeperClientPort; + } } diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/EagleServiceSelfCheckResource.java 
b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/EagleServiceSelfCheckResource.java index 88ab61345b..67322a9274 100644 --- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/EagleServiceSelfCheckResource.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/EagleServiceSelfCheckResource.java @@ -16,23 +16,23 @@ */ package org.apache.eagle.service.selfcheck; +import org.apache.eagle.common.config.EagleConfigFactory; + import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; -import org.apache.eagle.common.config.EagleConfigFactory; - @Path("ValidateInternals") public class EagleServiceSelfCheckResource { - - @GET - @Produces(MediaType.APPLICATION_JSON) - public EagleServiceSelfCheckAPIEntity selfCheck(){ - EagleServiceSelfCheckAPIEntity entity = new EagleServiceSelfCheckAPIEntity(); - entity.setHbaseZookeeperQuorum(EagleConfigFactory.load().getZKQuorum()); - entity.setHbaseZookeeperClientPort(EagleConfigFactory.load().getZKPort()); - entity.setEnv(EagleConfigFactory.load().getEnv()); - return entity; - } + + @GET + @Produces(MediaType.APPLICATION_JSON) + public EagleServiceSelfCheckAPIEntity selfCheck() { + EagleServiceSelfCheckAPIEntity entity = new EagleServiceSelfCheckAPIEntity(); + entity.setHbaseZookeeperQuorum(EagleConfigFactory.load().getZKQuorum()); + entity.setHbaseZookeeperClientPort(EagleConfigFactory.load().getZKPort()); + entity.setEnv(EagleConfigFactory.load().getEnv()); + return entity; + } } diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/ServiceResource.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/ServiceResource.java index 2db13f5895..1d19955614 100644 --- 
a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/ServiceResource.java +++ b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/selfcheck/ServiceResource.java @@ -16,27 +16,25 @@ */ package org.apache.eagle.service.selfcheck; -import java.util.Map; +import com.sun.jersey.api.json.JSONWithPadding; +import org.apache.eagle.log.entity.meta.EntityDefinition; +import org.apache.eagle.log.entity.meta.EntityDefinitionManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.GenericEntity; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.apache.eagle.log.entity.meta.EntityDefinition; -import org.apache.eagle.log.entity.meta.EntityDefinitionManager; -import com.sun.jersey.api.json.JSONWithPadding; +import java.util.Map; @Path("services") public class ServiceResource { private static final Logger LOG = LoggerFactory.getLogger(ServiceResource.class); @GET - @Produces({"application/json", "application/xml"}) + @Produces( {"application/json", "application/xml"}) public Map getServiceDefinitions() { Map result = null; try { @@ -50,7 +48,7 @@ public Map getServiceDefinitions() { @GET @Path("/jsonp") - @Produces({"application/x-javascript", "application/json", "application/xml"}) + @Produces( {"application/x-javascript", "application/json", "application/xml"}) public JSONWithPadding getServiceDefinitionsWithJsonp(@QueryParam("callback") String callback) { Map result = null; try { diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/webapp/WEB-INF/web.xml b/eagle-core/eagle-query/eagle-service-base/src/main/webapp/WEB-INF/web.xml index c6c7a8ec05..d8ab4041bd 100644 --- a/eagle-core/eagle-query/eagle-service-base/src/main/webapp/WEB-INF/web.xml +++ b/eagle-core/eagle-query/eagle-service-base/src/main/webapp/WEB-INF/web.xml @@ -18,7 
+18,8 @@ ~ This web.xml file is not required when using Servlet 3.0 container, ~ see implementation details http://jersey.java.net/nonav/documentation/latest/jax-rs.html --> - + Jersey Web Application com.sun.jersey.spi.container.servlet.ServletContainer @@ -30,7 +31,7 @@ com.sun.jersey.api.json.POJOMappingFeature true - +