diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java
index 32f1eb9..f9c0aeb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java
@@ -1,5 +1,10 @@
 package org.apache.hadoop.hive.ql.optimizer.optiq.translator;
 
+import java.sql.Date;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+
 import org.apache.hadoop.hive.ql.optimizer.optiq.RelOptHiveTable;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -41,30 +46,26 @@ static ASTNode table(TableAccessRelBase scan) {
     return b.node();
   }
 
-  static ASTNode join(ASTNode left, ASTNode right, JoinRelType joinType,
-      ASTNode cond, boolean semiJoin) {
+  static ASTNode join(ASTNode left, ASTNode right, JoinRelType joinType, ASTNode cond,
+      boolean semiJoin) {
     ASTBuilder b = null;
 
     switch (joinType) {
     case INNER:
       if (semiJoin) {
-        b = ASTBuilder.construct(HiveParser.TOK_LEFTSEMIJOIN,
-            "TOK_LEFTSEMIJOIN");
+        b = ASTBuilder.construct(HiveParser.TOK_LEFTSEMIJOIN, "TOK_LEFTSEMIJOIN");
       } else {
         b = ASTBuilder.construct(HiveParser.TOK_JOIN, "TOK_JOIN");
       }
       break;
     case LEFT:
-      b = ASTBuilder.construct(HiveParser.TOK_LEFTOUTERJOIN,
-          "TOK_LEFTOUTERJOIN");
+      b = ASTBuilder.construct(HiveParser.TOK_LEFTOUTERJOIN, "TOK_LEFTOUTERJOIN");
       break;
     case RIGHT:
-      b = ASTBuilder.construct(HiveParser.TOK_RIGHTOUTERJOIN,
-          "TOK_RIGHTOUTERJOIN");
+      b = ASTBuilder.construct(HiveParser.TOK_RIGHTOUTERJOIN, "TOK_RIGHTOUTERJOIN");
       break;
     case FULL:
-      b = ASTBuilder.construct(HiveParser.TOK_FULLOUTERJOIN,
-          "TOK_FULLOUTERJOIN");
+      b = ASTBuilder.construct(HiveParser.TOK_FULLOUTERJOIN, "TOK_FULLOUTERJOIN");
       break;
     }
 
@@ -87,9 +88,8 @@ static ASTNode qualifiedName(String tableName, String colName) {
   }
 
   static ASTNode unqualifiedName(String colName) {
-    ASTBuilder b = ASTBuilder
-        .construct(HiveParser.TOK_TABLE_OR_COL,
-            "TOK_TABLE_OR_COL").add(HiveParser.Identifier, colName);
+    ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL").add(
+        HiveParser.Identifier, colName);
     return b.node();
   }
 
@@ -108,39 +108,61 @@ static ASTNode limit(Object value) {
 
   static ASTNode selectExpr(ASTNode expr, String alias) {
     return ASTBuilder.construct(HiveParser.TOK_SELEXPR, "TOK_SELEXPR").add(expr)
-        .add(HiveParser.Identifier, alias).node();
+        .add(HiveParser.Identifier, alias).node();
   }
 
   static ASTNode literal(RexLiteral literal) {
-    Object val = literal.getValue3();
+    Object val = null;
     int type = 0;
     SqlTypeName sqlType = literal.getType().getSqlTypeName();
 
     switch (sqlType) {
     case TINYINT:
+      val = literal.getValue3();
       type = HiveParser.TinyintLiteral;
       break;
     case SMALLINT:
+      val = literal.getValue3();
       type = HiveParser.SmallintLiteral;
       break;
     case INTEGER:
     case BIGINT:
+      val = literal.getValue3();
       type = HiveParser.BigintLiteral;
       break;
     case DECIMAL:
     case FLOAT:
     case DOUBLE:
    case REAL:
+      val = literal.getValue3();
       type = HiveParser.Number;
       break;
     case VARCHAR:
     case CHAR:
+      val = literal.getValue3();
       type = HiveParser.StringLiteral;
       val = "'" + String.valueOf(val) + "'";
       break;
     case BOOLEAN:
-      type = ((Boolean) val).booleanValue() ? HiveParser.KW_TRUE
-          : HiveParser.KW_FALSE;
+      val = literal.getValue3();
+      type = ((Boolean) val).booleanValue() ? HiveParser.KW_TRUE : HiveParser.KW_FALSE;
       break;
+    case DATE: {
+      val = literal.getValue();
+      type = HiveParser.TOK_DATELITERAL;
+      DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
+      val = df.format(((Calendar) val).getTime());
+      val = "'" + val + "'";
+    }
+      break;
+    case TIME:
+    case TIMESTAMP: {
+      val = literal.getValue();
+      type = HiveParser.TOK_TIMESTAMP;
+      DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+      val = df.format(((Calendar) val).getTime());
+      val = "'" + val + "'";
+    }
+      break;
     case NULL:
       type = HiveParser.TOK_NULL;
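Reviewer note on ASTBuilder.literal(): the new DATE/TIME/TIMESTAMP branches render Optiq's Calendar-backed literals back into the quoted strings Hive's parser expects, so the format pattern matters — "HH" is hour-of-day (00-23), while "hh" is the 12-hour field and would silently corrupt afternoon timestamps without an am/pm marker. A minimal standalone sketch of the round trip (class name and values are hypothetical, not part of the patch):

    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.GregorianCalendar;

    // Hypothetical demo: formats a Calendar the way ASTBuilder.literal()
    // does for DATE and TIMESTAMP literals.
    public class LiteralFormatDemo {
      public static void main(String[] args) {
        Calendar cal = new GregorianCalendar(2014, Calendar.MARCH, 7, 17, 30, 0);
        String date = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime());
        String ts = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(cal.getTime());
        System.out.println("'" + date + "'"); // '2014-03-07'
        System.out.println("'" + ts + "'");   // '2014-03-07 17:30:00'
        // With the pattern "hh" the same instant would print as 05:30:00.
      }
    }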
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java
index 0228553..49240b1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java
@@ -2,11 +2,18 @@
 
 import java.math.BigDecimal;
 import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
 import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
@@ -20,12 +27,15 @@
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseNumeric;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToBinary;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToChar;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToVarchar;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -81,8 +91,9 @@ public RexNodeConverter(RelOptCluster cluster, List inpCtxLst, boolean
 
   public RexNode convert(ExprNodeDesc expr) throws SemanticException {
     if (expr instanceof ExprNodeNullDesc) {
-      return m_cluster.getRexBuilder().makeNullLiteral(TypeConverter.convert(
-          expr.getTypeInfo(), m_cluster.getRexBuilder().getTypeFactory()).getSqlTypeName());
+      return m_cluster.getRexBuilder().makeNullLiteral(
+          TypeConverter.convert(expr.getTypeInfo(), m_cluster.getRexBuilder().getTypeFactory())
+              .getSqlTypeName());
     }
     if (expr instanceof ExprNodeGenericFuncDesc) {
       return convert((ExprNodeGenericFuncDesc) expr);
@@ -182,8 +193,9 @@ private RexNode handleExplicitCast(ExprNodeGenericFuncDesc func, List c
     GenericUDF udf = func.getGenericUDF();
     if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar)
         || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate)
-        || (udf instanceof GenericUDFToBinary) || (udf instanceof GenericUDFToUnixTimeStamp)
-        || castExprUsingUDFBridge(udf)) {
+        || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) {
+      // || (udf instanceof GenericUDFToUnixTimeStamp) || (udf instanceof
+      // GenericUDFTimestamp) || castExprUsingUDFBridge(udf)) {
       castExpr = m_cluster.getRexBuilder().makeCast(
           TypeConverter.convert(func.getTypeInfo(), m_cluster.getTypeFactory()),
           childRexNodeLst.get(0));
@@ -233,7 +245,9 @@ protected RexNode convert(ExprNodeConstantDesc literal) {
     PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
 
-    Object value = literal.getValue();
+    ConstantObjectInspector coi = literal.getWritableObjectInspector();
+    Object value = ObjectInspectorUtils.copyToStandardJavaObject(literal
+        .getWritableObjectInspector().getWritableConstantValue(), coi);
     RexNode optiqLiteral = null;
 
     // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
@@ -255,6 +269,10 @@ protected RexNode convert(ExprNodeConstantDesc literal) {
       break;
     // TODO: is Decimal an exact numeric or approximate numeric?
     case DECIMAL:
+      if (value instanceof HiveDecimal)
+        value = ((HiveDecimal) value).bigDecimalValue();
+      if (value instanceof Decimal128)
+        value = ((Decimal128) value).toBigDecimal();
       optiqLiteral = rexBuilder.makeExactLiteral((BigDecimal) value);
       break;
     case FLOAT:
@@ -263,11 +281,29 @@ protected RexNode convert(ExprNodeConstantDesc literal) {
     case DOUBLE:
       optiqLiteral = rexBuilder.makeApproxLiteral(new BigDecimal((Double) value), optiqDataType);
       break;
+    case CHAR:
+      if (value instanceof HiveChar)
+        value = ((HiveChar) value).getValue();
+      optiqLiteral = rexBuilder.makeLiteral((String) value);
+      break;
+    case VARCHAR:
+      if (value instanceof HiveVarchar)
+        value = ((HiveVarchar) value).getValue();
+      optiqLiteral = rexBuilder.makeLiteral((String) value);
+      break;
     case STRING:
       optiqLiteral = rexBuilder.makeLiteral((String) value);
       break;
     case DATE:
+      Calendar cal = new GregorianCalendar();
+      cal.setTime((Date) value);
+      optiqLiteral = rexBuilder.makeDateLiteral(cal);
+      break;
     case TIMESTAMP:
+      // The unwrapped constant is a java.sql.Timestamp (a java.util.Date),
+      // not a Calendar, so wrap it before building the literal.
+      cal = new GregorianCalendar();
+      cal.setTime((Date) value);
+      optiqLiteral = rexBuilder.makeTimestampLiteral(cal, RelDataType.PRECISION_NOT_SPECIFIED);
+      break;
     case BINARY:
     case VOID:
     case UNKNOWN:
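Reviewer note on convert(ExprNodeConstantDesc): reading constants through the writable ObjectInspector means values can arrive as Hive wrapper types (HiveDecimal, HiveChar, HiveVarchar) rather than the plain Java types the RexBuilder expects, hence the instanceof unwrapping above. A standalone sketch of the decimal case (class name and value are hypothetical):

    import java.math.BigDecimal;
    import org.apache.hadoop.hive.common.type.HiveDecimal;

    // Hypothetical demo: the HiveDecimal -> BigDecimal unwrap performed by
    // the DECIMAL branch before rexBuilder.makeExactLiteral() is called.
    public class DecimalUnwrapDemo {
      public static void main(String[] args) {
        Object value = HiveDecimal.create("12.345");
        if (value instanceof HiveDecimal) {
          value = ((HiveDecimal) value).bigDecimalValue();
        }
        System.out.println((BigDecimal) value); // 12.345
      }
    }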
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java
index ba1c085..755b5ce 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java
@@ -22,6 +22,7 @@
 import org.eigenbase.relopt.RelOptCluster;
 import org.eigenbase.reltype.RelDataType;
 import org.eigenbase.reltype.RelDataTypeFactory;
+import org.eigenbase.reltype.RelDataTypeFactoryImpl.JavaType;
 import org.eigenbase.reltype.RelDataTypeField;
 import org.eigenbase.rex.RexBuilder;
 import org.eigenbase.sql.type.SqlTypeName;
@@ -51,8 +52,7 @@
   };
 
   /*********************** Convert Hive Types To Optiq Types ***********************/
-  public static RelDataType getType(RelOptCluster cluster,
-      List cInfoLst) {
+  public static RelDataType getType(RelOptCluster cluster, List cInfoLst) {
     RexBuilder rexBuilder = cluster.getRexBuilder();
     RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
     List fieldTypes = new LinkedList();
@@ -65,8 +65,7 @@ public static RelDataType getType(RelOptCluster cluster,
     return dtFactory.createStructType(fieldTypes, fieldNames);
   }
 
-  public static RelDataType getType(RelOptCluster cluster, RowResolver rr,
-      List neededCols) {
+  public static RelDataType getType(RelOptCluster cluster, RowResolver rr, List neededCols) {
     RexBuilder rexBuilder = cluster.getRexBuilder();
     RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
     RowSchema rs = rr.getRowSchema();
@@ -105,8 +104,7 @@ public static RelDataType convert(TypeInfo type, RelDataTypeFactory dtFactory) {
     return convertedType;
   }
 
-  public static RelDataType convert(PrimitiveTypeInfo type,
-      RelDataTypeFactory dtFactory) {
+  public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
     RelDataType convertedType = null;
 
     switch (type.getPrimitiveCategory()) {
@@ -135,9 +133,7 @@ public static RelDataType convert(PrimitiveTypeInfo type,
       convertedType = dtFactory.createSqlType(SqlTypeName.DOUBLE);
       break;
     case STRING:
-      // TODO: shall we pass -1 for len to distinguish between STRING & VARCHAR on way out
-      convertedType = dtFactory.createSqlType(SqlTypeName.VARCHAR,
-          RelDataType.PRECISION_NOT_SPECIFIED);
+      convertedType = dtFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE);
       break;
     case DATE:
       convertedType = dtFactory.createSqlType(SqlTypeName.DATE);
@@ -149,8 +145,9 @@ public static RelDataType convert(PrimitiveTypeInfo type,
       convertedType = dtFactory.createSqlType(SqlTypeName.BINARY);
       break;
     case DECIMAL:
-      DecimalTypeInfo dtInf = (DecimalTypeInfo)type;
-      convertedType = dtFactory.createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
+      DecimalTypeInfo dtInf = (DecimalTypeInfo) type;
+      convertedType = dtFactory
+          .createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
       break;
     case VARCHAR:
       convertedType = dtFactory.createSqlType(SqlTypeName.VARCHAR,
@@ -172,45 +169,39 @@ public static RelDataType convert(PrimitiveTypeInfo type,
     return convertedType;
   }
 
-  public static RelDataType convert(ListTypeInfo lstType,
-      RelDataTypeFactory dtFactory) {
+  public static RelDataType convert(ListTypeInfo lstType, RelDataTypeFactory dtFactory) {
     RelDataType elemType = convert(lstType.getListElementTypeInfo(), dtFactory);
     return dtFactory.createArrayType(elemType, -1);
   }
 
-  public static RelDataType convert(MapTypeInfo mapType,
-      RelDataTypeFactory dtFactory) {
+  public static RelDataType convert(MapTypeInfo mapType, RelDataTypeFactory dtFactory) {
     RelDataType keyType = convert(mapType.getMapKeyTypeInfo(), dtFactory);
     RelDataType valueType = convert(mapType.getMapValueTypeInfo(), dtFactory);
     return dtFactory.createMapType(keyType, valueType);
   }
 
-  public static RelDataType convert(StructTypeInfo structType,
-      final RelDataTypeFactory dtFactory) {
-    List fTypes = Lists.transform(
-        structType.getAllStructFieldTypeInfos(),
+  public static RelDataType convert(StructTypeInfo structType, final RelDataTypeFactory dtFactory) {
+    List fTypes = Lists.transform(structType.getAllStructFieldTypeInfos(),
         new Function() {
           @Override
          public RelDataType apply(TypeInfo tI) {
            return convert(tI, dtFactory);
          }
        });
-    return dtFactory.createStructType(fTypes,
-        structType.getAllStructFieldNames());
+    return dtFactory.createStructType(fTypes, structType.getAllStructFieldNames());
   }
 
-  public static RelDataType convert(UnionTypeInfo unionType,
-      RelDataTypeFactory dtFactory) {
+  public static RelDataType convert(UnionTypeInfo unionType, RelDataTypeFactory dtFactory) {
    // @todo what do we do about unions?
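Reviewer note on TypeConverter: the patch replaces PRECISION_NOT_SPECIFIED with VARCHAR(Integer.MAX_VALUE) as the marker for Hive's unbounded STRING, and convertPrimtiveType()/hiveToken() test the same sentinel on the way back out, so the choice has to stay consistent in both directions. A small sketch of the convention (class and method names are hypothetical, not Hive APIs):

    // Hypothetical illustration: Hive STRING travels through Optiq as
    // VARCHAR(Integer.MAX_VALUE) and maps back to STRING only when the
    // precision equals that sentinel.
    public class StringSentinelDemo {
      static final int STRING_PRECISION = Integer.MAX_VALUE;

      static String hiveTypeNameFor(int varcharPrecision) {
        return varcharPrecision == STRING_PRECISION
            ? "string"
            : "varchar(" + varcharPrecision + ")";
      }

      public static void main(String[] args) {
        System.out.println(hiveTypeNameFor(Integer.MAX_VALUE)); // string
        System.out.println(hiveTypeNameFor(20));                // varchar(20)
      }
    }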
    throw new UnsupportedOperationException();
  }
 
   public static TypeInfo convert(RelDataType rType) {
-    if ( rType.isStruct() ) {
+    if (rType.isStruct()) {
       return convertStructType(rType);
-    } else if ( rType.getComponentType() != null ) {
+    } else if (rType.getComponentType() != null) {
       return convertListType(rType);
-    } else if ( rType.getKeyType() != null ) {
+    } else if (rType.getKeyType() != null) {
       return convertMapType(rType);
     } else {
       return convertPrimtiveType(rType);
@@ -218,16 +209,14 @@ public static TypeInfo convert(RelDataType rType) {
   }
 
   public static TypeInfo convertStructType(RelDataType rType) {
-    List fTypes = Lists.transform(
-        rType.getFieldList(),
+    List fTypes = Lists.transform(rType.getFieldList(),
         new Function() {
           @Override
           public TypeInfo apply(RelDataTypeField f) {
             return convert(f.getType());
           }
         });
-    List fNames = Lists.transform(
-        rType.getFieldList(),
+    List fNames = Lists.transform(rType.getFieldList(),
         new Function() {
           @Override
           public String apply(RelDataTypeField f) {
@@ -247,7 +236,7 @@ public static TypeInfo convertListType(RelDataType rType) {
   }
 
   public static TypeInfo convertPrimtiveType(RelDataType rType) {
-    switch(rType.getSqlTypeName()) {
+    switch (rType.getSqlTypeName()) {
     case BOOLEAN:
       return TypeInfoFactory.booleanTypeInfo;
     case TINYINT:
@@ -271,14 +260,14 @@ public static TypeInfo convertPrimtiveType(RelDataType rType) {
     case DECIMAL:
       return TypeInfoFactory.getDecimalTypeInfo(rType.getPrecision(), rType.getScale());
     case VARCHAR:
-      if (rType.getPrecision() == RelDataType.PRECISION_NOT_SPECIFIED)
+      if (rType.getPrecision() == Integer.MAX_VALUE)
         return TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
       else
         return TypeInfoFactory.getVarcharTypeInfo(rType.getPrecision());
     case CHAR:
       return TypeInfoFactory.getCharTypeInfo(rType.getPrecision());
     case OTHER:
-      default:
+    default:
       return TypeInfoFactory.voidTypeInfo;
 
     }
@@ -290,19 +279,21 @@ public static HiveToken hiveToken(RelDataType optiqType) {
 
     switch (optiqType.getSqlTypeName()) {
     case CHAR: {
-      ht = new HiveToken(HiveParser.TOK_CHAR, "TOK_CHAR",
-          String.valueOf(optiqType.getPrecision()));
+      ht = new HiveToken(HiveParser.TOK_CHAR, "TOK_CHAR", String.valueOf(optiqType.getPrecision()));
     }
       break;
     case VARCHAR: {
-      ht = new HiveToken(HiveParser.TOK_VARCHAR, "TOK_VARCHAR",
-          String.valueOf(optiqType.getPrecision()));
+      if (optiqType.getPrecision() == Integer.MAX_VALUE)
+        ht = new HiveToken(HiveParser.TOK_STRING, "TOK_STRING", String.valueOf(optiqType
+            .getPrecision()));
+      else
+        ht = new HiveToken(HiveParser.TOK_VARCHAR, "TOK_VARCHAR", String.valueOf(optiqType
+            .getPrecision()));
     }
       break;
     case DECIMAL: {
-      ht = new HiveToken(HiveParser.TOK_DECIMAL, "TOK_DECIMAL",
-          String.valueOf(optiqType.getPrecision()), String.valueOf(optiqType
-              .getScale()));
+      ht = new HiveToken(HiveParser.TOK_DECIMAL, "TOK_DECIMAL", String.valueOf(optiqType
+          .getPrecision()), String.valueOf(optiqType.getScale()));
     }
       break;
     default:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 54af461..5d41d22 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -11847,14 +11847,13 @@ else return (ltd.getReplace() ? WriteEntity.WriteType.INSERT_OVERWRITE :
 
   // TODO: Extend QP to indicate LV, Multi Insert, Cubes, Rollups...
   private boolean canHandleQuery() {
     boolean runOptiqPlanner = false;
-
-    if (((queryProperties.getJoinCount() > 1) || conf.getBoolVar(ConfVars.HIVE_IN_TEST))
-        && !queryProperties.hasClusterBy()
-        && !queryProperties.hasDistributeBy()
-        && !queryProperties.hasSortBy()
-        && !queryProperties.hasPTF()
-        && !queryProperties.usesScript()
-        && !queryProperties.hasMultiDestQuery()) {
+    // Assumption: if the top-level QB is a query, then everything below it
+    // must also be a query.
+    if (qb.getIsQuery()
+        && ((queryProperties.getJoinCount() > 1) || conf.getBoolVar(ConfVars.HIVE_IN_TEST))
+        && !queryProperties.hasClusterBy() && !queryProperties.hasDistributeBy()
+        && !queryProperties.hasSortBy() && !queryProperties.hasPTF()
+        && !queryProperties.usesScript() && !queryProperties.hasMultiDestQuery()) {
       runOptiqPlanner = true;
     } else {
       LOG.info("Can not invoke CBO; query contains operators not supported for CBO.");
@@ -11944,7 +11943,7 @@ public RelNode apply(RelOptCluster cluster, RelOptSchema relOptSchema, SchemaPlu
 
       optiqOptimizedPlan = hepPlanner.findBestExp();
 
-      if (LOG.isDebugEnabled()) {
+      if (LOG.isDebugEnabled() && !conf.getBoolVar(ConfVars.HIVE_IN_TEST)) {
         LOG.debug("CBO Planning details:\n");
         LOG.debug("Original Plan:\n");
         LOG.debug(RelOptUtil.toString(optiqGenPlan, SqlExplainLevel.ALL_ATTRIBUTES));
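Reviewer note on canHandleQuery(): the added qb.getIsQuery() check gates the Optiq path to plain query blocks, on the stated assumption that a query at the top level implies queries all the way down. A condensed sketch of the predicate (free-standing parameters instead of the real QueryProperties/QB fields, purely illustrative):

    // Hypothetical condensation of the CBO eligibility gate.
    public class CboGateSketch {
      static boolean canUseCbo(boolean isQuery, int joinCount, boolean inTest,
          boolean hasClusterBy, boolean hasDistributeBy, boolean hasSortBy,
          boolean hasPtf, boolean usesScript, boolean hasMultiDest) {
        return isQuery && (joinCount > 1 || inTest)
            && !hasClusterBy && !hasDistributeBy && !hasSortBy
            && !hasPtf && !usesScript && !hasMultiDest;
      }

      public static void main(String[] args) {
        // A two-join SELECT with no unsupported constructs is eligible.
        System.out.println(canUseCbo(true, 2, false, false, false, false, false, false, false));
        // The same shape issued from a non-query statement is skipped.
        System.out.println(canUseCbo(false, 2, false, false, false, false, false, false, false));
      }
    }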
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
index a0aeccf..f76fc10 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
@@ -22,6 +22,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -33,7 +34,9 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
-
+@Description(name = "from_utc_timestamp",
+    value = "from_utc_timestamp(timestamp, string timezone) - "
+        + "Assumes given timestamp is UTC and converts to given timezone (as of Hive 0.8.0)")
 public class GenericUDFFromUtcTimestamp extends GenericUDF {
 
   static final Log LOG = LogFactory.getLog(GenericUDFFromUtcTimestamp.class);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
index ba4fed7..2f854f4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
@@ -39,6 +40,8 @@
  * Creates a TimestampWritable object using PrimitiveObjectInspectorConverter
  *
  */
+@Description(name = "timestamp",
+    value = "cast(date as timestamp) - Returns timestamp")
 @VectorizedExpressions({CastLongToTimestampViaLongToLong.class,
     CastDoubleToTimestampViaDoubleToLong.class, CastDecimalToTimestamp.class})
 public class GenericUDFTimestamp extends GenericUDF {
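Reviewer note on the new @Description annotations: Hive's Description annotation is retained at runtime, which is how DESCRIBE FUNCTION can surface the text. A quick reflective check (class name DescriptionCheck is hypothetical; assumes hive-exec is on the classpath):

    import org.apache.hadoop.hive.ql.exec.Description;

    // Hypothetical demo: reads back the annotation value added by this patch.
    public class DescriptionCheck {
      public static void main(String[] args) throws Exception {
        Class<?> udf = Class.forName(
            "org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp");
        Description d = udf.getAnnotation(Description.class);
        System.out.println(d.name() + ": " + d.value());
      }
    }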
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java
index af4da3a..4234346 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java
@@ -17,7 +17,11 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 
+@Description(name = "to_utc_timestamp",
+    value = "to_utc_timestamp(timestamp, string timezone) - "
+        + "Assumes given timestamp is in given timezone and converts to UTC (as of Hive 0.8.0)")
 public class GenericUDFToUtcTimestamp extends GenericUDFFromUtcTimestamp {
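Reviewer note on the literal plumbing as a whole: ASTBuilder formats Calendar values out to quoted strings, while RexNodeConverter goes the opposite direction, loading java.sql.Date/Timestamp constants into a GregorianCalendar for the RexBuilder. A self-contained sketch of that wrapping step (class name and value are hypothetical):

    import java.util.Calendar;
    import java.util.GregorianCalendar;

    // Hypothetical demo mirroring the DATE branch of
    // RexNodeConverter.convert(ExprNodeConstantDesc): a java.sql.Date
    // constant is loaded into a Calendar before makeDateLiteral(cal).
    public class DateWrapDemo {
      public static void main(String[] args) {
        java.sql.Date d = java.sql.Date.valueOf("2014-03-07");
        Calendar cal = new GregorianCalendar();
        cal.setTime(d);
        System.out.println(cal.getTime()); // e.g. Fri Mar 07 00:00:00 PST 2014
      }
    }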