diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index c9ee423..329fd5c 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -2006,7 +2006,10 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) { HIVE_VECTORIZATION_GROUPBY_FLUSH_PERCENT("hive.vectorized.groupby.flush.percent", (float) 0.1, "Percent of entries in the group by aggregation hash flushed when the memory threshold is exceeded."), - HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, ""), + HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, "The property has been extended to control " + + "whether to check, convert, and normalize partition value to conform to its column type in " + + "all partition operations including but not limited to insert."), + HIVE_HADOOP_CLASSPATH("hive.hadoop.classpath", null, "For Windows OS, we need to pass HIVE_HADOOP_CLASSPATH Java parameter while starting HiveServer2 \n" + "using \"-hiveconf hive.hadoop.classpath=%HIVE_LIB%\"."), diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index f49ad0c..dd86acc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -43,6 +43,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -50,7 +51,6 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryProperties; -import 
org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -64,6 +64,7 @@ import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils; +import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.ListBucketingCtx; @@ -74,6 +75,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import com.google.common.annotations.VisibleForTesting; @@ -986,17 +988,6 @@ public void setUpdateColumnAccessInfo(ColumnAccessInfo updateColumnAccessInfo) { this.updateColumnAccessInfo = updateColumnAccessInfo; } - protected LinkedHashMap extractPartitionSpecs(Tree partspec) - throws SemanticException { - LinkedHashMap partSpec = new LinkedHashMap(); - for (int i = 0; i < partspec.getChildCount(); ++i) { - CommonTree partspec_val = (CommonTree) partspec.getChild(i); - String val = stripQuotes(partspec_val.getChild(1).getText()); - partSpec.put(partspec_val.getChild(0).getText().toLowerCase(), val); - } - return partSpec; - } - /** * Checks if given specification is proper specification for prefix of * partition cols, for table partitioned by ds, hr, min valid ones are @@ -1221,7 +1212,7 @@ protected boolean analyzeStoredAdDirs(ASTNode child) { return storedAsDirs; } - private static boolean getPartExprNodeDesc(ASTNode astNode, + private static boolean getPartExprNodeDesc(ASTNode astNode, String 
defaultPartitionName, Map astExprNodeMap) throws SemanticException { if (astNode == null) { @@ -1236,14 +1227,16 @@ private static boolean getPartExprNodeDesc(ASTNode astNode, ASTNode childASTNode = (ASTNode)childNode; if (childASTNode.getType() != HiveParser.TOK_PARTVAL) { - result = getPartExprNodeDesc(childASTNode, astExprNodeMap) && result; + result = getPartExprNodeDesc(childASTNode, defaultPartitionName, astExprNodeMap) && result; } else { boolean isDynamicPart = childASTNode.getChildren().size() <= 1; result = !isDynamicPart && result; if (!isDynamicPart) { ASTNode partVal = (ASTNode)childASTNode.getChildren().get(1); - astExprNodeMap.put((ASTNode)childASTNode.getChildren().get(0), - TypeCheckProcFactory.genExprNode(partVal, typeCheckCtx).get(partVal)); + if (!defaultPartitionName.equalsIgnoreCase(unescapeSQLString(partVal.getText()))) { + astExprNodeMap.put((ASTNode)childASTNode.getChildren().get(0), + TypeCheckProcFactory.genExprNode(partVal, typeCheckCtx).get(partVal)); + } } } } @@ -1253,13 +1246,18 @@ private static boolean getPartExprNodeDesc(ASTNode astNode, public static void validatePartSpec(Table tbl, Map partSpec, ASTNode astNode, HiveConf conf, boolean shouldBeFull) throws SemanticException { tbl.validatePartColumnNames(partSpec, shouldBeFull); + validatePartColumnType(tbl, partSpec, astNode, conf); + } + public static void validatePartColumnType(Table tbl, Map partSpec, + ASTNode astNode, HiveConf conf) throws SemanticException { if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_TYPE_CHECK_ON_INSERT)) { return; } Map astExprNodeMap = new HashMap(); - if (!getPartExprNodeDesc(astNode, astExprNodeMap)) { + if (!getPartExprNodeDesc(astNode, + HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME), astExprNodeMap)) { STATIC_LOG.warn("Dynamic partitioning is used; only validating " + astExprNodeMap.size() + " columns"); } @@ -1279,29 +1277,38 @@ public static void validatePartSpec(Table tbl, Map partSpec, astKeyName = 
stripIdentifierQuotes(astKeyName); } String colType = partCols.get(astKeyName); - ObjectInspector inputOI = astExprNodePair.getValue().getWritableObjectInspector(); + ObjectInspector inputOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo + (astExprNodePair.getValue().getTypeInfo()); TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString(colType); ObjectInspector outputOI = - TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(expectedType); - Object value = null; - String colSpec = partSpec.get(astKeyName); - try { - value = - ExprNodeEvaluatorFactory.get(astExprNodePair.getValue()). - evaluate(colSpec); - } catch (HiveException e) { - throw new SemanticException(e); - } - Object convertedValue = - ObjectInspectorConverters.getConverter(inputOI, outputOI).convert(value); - if (convertedValue == null) { - throw new SemanticException(ErrorMsg.PARTITION_SPEC_TYPE_MISMATCH, astKeyName, - inputOI.getTypeName(), outputOI.getTypeName()); + TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(expectedType); + // Since partVal is a constant, it is safe to cast ExprNodeDesc to ExprNodeConstantDesc. + // Its value should be in normalized format (e.g. no leading zero in integer, date is in + // format of YYYY-MM-DD etc) + Object value = ((ExprNodeConstantDesc)astExprNodePair.getValue()).getValue(); + Object convertedValue = value; + if (!inputOI.getTypeName().equals(outputOI.getTypeName())) { + convertedValue = ObjectInspectorConverters.getConverter(inputOI, outputOI).convert(value); + if (convertedValue == null) { + throw new SemanticException(ErrorMsg.PARTITION_SPEC_TYPE_MISMATCH, astKeyName, + inputOI.getTypeName(), outputOI.getTypeName()); + } + + if (!convertedValue.toString().equals(value.toString())) { + // value might have been changed because of the normalization in conversion + STATIC_LOG.warn("Partition " + astKeyName + " expects type " + outputOI.getTypeName() + + " but input value is in type " + inputOI.getTypeName() + ". 
Convert " + + value.toString() + " to " + convertedValue.toString()); + } } - normalizeColSpec(partSpec, astKeyName, colType, colSpec, convertedValue); + if (!convertedValue.toString().equals(partSpec.get(astKeyName))) { + STATIC_LOG.warn("Partition Spec " + astKeyName + "=" + partSpec.get(astKeyName) + + " has been changed to " + astKeyName + "=" + convertedValue.toString()); + } + partSpec.put(astKeyName, convertedValue.toString()); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java index e8066be..a5f0a7f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java @@ -101,25 +101,13 @@ private Table getTable(ASTNode tree) throws SemanticException { return getTable(names[0], names[1], true); } - private Map getPartKeyValuePairsFromAST(ASTNode tree) { + private Map getPartKeyValuePairsFromAST(Table tbl, ASTNode tree, + HiveConf hiveConf) throws SemanticException { ASTNode child = ((ASTNode) tree.getChild(0).getChild(1)); Map partSpec = new HashMap(); - if (null == child) { - // case of analyze table T compute statistics for columns; - return partSpec; - } - String partKey; - String partValue; - for (int i = 0; i < child.getChildCount(); i++) { - partKey = new String(getUnescapedName((ASTNode) child.getChild(i).getChild(0))).toLowerCase(); - if (child.getChild(i).getChildCount() > 1) { - partValue = new String(getUnescapedName((ASTNode) child.getChild(i).getChild(1))); - partValue = partValue.replaceAll("'", ""); - } else { - partValue = null; - } - partSpec.put(partKey, partValue); - } + if (child != null) { + partSpec = DDLSemanticAnalyzer.getValidatedPartSpec(tbl, child, hiveConf, false); + } //otherwise, it is the case of analyze table T compute statistics for columns; return partSpec; } @@ -426,7 +414,7 @@ public void 
analyze(ASTNode ast, Context origCtx) throws SemanticException { if (isPartitionStats) { isTableLevel = false; - partSpec = getPartKeyValuePairsFromAST(ast); + partSpec = getPartKeyValuePairsFromAST(tbl, ast, conf); handlePartialPartitionSpec(partSpec); } else { isTableLevel = true; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 76a0eee..bc69d23 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -245,7 +245,19 @@ public void analyzeInternal(ASTNode input) throws SemanticException { ast = (ASTNode) input.getChild(1); String[] qualified = getQualifiedTableName((ASTNode) input.getChild(0)); String tableName = getDotName(qualified); - HashMap partSpec = DDLSemanticAnalyzer.getPartSpec((ASTNode) input.getChild(2)); + HashMap partSpec = null; + ASTNode partSpecNode = (ASTNode)input.getChild(2); + if (partSpecNode != null) { + // We can use alter table partition rename to convert/normalize the legacy partition + // column values. If so, we should not apply the validation to the old partition spec + // passed in this command. 
+ if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMEPART) { + partSpec = getPartSpec(partSpecNode); + } else { + partSpec = getValidatedPartSpec(getTable(tableName), partSpecNode, conf, false); + } + } + if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAME) { analyzeAlterTableRename(qualified, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_TOUCH) { @@ -667,7 +679,7 @@ private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws Se Table sourceTable = getTable(getUnescapedName((ASTNode)ast.getChild(1))); // Get the partition specs - Map partSpecs = getPartSpec((ASTNode) ast.getChild(0)); + Map partSpecs = getValidatedPartSpec(sourceTable, (ASTNode)ast.getChild(0), conf, false); validatePartitionValues(partSpecs); boolean sameColumns = MetaStoreUtils.compareFieldColumns( destTable.getAllCols(), sourceTable.getAllCols()); @@ -869,9 +881,11 @@ private void analyzeTruncateTable(ASTNode ast) throws SemanticException { } } else { if (isFullSpec(table, partSpec)) { + validatePartSpec(table, partSpec, (ASTNode) root.getChild(1), conf, true); Partition partition = getPartition(table, partSpec, true); outputs.add(new WriteEntity(partition, WriteEntity.WriteType.DDL_EXCLUSIVE)); } else { + validatePartSpec(table, partSpec, (ASTNode) root.getChild(1), conf, false); for (Partition partition : getPartitions(table, partSpec, false)) { outputs.add(new WriteEntity(partition, WriteEntity.WriteType.DDL_EXCLUSIVE)); } @@ -1156,7 +1170,7 @@ private void analyzeAlterIndexRebuild(ASTNode ast) throws SemanticException { HashMap partSpec = null; Tree part = ast.getChild(2); if (part != null) { - partSpec = extractPartitionSpecs(part); + partSpec = getValidatedPartSpec(getTable(qualified), (ASTNode)part, conf, false); } List> indexBuilder = getIndexBuilderMapRed(qualified, indexName, partSpec); rootTasks.addAll(indexBuilder); @@ -1887,18 +1901,6 @@ static public String getColPath( // or DESCRIBE table partition // check whether it is DESCRIBE table 
partition if (ast.getChildCount() == 2) { - ASTNode partNode = (ASTNode) ast.getChild(1); - HashMap partSpec = null; - try { - partSpec = getPartSpec(partNode); - } catch (SemanticException e) { - // get exception in resolving partition - // it could be DESCRIBE table key - // return null - // continue processing for DESCRIBE table key - return null; - } - Table tab = null; try { tab = db.getTable(tableName); @@ -1910,6 +1912,18 @@ static public String getColPath( throw new SemanticException(e.getMessage(), e); } + ASTNode partNode = (ASTNode) ast.getChild(1); + HashMap partSpec = null; + try { + partSpec = getValidatedPartSpec(tab, partNode, db.getConf(), false); + } catch (SemanticException e) { + // get exception in resolving partition + // it could be DESCRIBE table key + // return null + // continue processing for DESCRIBE table key + return null; + } + if (partSpec != null) { Partition part = null; try { @@ -2076,10 +2090,19 @@ private void analyzeDescDatabase(ASTNode ast) throws SemanticException { return partSpec; } + public static HashMap getValidatedPartSpec(Table table, ASTNode astNode, + HiveConf conf, boolean shouldBeFull) throws SemanticException { + HashMap partSpec = getPartSpec(astNode); + if (partSpec != null && !partSpec.isEmpty()) { + validatePartSpec(table, partSpec, astNode, conf, shouldBeFull); + } + return partSpec; + } + private void analyzeShowPartitions(ASTNode ast) throws SemanticException { ShowPartitionsDesc showPartsDesc; String tableName = getUnescapedName((ASTNode) ast.getChild(0)); - List> partSpecs = getPartitionSpecs(ast); + List> partSpecs = getPartitionSpecs(getTable(tableName), ast); // We only can have a single partition spec assert (partSpecs.size() <= 1); Map partSpec = null; @@ -2193,7 +2216,7 @@ private void analyzeShowTableStatus(ASTNode ast) throws SemanticException { if (child.getToken().getType() == HiveParser.Identifier) { dbName = unescapeIdentifier(child.getText()); } else if (child.getToken().getType() == 
HiveParser.TOK_PARTSPEC) { - partSpec = getPartSpec(child); + partSpec = getValidatedPartSpec(getTable(tableNames), child, conf, false); } else { throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg()); } @@ -2294,8 +2317,8 @@ private void analyzeShowLocks(ASTNode ast) throws SemanticException { QualifiedNameUtil.getFullyQualifiedName((ASTNode) tableTypeExpr.getChild(0)); // get partition metadata if partition specified if (tableTypeExpr.getChildCount() == 2) { - ASTNode partspec = (ASTNode) tableTypeExpr.getChild(1); - partSpec = getPartSpec(partspec); + ASTNode partSpecNode = (ASTNode) tableTypeExpr.getChild(1); + partSpec = getValidatedPartSpec(getTable(tableName), partSpecNode, conf, false); } } else if (child.getType() == HiveParser.KW_EXTENDED) { isExtended = true; @@ -2371,7 +2394,7 @@ private void analyzeLockTable(ASTNode ast) throws SemanticException { String tableName = getUnescapedName((ASTNode) ast.getChild(0)).toLowerCase(); String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase()); - List> partSpecs = getPartitionSpecs(ast); + List> partSpecs = getPartitionSpecs(getTable(tableName), ast); // We only can have a single partition spec assert (partSpecs.size() <= 1); @@ -2424,7 +2447,7 @@ private void analyzeShowTxns(ASTNode ast) throws SemanticException { private void analyzeUnlockTable(ASTNode ast) throws SemanticException { String tableName = getUnescapedName((ASTNode) ast.getChild(0)); - List> partSpecs = getPartitionSpecs(ast); + List> partSpecs = getPartitionSpecs(getTable(tableName), ast); // We only can have a single partition spec assert (partSpecs.size() <= 1); @@ -2566,12 +2589,13 @@ private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast, private void analyzeAlterTableRenamePart(ASTNode ast, String tblName, HashMap oldPartSpec) throws SemanticException { - Map newPartSpec = extractPartitionSpecs(ast.getChild(0)); + Table tab = getTable(tblName, true); + validateAlterTableType(tab, 
AlterTableTypes.RENAMEPARTITION); + Map newPartSpec = + getValidatedPartSpec(tab, (ASTNode)ast.getChild(0), conf, false); if (newPartSpec == null) { throw new SemanticException("RENAME PARTITION Missing Destination" + ast); } - Table tab = getTable(tblName, true); - validateAlterTableType(tab, AlterTableTypes.RENAMEPARTITION); ReadEntity re = new ReadEntity(tab); re.noLockNeeded(); inputs.add(re); @@ -2759,9 +2783,8 @@ private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boole addPartitionDesc.addPartition(currentPart, currentLocation); currentLocation = null; } - currentPart = getPartSpec(child); + currentPart = getValidatedPartSpec(tab, child, conf, true); validatePartitionValues(currentPart); // validate reserved values - validatePartSpec(tab, currentPart, child, conf, true); break; case HiveParser.TOK_PARTITIONLOCATION: // if location specified, set in partition @@ -2826,21 +2849,6 @@ private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boole } } - private Partition getPartitionForOutput(Table tab, Map currentPart) - throws SemanticException { - validatePartitionValues(currentPart); - try { - Partition partition = db.getPartition(tab, currentPart, false); - if (partition != null) { - outputs.add(new WriteEntity(partition, WriteEntity.WriteType.INSERT)); - } - return partition; - } catch (HiveException e) { - LOG.warn("wrong partition spec " + currentPart); - } - return null; - } - /** * Rewrite the metadata for one or more partitions in a table. 
Useful when * an external process modifies files on HDFS and you want the pre/post @@ -2859,7 +2867,7 @@ private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) inputs.add(new ReadEntity(tab)); // partition name to value - List> partSpecs = getPartitionSpecs(ast); + List> partSpecs = getPartitionSpecs(tab, ast); if (partSpecs.size() == 0) { AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc( @@ -2887,10 +2895,10 @@ private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolea throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg()); } + Table tab = getTable(qualified); // partition name to value - List> partSpecs = getPartitionSpecs(ast); + List> partSpecs = getPartitionSpecs(tab, ast); - Table tab = getTable(qualified); addTablePartsOutputs(tab, partSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK); validateAlterTableType(tab, AlterTableTypes.ARCHIVE); inputs.add(new ReadEntity(tab)); @@ -2937,7 +2945,7 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { tableName = getUnescapedName((ASTNode) ast.getChild(1)); } } - List> specs = getPartitionSpecs(ast); + List> specs = getPartitionSpecs(getTable(tableName), ast); MsckDesc checkDesc = new MsckDesc(tableName, specs, ctx.getResFile(), repair); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), @@ -2952,16 +2960,17 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { * @return A list of partition name to value mappings. 
* @throws SemanticException */ - private List> getPartitionSpecs(CommonTree ast) + private List> getPartitionSpecs(Table tbl, CommonTree ast) throws SemanticException { List> partSpecs = new ArrayList>(); int childIndex = 0; // get partition metadata if partition specified for (childIndex = 0; childIndex < ast.getChildCount(); childIndex++) { - Tree partspec = ast.getChild(childIndex); + ASTNode partSpecNode = (ASTNode)ast.getChild(childIndex); // sanity check - if (partspec.getType() == HiveParser.TOK_PARTSPEC) { - partSpecs.add(getPartSpec((ASTNode) partspec)); + if (partSpecNode.getType() == HiveParser.TOK_PARTSPEC) { + Map partSpec = getValidatedPartSpec(tbl, partSpecNode, conf, false); + partSpecs.add(partSpec); } } return partSpecs; @@ -2993,9 +3002,12 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { for (int i = 0; i < partSpecTree.getChildCount(); ++i) { CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i); assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL); - String key = partSpecSingleKey.getChild(0).getText().toLowerCase(); + String key = stripIdentifierQuotes(partSpecSingleKey.getChild(0).getText()).toLowerCase(); String operator = partSpecSingleKey.getChild(1).getText(); - String val = stripQuotes(partSpecSingleKey.getChild(2).getText()); + ASTNode partValNode = (ASTNode)partSpecSingleKey.getChild(2); + TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null); + ExprNodeConstantDesc valExpr = (ExprNodeConstantDesc)TypeCheckProcFactory + .genExprNode(partValNode, typeCheckCtx).get(partValNode); String type = colTypes.get(key); if (type == null) { @@ -3003,12 +3015,16 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { } // Create the corresponding hive expression to filter on partition columns. 
PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type); - Converter converter = ObjectInspectorConverters.getConverter( - TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(TypeInfoFactory.stringTypeInfo), - TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti)); + Object val = valExpr.getValue(); + if (!valExpr.getTypeString().equals(type)) { + Converter converter = ObjectInspectorConverters.getConverter( + TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(valExpr.getTypeInfo()), + TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti)); + val = converter.convert(valExpr.getValue()); + } ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true); - ExprNodeGenericFuncDesc op = makeBinaryPredicate( - operator, column, new ExprNodeConstantDesc(pti, converter.convert(val))); + ExprNodeGenericFuncDesc op = makeBinaryPredicate(operator, column, + new ExprNodeConstantDesc(pti, val)); // If it's multi-expr filter (e.g. a='5', b='2012-01-02'), AND with previous exprs. expr = (expr == null) ? 
op : makeBinaryPredicate("and", expr, op); names.add(key); diff --git a/ql/src/test/queries/clientpositive/alter_partition_coltype.q b/ql/src/test/queries/clientpositive/alter_partition_coltype.q index 8c9945c..c9a898b 100644 --- a/ql/src/test/queries/clientpositive/alter_partition_coltype.q +++ b/ql/src/test/queries/clientpositive/alter_partition_coltype.q @@ -46,6 +46,7 @@ select count(*) from alter_coltype where ts = 3.0; select count(*) from alter_coltype where dt = '100'; desc alter_coltype; +set hive.typecheck.on.insert=false; desc alter_coltype partition (dt='100', ts='6.30'); desc alter_coltype partition (dt='100', ts=3.0); diff --git a/ql/src/test/results/clientnegative/archive_partspec1.q.out b/ql/src/test/results/clientnegative/archive_partspec1.q.out index da4817c..f086c42 100644 --- a/ql/src/test/results/clientnegative/archive_partspec1.q.out +++ b/ql/src/test/results/clientnegative/archive_partspec1.q.out @@ -26,4 +26,4 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -FAILED: SemanticException [Error 10125]: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr), while the partitions specified in the query are: (ds, nonexistingpart). 
+FAILED: ValidationFailureSemanticException Partition spec {ds=2008-04-08, nonexistingpart=12} contains non-partition columns diff --git a/ql/src/test/results/clientnegative/archive_partspec5.q.out b/ql/src/test/results/clientnegative/archive_partspec5.q.out index c18de52..46d76e1 100644 --- a/ql/src/test/results/clientnegative/archive_partspec5.q.out +++ b/ql/src/test/results/clientnegative/archive_partspec5.q.out @@ -17,13 +17,13 @@ SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12' PREHOOK: type: QUERY PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=00 +PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=0 POSTHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12', min='00') SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12' POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=00 -POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=00).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=00).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=0 +POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=0).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=0).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] FAILED: SemanticException [Error 10125]: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. 
Partition columns in the table schema are: (ds, hr, min), while the partitions specified in the query are: (ds, min). diff --git a/ql/src/test/results/clientpositive/partition_timestamp.q.out b/ql/src/test/results/clientpositive/partition_timestamp.q.out index bc6ab10..7059006 100644 --- a/ql/src/test/results/clientpositive/partition_timestamp.q.out +++ b/ql/src/test/results/clientpositive/partition_timestamp.q.out @@ -14,79 +14,79 @@ PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000- select * from src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 01:00:00', region= '1') select * from src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 02:00:00', region= '2') select * from src tablesample (5 rows) PREHOOK: type: QUERY PREHOOK: Input: 
default@src -PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 +PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 02:00:00', region= '2') select * from src tablesample (5 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00.0,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00.0,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 01:00:00', region= '2020-20-20') select * from src tablesample (5 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 +PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 01:00:00', region= '2020-20-20') select * from src tablesample (5 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 
01:00:00,region=2020-20-20).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00,region=2020-20-20).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00.0,region=2020-20-20).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00.0,region=2020-20-20).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 02:00:00', region= '1') select * from src tablesample (20 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 02:00:00', region= '1') select * from src tablesample (20 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp_1 
PARTITION(dt=2001-01-01 02:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 03:00:00', region= '10') select * from src tablesample (11 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 +PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 03:00:00', region= '10') select * from src tablesample (11 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00,region=10).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00,region=10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00.0,region=10).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00.0,region=10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select distinct dt from partition_timestamp_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 
02%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 #### A masked pattern was here #### POSTHOOK: query: select distinct dt from partition_timestamp_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 #### A masked pattern was here #### 2000-01-01 01:00:00 2000-01-01 02:00:00 @@ -105,67 +105,67 @@ PREHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt = timestamp '2000-01-01 01:00:00' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 
01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt = timestamp '2000-01-01 01:00:00' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 10 PREHOOK: query: -- 10. Also try with string value in predicate select count(*) from partition_timestamp_1 where dt = '2000-01-01 01:00:00' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 10. Also try with string value in predicate select count(*) from partition_timestamp_1 where dt = '2000-01-01 01:00:00' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 10 PREHOOK: query: -- 5 select count(*) from partition_timestamp_1 where dt = timestamp '2000-01-01 02:00:00' and region = '2' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 #### A masked pattern was here #### POSTHOOK: query: -- 5 select count(*) from partition_timestamp_1 where dt = timestamp '2000-01-01 02:00:00' and region = '2' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 +POSTHOOK: Input: 
default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 #### A masked pattern was here #### 5 PREHOOK: query: -- 11 select count(*) from partition_timestamp_1 where dt = timestamp '2001-01-01 03:00:00' and region = '10' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 #### A masked pattern was here #### POSTHOOK: query: -- 11 select count(*) from partition_timestamp_1 where dt = timestamp '2001-01-01 03:00:00' and region = '10' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 #### A masked pattern was here #### 11 PREHOOK: query: -- 30 select count(*) from partition_timestamp_1 where region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 30 select count(*) from partition_timestamp_1 where region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### 30 PREHOOK: query: -- 0 @@ -196,7 
+196,7 @@ PREHOOK: query: -- Try other comparison operations select count(*) from partition_timestamp_1 where dt > timestamp '2000-01-01 01:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- Try other comparison operations @@ -204,72 +204,72 @@ POSTHOOK: query: -- Try other comparison operations select count(*) from partition_timestamp_1 where dt > timestamp '2000-01-01 01:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### 20 PREHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt < timestamp '2000-01-02 01:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt < timestamp '2000-01-02 01:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 10 PREHOOK: query: -- 20 select count(*) from partition_timestamp_1 where dt >= timestamp '2000-01-02 01:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 
02%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 20 select count(*) from partition_timestamp_1 where dt >= timestamp '2000-01-02 01:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### 20 PREHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt <= timestamp '2000-01-01 01:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt <= timestamp '2000-01-01 01:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 10 PREHOOK: query: -- 20 select count(*) from partition_timestamp_1 where dt <> timestamp '2000-01-01 01:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 20 select count(*) from partition_timestamp_1 where dt <> timestamp '2000-01-01 01:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: 
default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### 20 PREHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt between timestamp '1999-12-30 12:00:00' and timestamp '2000-01-03 12:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt between timestamp '1999-12-30 12:00:00' and timestamp '2000-01-03 12:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 10 PREHOOK: query: -- Try a string key with timestamp-like strings @@ -278,7 +278,7 @@ PREHOOK: query: -- Try a string key with timestamp-like strings select count(*) from partition_timestamp_1 where region = '2020-20-20' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 #### A masked pattern was here #### POSTHOOK: query: -- Try a string key with timestamp-like strings @@ -286,20 +286,20 @@ POSTHOOK: query: -- Try a string key with timestamp-like strings select count(*) from partition_timestamp_1 where region = '2020-20-20' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 +POSTHOOK: Input: 
default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 #### A masked pattern was here #### 5 PREHOOK: query: -- 5 select count(*) from partition_timestamp_1 where region > '2010-01-01' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 #### A masked pattern was here #### POSTHOOK: query: -- 5 select count(*) from partition_timestamp_1 where region > '2010-01-01' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 #### A masked pattern was here #### 5 PREHOOK: query: drop table partition_timestamp_1 diff --git a/ql/src/test/results/clientpositive/partition_timestamp2.q.out b/ql/src/test/results/clientpositive/partition_timestamp2.q.out index 365df69..772df1a 100644 --- a/ql/src/test/results/clientpositive/partition_timestamp2.q.out +++ b/ql/src/test/results/clientpositive/partition_timestamp2.q.out @@ -18,10 +18,10 @@ insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01 insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01 01:00:00', region=2) select * PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 
01%3A00%3A00.0/region=2 +PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 POSTHOOK: query: -- test timestamp literal syntax from (select * from src tablesample (1 rows)) x insert overwrite table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1) select * @@ -30,33 +30,33 @@ insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01 insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01 01:00:00', region=2) select * POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00.0,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00.0,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00.0,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00.0,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, 
comment:default), ] PREHOOK: query: select distinct dt from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select distinct dt from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 1999-01-01 00:00:00 1999-01-01 01:00:00 @@ -65,18 +65,18 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio PREHOOK: query: select * from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: 
Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1999-01-01 00:00:00 2 238 val_238 1999-01-01 01:00:00 2 @@ -87,30 +87,30 @@ insert overwrite table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 select 'changed_key', 'changed_value' from src tablesample (2 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 +PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 
00%3A00%3A00.0/region=1 POSTHOOK: query: -- insert overwrite insert overwrite table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1) select 'changed_key', 'changed_value' from src tablesample (2 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).key SIMPLE [] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).value SIMPLE [] +POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).key SIMPLE [] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).value SIMPLE [] PREHOOK: query: select * from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1999-01-01 00:00:00 2 238 val_238 1999-01-01 01:00:00 2 @@ -120,26 +120,26 @@ changed_key changed_value 2000-01-01 00:00:00 1 PREHOOK: query: -- truncate truncate table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1) PREHOOK: type: TRUNCATETABLE -PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 +PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 POSTHOOK: query: -- truncate truncate table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1) POSTHOOK: type: TRUNCATETABLE -POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 +POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 PREHOOK: query: select distinct dt from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: 
default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select distinct dt from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 1999-01-01 00:00:00 1999-01-01 01:00:00 @@ -148,18 +148,18 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio PREHOOK: query: select * from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A 
masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1999-01-01 00:00:00 2 238 val_238 1999-01-01 01:00:00 2 @@ -172,24 +172,24 @@ POSTHOOK: query: -- alter table add partition alter table partition_timestamp2_1 add partition (dt=timestamp '1980-01-02 00:00:00', region=3) POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Output: default@partition_timestamp2_1 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 PREHOOK: query: select distinct dt from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 
00%3A00%3A00.0/region=3 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select distinct dt from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 1980-01-02 00:00:00 1999-01-01 00:00:00 @@ -199,20 +199,20 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio PREHOOK: query: select * from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: 
default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1999-01-01 00:00:00 2 238 val_238 1999-01-01 01:00:00 2 @@ -221,27 +221,27 @@ PREHOOK: query: -- alter table drop alter table partition_timestamp2_1 drop partition (dt=timestamp '1999-01-01 01:00:00', region=2) PREHOOK: type: ALTERTABLE_DROPPARTS PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Output: 
default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 +PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 POSTHOOK: query: -- alter table drop alter table partition_timestamp2_1 drop partition (dt=timestamp '1999-01-01 01:00:00', region=2) POSTHOOK: type: ALTERTABLE_DROPPARTS POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 PREHOOK: query: select distinct dt from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select distinct dt from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Input: 
default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 1980-01-02 00:00:00 1999-01-01 00:00:00 @@ -250,18 +250,18 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio PREHOOK: query: select * from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: 
Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1999-01-01 00:00:00 2 238 val_238 2000-01-01 01:00:00 1 @@ -270,27 +270,27 @@ alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' PREHOOK: type: ALTERPARTITION_SERIALIZER PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 POSTHOOK: query: -- alter table set serde alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' POSTHOOK: type: ALTERPARTITION_SERIALIZER POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 PREHOOK: query: -- alter table set fileformat alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) set fileformat rcfile PREHOOK: type: ALTERPARTITION_FILEFORMAT PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 POSTHOOK: query: -- alter table set fileformat alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) set fileformat rcfile POSTHOOK: type: ALTERPARTITION_FILEFORMAT POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 
-POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 PREHOOK: query: describe extended partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) PREHOOK: type: DESCTABLE PREHOOK: Input: default@partition_timestamp2_1 @@ -313,29 +313,29 @@ PREHOOK: query: insert overwrite table partition_timestamp2_1 partition(dt=times select * from src tablesample (2 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 POSTHOOK: query: insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) select * from src tablesample (2 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00,region=3).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00,region=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00.0,region=3).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00.0,region=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from partition_timestamp2_1 order by key,value,dt,region PREHOOK: type: QUERY PREHOOK: Input: 
default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 order by key,value,dt,region POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1980-01-02 00:00:00 3 238 val_238 1999-01-01 00:00:00 2 @@ -346,15 +346,15 @@ alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', #### A masked pattern was here #### PREHOOK: type: ALTERPARTITION_LOCATION PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 
00%3A00%3A00/region=3 +PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 #### A masked pattern was here #### POSTHOOK: query: -- alter table set location alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) #### A masked pattern was here #### POSTHOOK: type: ALTERPARTITION_LOCATION POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 #### A masked pattern was here #### PREHOOK: query: describe extended partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) PREHOOK: type: DESCTABLE @@ -378,13 +378,13 @@ PREHOOK: query: -- alter table touch alter table partition_timestamp2_1 touch partition(dt=timestamp '1980-01-02 00:00:00', region=3) PREHOOK: type: ALTERTABLE_TOUCH PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 POSTHOOK: query: -- alter table touch alter table partition_timestamp2_1 touch partition(dt=timestamp '1980-01-02 00:00:00', region=3) POSTHOOK: type: ALTERTABLE_TOUCH POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 PREHOOK: query: drop table partition_timestamp2_1 PREHOOK: type: DROPTABLE PREHOOK: Input: 
default@partition_timestamp2_1