diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index e138800..3935589 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1945,7 +1945,10 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) { HIVE_VECTORIZATION_GROUPBY_FLUSH_PERCENT("hive.vectorized.groupby.flush.percent", (float) 0.1, "Percent of entries in the group by aggregation hash flushed when the memory threshold is exceeded."), - HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, ""), + HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, "This property has been extended to control " + + "whether to check, convert, and normalize partition values to conform to their column types in " + + "all partition operations including but not limited to insert."), + HIVE_HADOOP_CLASSPATH("hive.hadoop.classpath", null, "For Windows OS, we need to pass HIVE_HADOOP_CLASSPATH Java parameter while starting HiveServer2 \n" + "using \"-hiveconf hive.hadoop.classpath=%HIVE_LIB%\"."), diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 19234b5..8c90cb0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -42,6 +42,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -49,7 +50,6 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryProperties; -import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -63,6 +63,7 @@ import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils; +import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.ListBucketingCtx; @@ -73,6 +74,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import com.google.common.annotations.VisibleForTesting; @@ -973,17 +975,6 @@ public void setUpdateColumnAccessInfo(ColumnAccessInfo updateColumnAccessInfo) { this.updateColumnAccessInfo = updateColumnAccessInfo; } - protected LinkedHashMap extractPartitionSpecs(Tree partspec) - throws SemanticException { - LinkedHashMap partSpec = new LinkedHashMap(); - for (int i = 0; i < partspec.getChildCount(); ++i) { - CommonTree partspec_val = (CommonTree) partspec.getChild(i); - String val = stripQuotes(partspec_val.getChild(1).getText()); -
partSpec.put(partspec_val.getChild(0).getText().toLowerCase(), val); - } - return partSpec; - } - /** * Checks if given specification is proper specification for prefix of * partition cols, for table partitioned by ds, hr, min valid ones are @@ -1208,7 +1199,7 @@ protected boolean analyzeStoredAdDirs(ASTNode child) { return storedAsDirs; } - private static boolean getPartExprNodeDesc(ASTNode astNode, + private static boolean getPartExprNodeDesc(ASTNode astNode, String defaultPartitionName, Map astExprNodeMap) throws SemanticException { if (astNode == null) { @@ -1223,14 +1214,16 @@ private static boolean getPartExprNodeDesc(ASTNode astNode, ASTNode childASTNode = (ASTNode)childNode; if (childASTNode.getType() != HiveParser.TOK_PARTVAL) { - result = getPartExprNodeDesc(childASTNode, astExprNodeMap) && result; + result = getPartExprNodeDesc(childASTNode, defaultPartitionName, astExprNodeMap) && result; } else { boolean isDynamicPart = childASTNode.getChildren().size() <= 1; result = !isDynamicPart && result; if (!isDynamicPart) { ASTNode partVal = (ASTNode)childASTNode.getChildren().get(1); - astExprNodeMap.put((ASTNode)childASTNode.getChildren().get(0), - TypeCheckProcFactory.genExprNode(partVal, typeCheckCtx).get(partVal)); + if (!defaultPartitionName.equalsIgnoreCase(unescapeSQLString(partVal.getText()))) { + astExprNodeMap.put((ASTNode)childASTNode.getChildren().get(0), + TypeCheckProcFactory.genExprNode(partVal, typeCheckCtx).get(partVal)); + } } } } @@ -1240,13 +1233,18 @@ private static boolean getPartExprNodeDesc(ASTNode astNode, public static void validatePartSpec(Table tbl, Map partSpec, ASTNode astNode, HiveConf conf, boolean shouldBeFull) throws SemanticException { tbl.validatePartColumnNames(partSpec, shouldBeFull); + validatePartColumnType(tbl, partSpec, astNode, conf); + } + public static void validatePartColumnType(Table tbl, Map partSpec, + ASTNode astNode, HiveConf conf) throws SemanticException { if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_TYPE_CHECK_ON_INSERT)) { return; } Map astExprNodeMap = new HashMap(); - if (!getPartExprNodeDesc(astNode, astExprNodeMap)) { + if (!getPartExprNodeDesc(astNode, + HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME), astExprNodeMap)) { STATIC_LOG.warn("Dynamic partitioning is used; only validating " + astExprNodeMap.size() + " columns"); } @@ -1266,29 +1264,38 @@ public static void validatePartSpec(Table tbl, Map partSpec, astKeyName = stripIdentifierQuotes(astKeyName); } String colType = partCols.get(astKeyName); - ObjectInspector inputOI = astExprNodePair.getValue().getWritableObjectInspector(); + ObjectInspector inputOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo + (astExprNodePair.getValue().getTypeInfo()); TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString(colType); ObjectInspector outputOI = - TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(expectedType); - Object value = null; - String colSpec = partSpec.get(astKeyName); - try { - value = - ExprNodeEvaluatorFactory.get(astExprNodePair.getValue()). 
- evaluate(colSpec); - } catch (HiveException e) { - throw new SemanticException(e); - } - Object convertedValue = - ObjectInspectorConverters.getConverter(inputOI, outputOI).convert(value); - if (convertedValue == null) { - throw new SemanticException(ErrorMsg.PARTITION_SPEC_TYPE_MISMATCH, astKeyName, - inputOI.getTypeName(), outputOI.getTypeName()); + TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(expectedType); + // Since partVal is a constant, it is safe to cast ExprNodeDesc to ExprNodeConstantDesc. + // Its value should be in normalized format (e.g. no leading zeros in integers, dates in + // YYYY-MM-DD format, etc.) + Object value = ((ExprNodeConstantDesc)astExprNodePair.getValue()).getValue(); + Object convertedValue = value; + if (!inputOI.getTypeName().equals(outputOI.getTypeName())) { + convertedValue = ObjectInspectorConverters.getConverter(inputOI, outputOI).convert(value); + if (convertedValue == null) { + throw new SemanticException(ErrorMsg.PARTITION_SPEC_TYPE_MISMATCH, astKeyName, + inputOI.getTypeName(), outputOI.getTypeName()); + } + + if (!convertedValue.toString().equals(value.toString())) { + // value might have been changed because of the normalization in conversion + STATIC_LOG.warn("Partition " + astKeyName + " expects type " + outputOI.getTypeName() + + " but the input value is of type " + inputOI.getTypeName() + ". Converting " + + value.toString() + " to " + convertedValue.toString()); + } } - normalizeColSpec(partSpec, astKeyName, colType, colSpec, convertedValue); + if (!convertedValue.toString().equals(partSpec.get(astKeyName))) { + STATIC_LOG.warn("Partition Spec " + astKeyName + "=" + partSpec.get(astKeyName) + + " has been changed to " + astKeyName + "=" + convertedValue.toString()); + } + partSpec.put(astKeyName, convertedValue.toString()); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java index e8066be..a5f0a7f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java @@ -101,25 +101,13 @@ private Table getTable(ASTNode tree) throws SemanticException { return getTable(names[0], names[1], true); } - private Map getPartKeyValuePairsFromAST(ASTNode tree) { + private Map getPartKeyValuePairsFromAST(Table tbl, ASTNode tree, + HiveConf hiveConf) throws SemanticException { ASTNode child = ((ASTNode) tree.getChild(0).getChild(1)); Map partSpec = new HashMap(); - if (null == child) { - // case of analyze table T compute statistics for columns; - return partSpec; - } - String partKey; - String partValue; - for (int i = 0; i < child.getChildCount(); i++) { - partKey = new String(getUnescapedName((ASTNode) child.getChild(i).getChild(0))).toLowerCase(); - if (child.getChild(i).getChildCount() > 1) { - partValue = new String(getUnescapedName((ASTNode) child.getChild(i).getChild(1))); - partValue = partValue.replaceAll("'", ""); - } else { - partValue = null; - } - partSpec.put(partKey, partValue); - } + if (child != null) { + partSpec = DDLSemanticAnalyzer.getValidatedPartSpec(tbl, child, hiveConf, false); + } //otherwise, it is the case of analyze table T compute statistics for columns; return partSpec; } @@ -426,7 +414,7 @@ public void analyze(ASTNode ast, Context origCtx) throws SemanticException { if (isPartitionStats) { isTableLevel = false; - partSpec = getPartKeyValuePairsFromAST(ast); + partSpec =
getPartKeyValuePairsFromAST(tbl, ast, conf); handlePartialPartitionSpec(partSpec); } else { isTableLevel = true; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 8302067..a16e62c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -245,7 +245,19 @@ public void analyzeInternal(ASTNode input) throws SemanticException { ast = (ASTNode) input.getChild(1); String[] qualified = getQualifiedTableName((ASTNode) input.getChild(0)); String tableName = getDotName(qualified); - HashMap partSpec = DDLSemanticAnalyzer.getPartSpec((ASTNode) input.getChild(2)); + HashMap partSpec = null; + ASTNode partSpecNode = (ASTNode)input.getChild(2); + if (partSpecNode != null) { + // We can use alter table partition rename to convert/normalize the legacy partition + // column values. If so, we should not apply validation to the old partition spec + // passed in this command. + if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMEPART) { + partSpec = getPartSpec(partSpecNode); + } else { + partSpec = getValidatedPartSpec(getTable(tableName), partSpecNode, conf, false); + } + } + if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAME) { analyzeAlterTableRename(qualified, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_TOUCH) { @@ -667,7 +679,7 @@ private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws Se Table sourceTable = getTable(getUnescapedName((ASTNode)ast.getChild(1))); // Get the partition specs - Map partSpecs = getPartSpec((ASTNode) ast.getChild(0)); + Map partSpecs = getValidatedPartSpec(sourceTable, (ASTNode)ast.getChild(0), conf, false); validatePartitionValues(partSpecs); boolean sameColumns = MetaStoreUtils.compareFieldColumns( destTable.getAllCols(), sourceTable.getAllCols()); @@ -866,9 +878,11 @@ private void analyzeTruncateTable(ASTNode ast) throws SemanticException { } } else { if (isFullSpec(table, partSpec)) { + validatePartSpec(table, partSpec, (ASTNode) root.getChild(1), conf, true); Partition partition = getPartition(table, partSpec, true); outputs.add(new WriteEntity(partition, WriteEntity.WriteType.DDL_EXCLUSIVE)); } else { + validatePartSpec(table, partSpec, (ASTNode) root.getChild(1), conf, false); for (Partition partition : getPartitions(table, partSpec, false)) { outputs.add(new WriteEntity(partition, WriteEntity.WriteType.DDL_EXCLUSIVE)); } @@ -1153,7 +1167,7 @@ private void analyzeAlterIndexRebuild(ASTNode ast) throws SemanticException { HashMap partSpec = null; Tree part = ast.getChild(2); if (part != null) { - partSpec = extractPartitionSpecs(part); + partSpec = getValidatedPartSpec(getTable(qualified), (ASTNode)part, conf, false); } List> indexBuilder = getIndexBuilderMapRed(qualified, indexName, partSpec); rootTasks.addAll(indexBuilder); @@ -1884,18 +1898,6 @@ static public String getColPath( // or DESCRIBE table partition // check whether it is DESCRIBE table partition if (ast.getChildCount() == 2) { - ASTNode partNode = (ASTNode) ast.getChild(1); - HashMap partSpec = null; - try { - partSpec = getPartSpec(partNode); - } catch (SemanticException e) { - // get exception in resolving partition - // it could be DESCRIBE table key - // return null - // continue processing for DESCRIBE table key - return null; - } - Table tab = null; try { tab = db.getTable(tableName); @@ -1907,6 +1909,18 @@ static public String getColPath(
throw new SemanticException(e.getMessage(), e); } + ASTNode partNode = (ASTNode) ast.getChild(1); + HashMap partSpec = null; + try { + partSpec = getValidatedPartSpec(tab, partNode, db.getConf(), false); + } catch (SemanticException e) { + // get exception in resolving partition + // it could be DESCRIBE table key + // return null + // continue processing for DESCRIBE table key + return null; + } + if (partSpec != null) { Partition part = null; try { @@ -2073,10 +2087,19 @@ private void analyzeDescDatabase(ASTNode ast) throws SemanticException { return partSpec; } + public static HashMap getValidatedPartSpec(Table table, ASTNode astNode, + HiveConf conf, boolean shouldBeFull) throws SemanticException { + HashMap partSpec = getPartSpec(astNode); + if (partSpec != null && !partSpec.isEmpty()) { + validatePartSpec(table, partSpec, astNode, conf, shouldBeFull); + } + return partSpec; + } + private void analyzeShowPartitions(ASTNode ast) throws SemanticException { ShowPartitionsDesc showPartsDesc; String tableName = getUnescapedName((ASTNode) ast.getChild(0)); - List> partSpecs = getPartitionSpecs(ast); + List> partSpecs = getPartitionSpecs(getTable(tableName), ast); // We only can have a single partition spec assert (partSpecs.size() <= 1); Map partSpec = null; @@ -2190,7 +2213,7 @@ private void analyzeShowTableStatus(ASTNode ast) throws SemanticException { if (child.getToken().getType() == HiveParser.Identifier) { dbName = unescapeIdentifier(child.getText()); } else if (child.getToken().getType() == HiveParser.TOK_PARTSPEC) { - partSpec = getPartSpec(child); + partSpec = getValidatedPartSpec(getTable(tableNames), child, conf, false); } else { throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg()); } @@ -2291,8 +2314,8 @@ private void analyzeShowLocks(ASTNode ast) throws SemanticException { QualifiedNameUtil.getFullyQualifiedName((ASTNode) tableTypeExpr.getChild(0)); // get partition metadata if partition specified if (tableTypeExpr.getChildCount() == 2) { - ASTNode partspec = (ASTNode) tableTypeExpr.getChild(1); - partSpec = getPartSpec(partspec); + ASTNode partSpecNode = (ASTNode) tableTypeExpr.getChild(1); + partSpec = getValidatedPartSpec(getTable(tableName), partSpecNode, conf, false); } } else if (child.getType() == HiveParser.KW_EXTENDED) { isExtended = true; @@ -2368,7 +2391,7 @@ private void analyzeLockTable(ASTNode ast) throws SemanticException { String tableName = getUnescapedName((ASTNode) ast.getChild(0)).toLowerCase(); String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase()); - List> partSpecs = getPartitionSpecs(ast); + List> partSpecs = getPartitionSpecs(getTable(tableName), ast); // We only can have a single partition spec assert (partSpecs.size() <= 1); @@ -2421,7 +2444,7 @@ private void analyzeShowTxns(ASTNode ast) throws SemanticException { private void analyzeUnlockTable(ASTNode ast) throws SemanticException { String tableName = getUnescapedName((ASTNode) ast.getChild(0)); - List> partSpecs = getPartitionSpecs(ast); + List> partSpecs = getPartitionSpecs(getTable(tableName), ast); // We only can have a single partition spec assert (partSpecs.size() <= 1); @@ -2563,12 +2586,13 @@ private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast, private void analyzeAlterTableRenamePart(ASTNode ast, String tblName, HashMap oldPartSpec) throws SemanticException { - Map newPartSpec = extractPartitionSpecs(ast.getChild(0)); + Table tab = getTable(tblName, true); + validateAlterTableType(tab, AlterTableTypes.RENAMEPARTITION); + Map newPartSpec = + 
getValidatedPartSpec(tab, (ASTNode)ast.getChild(0), conf, false); if (newPartSpec == null) { throw new SemanticException("RENAME PARTITION Missing Destination" + ast); } - Table tab = getTable(tblName, true); - validateAlterTableType(tab, AlterTableTypes.RENAMEPARTITION); ReadEntity re = new ReadEntity(tab); re.noLockNeeded(); inputs.add(re); @@ -2734,9 +2758,8 @@ private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boole addPartitionDesc.addPartition(currentPart, currentLocation); currentLocation = null; } - currentPart = getPartSpec(child); + currentPart = getValidatedPartSpec(tab, child, conf, true); validatePartitionValues(currentPart); // validate reserved values - validatePartSpec(tab, currentPart, child, conf, true); break; case HiveParser.TOK_PARTITIONLOCATION: // if location specified, set in partition @@ -2801,21 +2824,6 @@ private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boole } } - private Partition getPartitionForOutput(Table tab, Map currentPart) - throws SemanticException { - validatePartitionValues(currentPart); - try { - Partition partition = db.getPartition(tab, currentPart, false); - if (partition != null) { - outputs.add(new WriteEntity(partition, WriteEntity.WriteType.INSERT)); - } - return partition; - } catch (HiveException e) { - LOG.warn("wrong partition spec " + currentPart); - } - return null; - } - /** * Rewrite the metadata for one or more partitions in a table. Useful when * an external process modifies files on HDFS and you want the pre/post @@ -2834,7 +2842,7 @@ private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) inputs.add(new ReadEntity(tab)); // partition name to value - List> partSpecs = getPartitionSpecs(ast); + List> partSpecs = getPartitionSpecs(tab, ast); if (partSpecs.size() == 0) { AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc( @@ -2862,10 +2870,10 @@ private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolea throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg()); } + Table tab = getTable(qualified); // partition name to value - List> partSpecs = getPartitionSpecs(ast); + List> partSpecs = getPartitionSpecs(tab, ast); - Table tab = getTable(qualified); addTablePartsOutputs(tab, partSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK); validateAlterTableType(tab, AlterTableTypes.ARCHIVE); inputs.add(new ReadEntity(tab)); @@ -2912,7 +2920,7 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { tableName = getUnescapedName((ASTNode) ast.getChild(1)); } } - List> specs = getPartitionSpecs(ast); + List> specs = getPartitionSpecs(getTable(tableName), ast); MsckDesc checkDesc = new MsckDesc(tableName, specs, ctx.getResFile(), repair); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), @@ -2927,16 +2935,17 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { * @return A list of partition name to value mappings. 
* @throws SemanticException */ - private List> getPartitionSpecs(CommonTree ast) + private List> getPartitionSpecs(Table tbl, CommonTree ast) throws SemanticException { List> partSpecs = new ArrayList>(); int childIndex = 0; // get partition metadata if partition specified for (childIndex = 0; childIndex < ast.getChildCount(); childIndex++) { - Tree partspec = ast.getChild(childIndex); + ASTNode partSpecNode = (ASTNode)ast.getChild(childIndex); // sanity check - if (partspec.getType() == HiveParser.TOK_PARTSPEC) { - partSpecs.add(getPartSpec((ASTNode) partspec)); + if (partSpecNode.getType() == HiveParser.TOK_PARTSPEC) { + Map partSpec = getValidatedPartSpec(tbl, partSpecNode, conf, false); + partSpecs.add(partSpec); } } return partSpecs; @@ -2968,9 +2977,12 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { for (int i = 0; i < partSpecTree.getChildCount(); ++i) { CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i); assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL); - String key = partSpecSingleKey.getChild(0).getText().toLowerCase(); + String key = stripIdentifierQuotes(partSpecSingleKey.getChild(0).getText()).toLowerCase(); String operator = partSpecSingleKey.getChild(1).getText(); - String val = stripQuotes(partSpecSingleKey.getChild(2).getText()); + ASTNode partValNode = (ASTNode)partSpecSingleKey.getChild(2); + TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null); + ExprNodeConstantDesc valExpr = (ExprNodeConstantDesc)TypeCheckProcFactory + .genExprNode(partValNode, typeCheckCtx).get(partValNode); String type = colTypes.get(key); if (type == null) { @@ -2978,12 +2990,16 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { } // Create the corresponding hive expression to filter on partition columns. PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type); - Converter converter = ObjectInspectorConverters.getConverter( - TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(TypeInfoFactory.stringTypeInfo), - TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti)); + Object val = valExpr.getValue(); + if (!valExpr.getTypeString().equals(type)) { + Converter converter = ObjectInspectorConverters.getConverter( + TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(valExpr.getTypeInfo()), + TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti)); + val = converter.convert(valExpr.getValue()); + } ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true); - ExprNodeGenericFuncDesc op = makeBinaryPredicate( - operator, column, new ExprNodeConstantDesc(pti, converter.convert(val))); + ExprNodeGenericFuncDesc op = makeBinaryPredicate(operator, column, + new ExprNodeConstantDesc(pti, val)); // If it's multi-expr filter (e.g. a='5', b='2012-01-02'), AND with previous exprs. expr = (expr == null) ? 
op : makeBinaryPredicate("and", expr, op); names.add(key); diff --git a/ql/src/test/queries/clientpositive/alter_partition_coltype.q b/ql/src/test/queries/clientpositive/alter_partition_coltype.q index 8c9945c..c9a898b 100644 --- a/ql/src/test/queries/clientpositive/alter_partition_coltype.q +++ b/ql/src/test/queries/clientpositive/alter_partition_coltype.q @@ -46,6 +46,7 @@ select count(*) from alter_coltype where ts = 3.0; select count(*) from alter_coltype where dt = '100'; desc alter_coltype; +set hive.typecheck.on.insert=false; desc alter_coltype partition (dt='100', ts='6.30'); desc alter_coltype partition (dt='100', ts=3.0); diff --git a/ql/src/test/queries/clientpositive/partition_coltype_literals.q b/ql/src/test/queries/clientpositive/partition_coltype_literals.q new file mode 100644 index 0000000..bfd208d --- /dev/null +++ b/ql/src/test/queries/clientpositive/partition_coltype_literals.q @@ -0,0 +1,75 @@ +drop table if exists partcoltypenum; +create table partcoltypenum (key int, value string) partitioned by (tint tinyint, sint smallint, bint bigint); + +-- add partition +alter table partcoltypenum add partition(tint=100Y, sint=20000S, bint=300000000000L); + +-- describe partition +describe formatted partcoltypenum partition (tint=100, sint=20000S, bint='300000000000'); + +-- change partition file format +alter table partcoltypenum partition(tint=100, sint=20000S, bint='300000000000') set fileformat rcfile; +describe formatted partcoltypenum partition (tint=100Y, sint=20000S, bint=300000000000L); + +-- change partition clusterby, sortby and bucket +alter table partcoltypenum partition(tint='100', sint=20000, bint=300000000000L) clustered by (key) sorted by (key desc) into 4 buckets; +describe formatted partcoltypenum partition (tint=100Y, sint=20000S, bint=300000000000L); + +-- rename partition +alter table partcoltypenum partition(tint=100, sint=20000, bint=300000000000) rename to partition (tint=110Y, sint=22000S, bint=330000000000L); +describe formatted partcoltypenum partition (tint=110Y, sint=22000, bint='330000000000'); + +-- insert partition +insert into partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) select key, value from src limit 10; +insert into partcoltypenum partition (tint=110, sint=22000, bint=330000000000) select key, value from src limit 20; + +-- select partition +select count(1) from partcoltypenum where tint=110Y and sint=22000S and bint=330000000000L; +select count(1) from partcoltypenum where tint=110Y and sint=22000 and bint='330000000000'; + +-- analyze partition statistics and columns statistics +analyze table partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) compute statistics; +describe extended partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L); + +analyze table partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) compute statistics for columns; +describe formatted partcoltypenum.key partition (tint=110Y, sint=22000S, bint=330000000000L); +describe formatted partcoltypenum.value partition (tint=110Y, sint=22000S, bint=330000000000L); + +-- change table column type for partition +alter table partcoltypenum change key key decimal(10,0); +alter table partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) change key key decimal(10,0); +describe formatted partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L); + +-- change partition column type +alter table partcoltypenum partition column (tint decimal(3,0)); +describe formatted partcoltypenum
partition (tint=110BD, sint=22000S, bint=330000000000L); + +-- show partition +show partitions partcoltypenum partition (tint=110BD, sint=22000S, bint=330000000000L); + +-- drop partition +alter table partcoltypenum drop partition (tint=110BD, sint=22000S, bint=330000000000L); +show partitions partcoltypenum; + +-- change partition file location +insert into partcoltypenum partition (tint=100BD, sint=20000S, bint=300000000000L) select key, value from src limit 10; +describe formatted partcoltypenum partition (tint=100BD, sint=20000S, bint=300000000000L); +alter table partcoltypenum partition(tint=100BD, sint=20000S, bint=300000000000L) set location "file:/test/test/tint=1/sint=2/bint=3"; +describe formatted partcoltypenum partition (tint=100BD, sint=20000S, bint=300000000000L); + +drop table partcoltypenum; + +drop table if exists partcoltypeothers; +create table partcoltypeothers (key int, value string) partitioned by (decpart decimal(6,2), datepart date); + +set hive.typecheck.on.insert=false; +insert into partcoltypeothers partition (decpart = 1000.01BD, datepart = date '2015-4-13') select key, value from src limit 10; +show partitions partcoltypeothers; + +set hive.typecheck.on.insert=true; +alter table partcoltypeothers partition(decpart = '1000.01BD', datepart = date '2015-4-13') rename to partition (decpart = 1000.01BD, datepart = date '2015-4-13'); +show partitions partcoltypeothers; + +drop table partcoltypeothers; + + diff --git a/ql/src/test/results/clientnegative/archive_partspec1.q.out b/ql/src/test/results/clientnegative/archive_partspec1.q.out index da4817c..3dd0e1f 100644 --- a/ql/src/test/results/clientnegative/archive_partspec1.q.out +++ b/ql/src/test/results/clientnegative/archive_partspec1.q.out @@ -26,4 +26,4 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -FAILED: SemanticException [Error 10125]: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr), while the partitions specified in the query are: (ds, nonexistingpart). 
+FAILED: SemanticException Partition spec {ds=2008-04-08, nonexistingpart=12} contains non-partition columns diff --git a/ql/src/test/results/clientnegative/archive_partspec5.q.out b/ql/src/test/results/clientnegative/archive_partspec5.q.out index c18de52..46d76e1 100644 --- a/ql/src/test/results/clientnegative/archive_partspec5.q.out +++ b/ql/src/test/results/clientnegative/archive_partspec5.q.out @@ -17,13 +17,13 @@ SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12' PREHOOK: type: QUERY PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=00 +PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=0 POSTHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12', min='00') SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12' POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=00 -POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=00).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=00).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=0 +POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=0).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=0).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] FAILED: SemanticException [Error 10125]: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr, min), while the partitions specified in the query are: (ds, min). 
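The min=00 to min=0 change in the archive_partspec5.q.out golden file above shows the new normalization at work: with hive.typecheck.on.insert enabled, validatePartColumnType converts each partition value to the column's declared type and writes the canonical string back into the partition spec. Below is a minimal standalone sketch of that conversion, using the same serde2 APIs the patch applies in BaseSemanticAnalyzer; the literal "00" and the int column type are illustrative assumptions mirroring the min column in this test, not code taken from the patch itself.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class PartSpecNormalizeSketch {
  public static void main(String[] args) {
    // Partition value as parsed from the query: a string literal with a leading zero.
    Object value = "00";
    ObjectInspector inputOI = TypeInfoUtils
        .getStandardJavaObjectInspectorFromTypeInfo(TypeInfoFactory.stringTypeInfo);
    // Declared type of the partition column ("min" is an int column in this test table).
    TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString("int");
    ObjectInspector outputOI = TypeInfoUtils
        .getStandardJavaObjectInspectorFromTypeInfo(expectedType);
    // Convert to the declared type; a null result would mean the literal cannot be
    // interpreted as that type (reported as PARTITION_SPEC_TYPE_MISMATCH in the patch).
    Object converted = ObjectInspectorConverters.getConverter(inputOI, outputOI).convert(value);
    // Prints 0: the canonical form that replaces "00" in the partition spec.
    System.out.println(converted);
  }
}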
diff --git a/ql/src/test/results/clientpositive/partition_coltype_literals.q.out b/ql/src/test/results/clientpositive/partition_coltype_literals.q.out new file mode 100644 index 0000000..9e594c9 --- /dev/null +++ b/ql/src/test/results/clientpositive/partition_coltype_literals.q.out @@ -0,0 +1,647 @@ +PREHOOK: query: drop table if exists partcoltypenum +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists partcoltypenum +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table partcoltypenum (key int, value string) partitioned by (tint tinyint, sint smallint, bint bigint) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@partcoltypenum +POSTHOOK: query: create table partcoltypenum (key int, value string) partitioned by (tint tinyint, sint smallint, bint bigint) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@partcoltypenum +PREHOOK: query: -- add partition +alter table partcoltypenum add partition(tint=100Y, sint=20000S, bint=300000000000L) +PREHOOK: type: ALTERTABLE_ADDPARTS +PREHOOK: Output: default@partcoltypenum +POSTHOOK: query: -- add partition +alter table partcoltypenum add partition(tint=100Y, sint=20000S, bint=300000000000L) +POSTHOOK: type: ALTERTABLE_ADDPARTS +POSTHOOK: Output: default@partcoltypenum +POSTHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +PREHOOK: query: -- describe partition +describe formatted partcoltypenum partition (tint=100, sint=20000S, bint='300000000000') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: -- describe partition +describe formatted partcoltypenum partition (tint=100, sint=20000S, bint='300000000000') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@partcoltypenum +# col_name data_type comment + +key int +value string + +# Partition Information +# col_name data_type comment + +tint tinyint +sint smallint +bint bigint + +# Detailed Partition Information +Partition Value: [100, 20000, 300000000000] +Database: default +Table: partcoltypenum +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: -- change partition file format +alter table partcoltypenum partition(tint=100, sint=20000S, bint='300000000000') set fileformat rcfile +PREHOOK: type: ALTERPARTITION_FILEFORMAT +PREHOOK: Input: default@partcoltypenum +PREHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +POSTHOOK: query: -- change partition file format +alter table partcoltypenum partition(tint=100, sint=20000S, bint='300000000000') set fileformat rcfile +POSTHOOK: type: ALTERPARTITION_FILEFORMAT +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Input: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +POSTHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +PREHOOK: query: describe formatted partcoltypenum partition (tint=100Y, sint=20000S, bint=300000000000L) +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: describe formatted partcoltypenum partition (tint=100Y, sint=20000S, 
bint=300000000000L) +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@partcoltypenum +# col_name data_type comment + +key int +value string + +# Partition Information +# col_name data_type comment + +tint tinyint +sint smallint +bint bigint + +# Detailed Partition Information +Partition Value: [100, 20000, 300000000000] +Database: default +Table: partcoltypenum +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE false +#### A masked pattern was here #### + numFiles 0 + numRows -1 + rawDataSize -1 + totalSize 0 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: -- change partition clusterby, sortby and bucket +alter table partcoltypenum partition(tint='100', sint=20000, bint=300000000000L) clustered by (key) sorted by (key desc) into 4 buckets +PREHOOK: type: ALTERTABLE_CLUSTER_SORT +PREHOOK: Input: default@partcoltypenum +PREHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +POSTHOOK: query: -- change partition clusterby, sortby and bucket +alter table partcoltypenum partition(tint='100', sint=20000, bint=300000000000L) clustered by (key) sorted by (key desc) into 4 buckets +POSTHOOK: type: ALTERTABLE_CLUSTER_SORT +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Input: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +POSTHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +PREHOOK: query: describe formatted partcoltypenum partition (tint=100Y, sint=20000S, bint=300000000000L) +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: describe formatted partcoltypenum partition (tint=100Y, sint=20000S, bint=300000000000L) +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@partcoltypenum +# col_name data_type comment + +key int +value string + +# Partition Information +# col_name data_type comment + +tint tinyint +sint smallint +bint bigint + +# Detailed Partition Information +Partition Value: [100, 20000, 300000000000] +Database: default +Table: partcoltypenum +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE false +#### A masked pattern was here #### + numFiles 0 + numRows -1 + rawDataSize -1 + totalSize 0 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat +Compressed: No +Num Buckets: 4 +Bucket Columns: [key] +Sort Columns: [Order(col:key, order:0)] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: -- rename partition +alter table partcoltypenum partition(tint=100, sint=20000, bint=300000000000) rename to partition (tint=110Y, sint=22000S, bint=330000000000L) +PREHOOK: type: ALTERTABLE_RENAMEPART +PREHOOK: Input: default@partcoltypenum +PREHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +POSTHOOK: query: -- rename partition +alter table partcoltypenum partition(tint=100, sint=20000, bint=300000000000) rename to partition (tint=110Y, sint=22000S, 
bint=330000000000L) +POSTHOOK: type: ALTERTABLE_RENAMEPART +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Input: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +POSTHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +POSTHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +PREHOOK: query: describe formatted partcoltypenum partition (tint=110Y, sint=22000, bint='330000000000') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: describe formatted partcoltypenum partition (tint=110Y, sint=22000, bint='330000000000') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@partcoltypenum +# col_name data_type comment + +key int +value string + +# Partition Information +# col_name data_type comment + +tint tinyint +sint smallint +bint bigint + +# Detailed Partition Information +Partition Value: [110, 22000, 330000000000] +Database: default +Table: partcoltypenum +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE false +#### A masked pattern was here #### + numFiles 0 + numRows -1 + rawDataSize -1 + totalSize 0 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat +Compressed: No +Num Buckets: 4 +Bucket Columns: [key] +Sort Columns: [Order(col:key, order:0)] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: -- insert partition +insert into partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) select key, value from src limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +POSTHOOK: query: -- insert partition +insert into partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) select key, value from src limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +POSTHOOK: Lineage: partcoltypenum PARTITION(tint=110,sint=22000,bint=330000000000).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partcoltypenum PARTITION(tint=110,sint=22000,bint=330000000000).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: insert into partcoltypenum partition (tint=110, sint=22000, bint=330000000000) select key, value from src limit 20 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +POSTHOOK: query: insert into partcoltypenum partition (tint=110, sint=22000, bint=330000000000) select key, value from src limit 20 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +POSTHOOK: Lineage: partcoltypenum PARTITION(tint=110,sint=22000,bint=330000000000).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partcoltypenum PARTITION(tint=110,sint=22000,bint=330000000000).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: -- select partition +select count(1) from partcoltypenum where tint=110Y and sint=22000S and bint=330000000000L +PREHOOK: type: QUERY +PREHOOK: Input: default@partcoltypenum 
+PREHOOK: Input: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +#### A masked pattern was here #### +POSTHOOK: query: -- select partition +select count(1) from partcoltypenum where tint=110Y and sint=22000S and bint=330000000000L +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Input: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +#### A masked pattern was here #### +30 +PREHOOK: query: select count(1) from partcoltypenum where tint=110Y and sint=22000 and bint='330000000000' +PREHOOK: type: QUERY +PREHOOK: Input: default@partcoltypenum +PREHOOK: Input: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +#### A masked pattern was here #### +POSTHOOK: query: select count(1) from partcoltypenum where tint=110Y and sint=22000 and bint='330000000000' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Input: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +#### A masked pattern was here #### +30 +PREHOOK: query: -- analyze partition statistics and columns statistics +analyze table partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@partcoltypenum +PREHOOK: Input: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +PREHOOK: Output: default@partcoltypenum +PREHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +POSTHOOK: query: -- analyze partition statistics and columns statistics +analyze table partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Input: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +POSTHOOK: Output: default@partcoltypenum +POSTHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +PREHOOK: query: describe extended partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: describe extended partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@partcoltypenum +key int +value string +tint tinyint +sint smallint +bint bigint + +# Partition Information +# col_name data_type comment + +tint tinyint +sint smallint +bint bigint + +#### A masked pattern was here #### +PREHOOK: query: analyze table partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) compute statistics for columns +PREHOOK: type: QUERY +PREHOOK: Input: default@partcoltypenum +PREHOOK: Input: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +#### A masked pattern was here #### +POSTHOOK: query: analyze table partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) compute statistics for columns +POSTHOOK: type: QUERY +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Input: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +#### A masked pattern was here #### +PREHOOK: query: describe formatted partcoltypenum.key partition (tint=110Y, sint=22000S, bint=330000000000L) +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: describe formatted partcoltypenum.key partition (tint=110Y, sint=22000S, bint=330000000000L) +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@partcoltypenum +# col_name data_type min max num_nulls distinct_count avg_col_len max_col_len num_trues num_falses comment + +key int 27 484 0 18 from 
deserializer +PREHOOK: query: describe formatted partcoltypenum.value partition (tint=110Y, sint=22000S, bint=330000000000L) +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: describe formatted partcoltypenum.value partition (tint=110Y, sint=22000S, bint=330000000000L) +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@partcoltypenum +# col_name data_type min max num_nulls distinct_count avg_col_len max_col_len num_trues num_falses comment + +value string 0 18 6.766666666666667 7 from deserializer +PREHOOK: query: -- change table column type for partition +alter table partcoltypenum change key key decimal(10,0) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@partcoltypenum +PREHOOK: Output: default@partcoltypenum +POSTHOOK: query: -- change table column type for partition +alter table partcoltypenum change key key decimal(10,0) +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Output: default@partcoltypenum +PREHOOK: query: alter table partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) change key key decimal(10,0) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@partcoltypenum +PREHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +POSTHOOK: query: alter table partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) change key key decimal(10,0) +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Input: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +POSTHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +PREHOOK: query: describe formatted partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: describe formatted partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@partcoltypenum +# col_name data_type comment + +key decimal(10,0) +value string + +# Partition Information +# col_name data_type comment + +tint tinyint +sint smallint +bint bigint + +# Detailed Partition Information +Partition Value: [110, 22000, 330000000000] +Database: default +Table: partcoltypenum +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE true +#### A masked pattern was here #### + numFiles 2 + numRows 30 + rawDataSize 316 + totalSize 346 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: -- change partition column type +alter table partcoltypenum partition column (tint decimal(3,0)) +PREHOOK: type: ALTERTABLE_PARTCOLTYPE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: -- change partition column type +alter table partcoltypenum partition column (tint decimal(3,0)) +POSTHOOK: type: ALTERTABLE_PARTCOLTYPE +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Output: default@partcoltypenum +PREHOOK: query: describe formatted partcoltypenum partition (tint=110BD, sint=22000S, bint=330000000000L) +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: describe
formatted partcoltypenum partition (tint=110BD, sint=22000S, bint=330000000000L) +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@partcoltypenum +# col_name data_type comment + +key decimal(10,0) +value string + +# Partition Information +# col_name data_type comment + +tint decimal(3,0) +sint smallint +bint bigint + +# Detailed Partition Information +Partition Value: [110, 22000, 330000000000] +Database: default +Table: partcoltypenum +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE true +#### A masked pattern was here #### + numFiles 2 + numRows 30 + rawDataSize 316 + totalSize 346 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: -- show partition +show partitions partcoltypenum partition (tint=110BD, sint=22000S, bint=330000000000L) +PREHOOK: type: SHOWPARTITIONS +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: -- show partition +show partitions partcoltypenum partition (tint=110BD, sint=22000S, bint=330000000000L) +POSTHOOK: type: SHOWPARTITIONS +POSTHOOK: Input: default@partcoltypenum +tint=110/sint=22000/bint=330000000000 +PREHOOK: query: -- drop partition +alter table partcoltypenum drop partition (tint=110BD, sint=22000S, bint=330000000000L) +PREHOOK: type: ALTERTABLE_DROPPARTS +PREHOOK: Input: default@partcoltypenum +PREHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +POSTHOOK: query: -- drop partition +alter table partcoltypenum drop partition (tint=110BD, sint=22000S, bint=330000000000L) +POSTHOOK: type: ALTERTABLE_DROPPARTS +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Output: default@partcoltypenum@tint=110/sint=22000/bint=330000000000 +PREHOOK: query: show partitions partcoltypenum +PREHOOK: type: SHOWPARTITIONS +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: show partitions partcoltypenum +POSTHOOK: type: SHOWPARTITIONS +POSTHOOK: Input: default@partcoltypenum +PREHOOK: query: -- change partition file location +insert into partcoltypenum partition (tint=100BD, sint=20000S, bint=300000000000L) select key, value from src limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +POSTHOOK: query: -- change partition file location +insert into partcoltypenum partition (tint=100BD, sint=20000S, bint=300000000000L) select key, value from src limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +POSTHOOK: Lineage: partcoltypenum PARTITION(tint=100,sint=20000,bint=300000000000).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partcoltypenum PARTITION(tint=100,sint=20000,bint=300000000000).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: describe formatted partcoltypenum partition (tint=100BD, sint=20000S, bint=300000000000L) +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: describe formatted partcoltypenum partition (tint=100BD, sint=20000S, bint=300000000000L) +POSTHOOK: type: DESCTABLE +POSTHOOK: 
Input: default@partcoltypenum +# col_name data_type comment + +key decimal(10,0) +value string + +# Partition Information +# col_name data_type comment + +tint decimal(3,0) +sint smallint +bint bigint + +# Detailed Partition Information +Partition Value: [100, 20000, 300000000000] +Database: default +Table: partcoltypenum +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE true + numFiles 1 + numRows 10 + rawDataSize 104 + totalSize 114 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +#### A masked pattern was here #### +PREHOOK: type: ALTERPARTITION_LOCATION +PREHOOK: Input: default@partcoltypenum +PREHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +#### A masked pattern was here #### +POSTHOOK: type: ALTERPARTITION_LOCATION +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Input: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +POSTHOOK: Output: default@partcoltypenum@tint=100/sint=20000/bint=300000000000 +#### A masked pattern was here #### +PREHOOK: query: describe formatted partcoltypenum partition (tint=100BD, sint=20000S, bint=300000000000L) +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@partcoltypenum +POSTHOOK: query: describe formatted partcoltypenum partition (tint=100BD, sint=20000S, bint=300000000000L) +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@partcoltypenum +# col_name data_type comment + +key decimal(10,0) +value string + +# Partition Information +# col_name data_type comment + +tint decimal(3,0) +sint smallint +bint bigint + +# Detailed Partition Information +Partition Value: [100, 20000, 300000000000] +Database: default +Table: partcoltypenum +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE true +#### A masked pattern was here #### + numFiles 1 + numRows 10 + rawDataSize 104 + totalSize 114 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: drop table partcoltypenum +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@partcoltypenum +PREHOOK: Output: default@partcoltypenum +POSTHOOK: query: drop table partcoltypenum +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@partcoltypenum +POSTHOOK: Output: default@partcoltypenum +PREHOOK: query: drop table if exists partcoltypeothers +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists partcoltypeothers +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table partcoltypeothers (key int, value string) partitioned by (decpart decimal(6,2), datepart date) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@partcoltypeothers +POSTHOOK: query: create table partcoltypeothers (key int, value string) partitioned by (decpart decimal(6,2), datepart date) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: 
database:default +POSTHOOK: Output: default@partcoltypeothers +PREHOOK: query: insert into partcoltypeothers partition (decpart = 1000.01BD, datepart = date '2015-4-13') select key, value from src limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@partcoltypeothers@decpart=1000.01BD/datepart=2015-4-13 +POSTHOOK: query: insert into partcoltypeothers partition (decpart = 1000.01BD, datepart = date '2015-4-13') select key, value from src limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@partcoltypeothers@decpart=1000.01BD/datepart=2015-4-13 +POSTHOOK: Lineage: partcoltypeothers PARTITION(decpart=1000.01BD,datepart=2015-4-13).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partcoltypeothers PARTITION(decpart=1000.01BD,datepart=2015-4-13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: show partitions partcoltypeothers +PREHOOK: type: SHOWPARTITIONS +PREHOOK: Input: default@partcoltypeothers +POSTHOOK: query: show partitions partcoltypeothers +POSTHOOK: type: SHOWPARTITIONS +POSTHOOK: Input: default@partcoltypeothers +decpart=1000.01BD/datepart=2015-4-13 +PREHOOK: query: alter table partcoltypeothers partition(decpart = '1000.01BD', datepart = date '2015-4-13') rename to partition (decpart = 1000.01BD, datepart = date '2015-4-13') +PREHOOK: type: ALTERTABLE_RENAMEPART +PREHOOK: Input: default@partcoltypeothers +PREHOOK: Output: default@partcoltypeothers@decpart=1000.01BD/datepart=2015-4-13 +POSTHOOK: query: alter table partcoltypeothers partition(decpart = '1000.01BD', datepart = date '2015-4-13') rename to partition (decpart = 1000.01BD, datepart = date '2015-4-13') +POSTHOOK: type: ALTERTABLE_RENAMEPART +POSTHOOK: Input: default@partcoltypeothers +POSTHOOK: Input: default@partcoltypeothers@decpart=1000.01BD/datepart=2015-4-13 +POSTHOOK: Output: default@partcoltypeothers@decpart=1000.01/datepart=2015-04-13 +POSTHOOK: Output: default@partcoltypeothers@decpart=1000.01BD/datepart=2015-4-13 +PREHOOK: query: show partitions partcoltypeothers +PREHOOK: type: SHOWPARTITIONS +PREHOOK: Input: default@partcoltypeothers +POSTHOOK: query: show partitions partcoltypeothers +POSTHOOK: type: SHOWPARTITIONS +POSTHOOK: Input: default@partcoltypeothers +decpart=1000.01/datepart=2015-04-13 +PREHOOK: query: drop table partcoltypeothers +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@partcoltypeothers +PREHOOK: Output: default@partcoltypeothers +POSTHOOK: query: drop table partcoltypeothers +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@partcoltypeothers +POSTHOOK: Output: default@partcoltypeothers diff --git a/ql/src/test/results/clientpositive/partition_timestamp.q.out b/ql/src/test/results/clientpositive/partition_timestamp.q.out index bc6ab10..7059006 100644 --- a/ql/src/test/results/clientpositive/partition_timestamp.q.out +++ b/ql/src/test/results/clientpositive/partition_timestamp.q.out @@ -14,79 +14,79 @@ PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000- select * from src tablesample (10 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 01:00:00', region= '1') select * from src tablesample (10 rows) POSTHOOK: type: QUERY POSTHOOK: Input: 
default@src -POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 02:00:00', region= '2') select * from src tablesample (5 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 +PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 02:00:00', region= '2') select * from src tablesample (5 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00.0,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00.0,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 01:00:00', region= '2020-20-20') select * from src tablesample (5 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 +PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 01:00:00', region= '2020-20-20') select * from src tablesample (5 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00,region=2020-20-20).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00,region=2020-20-20).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00.0,region=2020-20-20).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] 
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00.0,region=2020-20-20).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 02:00:00', region= '1') select * from src tablesample (20 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 02:00:00', region= '1') select * from src tablesample (20 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 03:00:00', region= '10') select * from src tablesample (11 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 +PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 03:00:00', region= '10') select * from src tablesample (11 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00,region=10).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00,region=10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00.0,region=10).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00.0,region=10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select distinct dt from partition_timestamp_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 +PREHOOK: Input: 
default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 #### A masked pattern was here #### POSTHOOK: query: select distinct dt from partition_timestamp_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 #### A masked pattern was here #### 2000-01-01 01:00:00 2000-01-01 02:00:00 @@ -105,67 +105,67 @@ PREHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt = timestamp '2000-01-01 01:00:00' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt = timestamp '2000-01-01 01:00:00' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 10 PREHOOK: query: -- 10. Also try with string value in predicate select count(*) from partition_timestamp_1 where dt = '2000-01-01 01:00:00' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 10. 
Also try with string value in predicate select count(*) from partition_timestamp_1 where dt = '2000-01-01 01:00:00' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 10 PREHOOK: query: -- 5 select count(*) from partition_timestamp_1 where dt = timestamp '2000-01-01 02:00:00' and region = '2' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 #### A masked pattern was here #### POSTHOOK: query: -- 5 select count(*) from partition_timestamp_1 where dt = timestamp '2000-01-01 02:00:00' and region = '2' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2 #### A masked pattern was here #### 5 PREHOOK: query: -- 11 select count(*) from partition_timestamp_1 where dt = timestamp '2001-01-01 03:00:00' and region = '10' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 #### A masked pattern was here #### POSTHOOK: query: -- 11 select count(*) from partition_timestamp_1 where dt = timestamp '2001-01-01 03:00:00' and region = '10' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10 #### A masked pattern was here #### 11 PREHOOK: query: -- 30 select count(*) from partition_timestamp_1 where region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 30 select count(*) from partition_timestamp_1 where region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### 30 PREHOOK: query: -- 0 @@ -196,7 +196,7 @@ PREHOOK: query: -- Try other comparison operations select count(*) from partition_timestamp_1 where dt > timestamp '2000-01-01 01:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- Try other 
comparison operations @@ -204,72 +204,72 @@ POSTHOOK: query: -- Try other comparison operations select count(*) from partition_timestamp_1 where dt > timestamp '2000-01-01 01:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### 20 PREHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt < timestamp '2000-01-02 01:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt < timestamp '2000-01-02 01:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 10 PREHOOK: query: -- 20 select count(*) from partition_timestamp_1 where dt >= timestamp '2000-01-02 01:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 20 select count(*) from partition_timestamp_1 where dt >= timestamp '2000-01-02 01:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### 20 PREHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt <= timestamp '2000-01-01 01:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt <= timestamp '2000-01-01 01:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 10 PREHOOK: query: -- 20 select count(*) from partition_timestamp_1 where dt <> timestamp '2000-01-01 01:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 20 select count(*) from partition_timestamp_1 where dt <> timestamp '2000-01-01 01:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 
02%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1 #### A masked pattern was here #### 20 PREHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt between timestamp '1999-12-30 12:00:00' and timestamp '2000-01-03 12:00:00' and region = '1' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: -- 10 select count(*) from partition_timestamp_1 where dt between timestamp '1999-12-30 12:00:00' and timestamp '2000-01-03 12:00:00' and region = '1' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 10 PREHOOK: query: -- Try a string key with timestamp-like strings @@ -278,7 +278,7 @@ PREHOOK: query: -- Try a string key with timestamp-like strings select count(*) from partition_timestamp_1 where region = '2020-20-20' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 #### A masked pattern was here #### POSTHOOK: query: -- Try a string key with timestamp-like strings @@ -286,20 +286,20 @@ POSTHOOK: query: -- Try a string key with timestamp-like strings select count(*) from partition_timestamp_1 where region = '2020-20-20' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 #### A masked pattern was here #### 5 PREHOOK: query: -- 5 select count(*) from partition_timestamp_1 where region > '2010-01-01' PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp_1 -PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 +PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 #### A masked pattern was here #### POSTHOOK: query: -- 5 select count(*) from partition_timestamp_1 where region > '2010-01-01' POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp_1 -POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20 +POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20 #### A masked pattern was here #### 5 PREHOOK: query: drop table partition_timestamp_1 diff --git a/ql/src/test/results/clientpositive/partition_timestamp2.q.out b/ql/src/test/results/clientpositive/partition_timestamp2.q.out index 365df69..772df1a 100644 --- a/ql/src/test/results/clientpositive/partition_timestamp2.q.out +++ b/ql/src/test/results/clientpositive/partition_timestamp2.q.out @@ -18,10 +18,10 @@ insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01 insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01 01:00:00', region=2) select * PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Output: 
default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 POSTHOOK: query: -- test timestamp literal syntax from (select * from src tablesample (1 rows)) x insert overwrite table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1) select * @@ -30,33 +30,33 @@ insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01 insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01 01:00:00', region=2) select * POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00.0,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00.0,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00.0,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00.0,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select distinct dt from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select distinct dt from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 1999-01-01 00:00:00 1999-01-01 01:00:00 @@ -65,18 +65,18 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio PREHOOK: query: select * from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: 
select * from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1999-01-01 00:00:00 2 238 val_238 1999-01-01 01:00:00 2 @@ -87,30 +87,30 @@ insert overwrite table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 select 'changed_key', 'changed_value' from src tablesample (2 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 +PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 POSTHOOK: query: -- insert overwrite insert overwrite table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1) select 'changed_key', 'changed_value' from src tablesample (2 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).key SIMPLE [] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).value SIMPLE [] +POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).key SIMPLE [] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).value SIMPLE [] PREHOOK: query: select * from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: 
default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1999-01-01 00:00:00 2 238 val_238 1999-01-01 01:00:00 2 @@ -120,26 +120,26 @@ changed_key changed_value 2000-01-01 00:00:00 1 PREHOOK: query: -- truncate truncate table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1) PREHOOK: type: TRUNCATETABLE -PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 +PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 POSTHOOK: query: -- truncate truncate table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1) POSTHOOK: type: TRUNCATETABLE -POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 +POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 PREHOOK: query: select distinct dt from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select distinct dt from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 1999-01-01 00:00:00 1999-01-01 01:00:00 @@ -148,18 +148,18 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio PREHOOK: query: select * from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: 
Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1999-01-01 00:00:00 2 238 val_238 1999-01-01 01:00:00 2 @@ -172,24 +172,24 @@ POSTHOOK: query: -- alter table add partition alter table partition_timestamp2_1 add partition (dt=timestamp '1980-01-02 00:00:00', region=3) POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Output: default@partition_timestamp2_1 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 PREHOOK: query: select distinct dt from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select distinct dt from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A 
masked pattern was here #### 1980-01-02 00:00:00 1999-01-01 00:00:00 @@ -199,20 +199,20 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio PREHOOK: query: select * from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1999-01-01 00:00:00 2 238 val_238 1999-01-01 01:00:00 2 @@ -221,27 +221,27 @@ PREHOOK: query: -- alter table drop alter table partition_timestamp2_1 drop partition (dt=timestamp '1999-01-01 01:00:00', region=2) PREHOOK: type: ALTERTABLE_DROPPARTS PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 +PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 POSTHOOK: query: -- alter table drop alter table partition_timestamp2_1 drop partition (dt=timestamp '1999-01-01 01:00:00', region=2) POSTHOOK: type: ALTERTABLE_DROPPARTS POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2 PREHOOK: query: select distinct dt from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: 
default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select distinct dt from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 1980-01-02 00:00:00 1999-01-01 00:00:00 @@ -250,18 +250,18 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio PREHOOK: query: select * from partition_timestamp2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1999-01-01 00:00:00 2 238 val_238 2000-01-01 01:00:00 1 @@ -270,27 +270,27 @@ alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' PREHOOK: type: ALTERPARTITION_SERIALIZER PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 POSTHOOK: 
query: -- alter table set serde alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' POSTHOOK: type: ALTERPARTITION_SERIALIZER POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 PREHOOK: query: -- alter table set fileformat alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) set fileformat rcfile PREHOOK: type: ALTERPARTITION_FILEFORMAT PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 POSTHOOK: query: -- alter table set fileformat alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) set fileformat rcfile POSTHOOK: type: ALTERPARTITION_FILEFORMAT POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 PREHOOK: query: describe extended partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) PREHOOK: type: DESCTABLE PREHOOK: Input: default@partition_timestamp2_1 @@ -313,29 +313,29 @@ PREHOOK: query: insert overwrite table partition_timestamp2_1 partition(dt=times select * from src tablesample (2 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 POSTHOOK: query: insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) select * from src tablesample (2 rows) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00,region=3).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00,region=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00.0,region=3).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00.0,region=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from partition_timestamp2_1 order by key,value,dt,region PREHOOK: type: QUERY PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### POSTHOOK: query: select * from partition_timestamp2_1 order by key,value,dt,region POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1 +POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1 #### A masked pattern was here #### 238 val_238 1980-01-02 00:00:00 3 238 val_238 1999-01-01 00:00:00 2 @@ -346,15 +346,15 @@ alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', #### A masked pattern was here #### PREHOOK: type: ALTERPARTITION_LOCATION PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 #### A masked pattern was here #### POSTHOOK: query: -- alter table set location alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) #### A masked pattern was here #### POSTHOOK: type: ALTERPARTITION_LOCATION POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 #### A masked pattern was here #### PREHOOK: query: describe extended partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3) PREHOOK: type: DESCTABLE @@ -378,13 +378,13 @@ PREHOOK: query: -- alter table touch alter table partition_timestamp2_1 touch partition(dt=timestamp '1980-01-02 00:00:00', region=3) PREHOOK: type: ALTERTABLE_TOUCH PREHOOK: Input: default@partition_timestamp2_1 -PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 POSTHOOK: query: -- alter table touch alter table partition_timestamp2_1 touch partition(dt=timestamp '1980-01-02 00:00:00', region=3) POSTHOOK: type: ALTERTABLE_TOUCH POSTHOOK: Input: default@partition_timestamp2_1 -POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 -POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3 +POSTHOOK: Input: 
default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 +POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3 PREHOOK: query: drop table partition_timestamp2_1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@partition_timestamp2_1
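
Note on the golden-file changes above: the updated expected outputs show partition values being normalized to the canonical string form of their declared column type — the decimal literal loses its BD suffix (1000.01BD -> 1000.01), the date literal is zero-padded (2015-4-13 -> 2015-04-13), and timestamp partition names gain a fractional-seconds suffix (01:00:00 -> 01:00:00.0). The self-contained Java sketch below is illustrative only, not Hive code (the class name PartitionValueNormalization is invented); it reproduces the two java.sql formatting behaviors that account for the ".0" and the zero-padding seen throughout these diffs.

import java.sql.Date;
import java.sql.Timestamp;

public class PartitionValueNormalization {
    public static void main(String[] args) {
        // Timestamp.toString() always emits at least one fractional digit,
        // so the value '2000-01-01 01:00:00' prints as "2000-01-01 01:00:00.0",
        // matching the new partition names like dt=2000-01-01 01%3A00%3A00.0.
        Timestamp ts = Timestamp.valueOf("2000-01-01 01:00:00");
        System.out.println(ts); // 2000-01-01 01:00:00.0

        // Date.valueOf() accepts unpadded month/day fields ("yyyy-[m]m-[d]d")
        // and toString() zero-pads them, so date '2015-4-13' prints as
        // "2015-04-13", matching datepart=2015-04-13 after the rename.
        Date d = Date.valueOf("2015-4-13");
        System.out.println(d); // 2015-04-13
    }
}

The decimal case presumably follows the same pattern: once the literal is converted to the column's decimal type and printed back, the BD literal marker is gone, which is why the renamed partition appears as decpart=1000.01 rather than decpart=1000.01BD.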