diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java index b4b726a6b8..3f0c699eb7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java @@ -43,18 +43,18 @@ public static String getSchema(boolean colStats) { private final String resFile; private final String tableName; - private final Map<String, String> partSpec; - private final String colPath; - private final boolean isExt; + private final Map<String, String> partitionSpec; + private final String columnPath; + private final boolean isExtended; private final boolean isFormatted; - public DescTableDesc(Path resFile, String tableName, Map<String, String> partSpec, String colPath, boolean isExt, - boolean isFormatted) { + public DescTableDesc(Path resFile, String tableName, Map<String, String> partitionSpec, String columnPath, + boolean isExtended, boolean isFormatted) { this.resFile = resFile.toString(); this.tableName = tableName; - this.partSpec = partSpec; - this.colPath = colPath; - this.isExt = isExt; + this.partitionSpec = partitionSpec; + this.columnPath = columnPath; + this.isExtended = isExtended; this.isFormatted = isFormatted; } @@ -69,18 +69,22 @@ public String getTableName() { } @Explain(displayName = "partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public Map<String, String> getPartSpec() { - return partSpec; + public Map<String, String> getPartitionSpec() { + return partitionSpec; } public String getColumnPath() { - return colPath; + return columnPath; } - public boolean isExt() { - return isExt; + @Explain(displayName = "extended", displayOnlyOnTrue = true, + explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public boolean isExtended() { + return isExtended; } + @Explain(displayName = "formatted", displayOnlyOnTrue = true, + explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public boolean isFormatted() { return isFormatted; }
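The two @Explain annotations added above are what make the new flags visible in EXPLAIN plans. A minimal hedged sketch of the same pattern, using only the annotation attributes that already appear in this patch (displayName, displayOnlyOnTrue, explainLevels); the SampleDesc class is invented for illustration and is not part of this change:

import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.Explain.Level;

public class SampleDesc {
  private final boolean extended;

  public SampleDesc(boolean extended) {
    this.extended = extended;
  }

  // With displayOnlyOnTrue, the "extended" attribute is expected to show up
  // in the plan only when the getter returns true, keeping plain plans quiet.
  @Explain(displayName = "extended", displayOnlyOnTrue = true,
      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public boolean isExtended() {
    return extended;
  }
}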
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java index 2c6e35fb8c..d48ae0485b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java @@ -21,7 +21,6 @@ import java.io.DataOutputStream; import java.sql.SQLException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -40,24 +39,21 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; import org.apache.hadoop.hive.ql.ddl.DDLUtils; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.lockmgr.LockException; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.ddl.DDLOperation; -import org.apache.hadoop.hive.ql.metadata.CheckConstraint; -import org.apache.hadoop.hive.ql.metadata.DefaultConstraint; -import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.NotNullConstraint; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.PartitionIterable; -import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo; -import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.metadata.UniqueConstraint; +import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ColStatistics; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.stats.StatsUtils; @@ -66,7 +62,8 @@ import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; -import org.apache.hadoop.io.IOUtils; + +import com.google.common.collect.Lists; /** * Operation process of describing a table. */ @@ -78,185 +75,190 @@ public DescTableOperation(DDLOperationContext context, DescTableDesc desc) { @Override public int execute() throws Exception { - String colPath = desc.getColumnPath(); - String tableName = desc.getTableName(); + Table table = getTable(); + Partition part = getPartition(table); + + try (DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context)) { + LOG.debug("DDLTask: got data for {}", desc.getTableName()); + + List<FieldSchema> cols = new ArrayList<>(); + List<ColumnStatisticsObj> colStats = new ArrayList<>(); + + Deserializer deserializer = getDeserializer(table); + + if (desc.getColumnPath() == null) { + getColumnsNoColumnPath(table, part, cols); + } else { + if (desc.isFormatted()) { + getColumnDataColPathSpecified(table, part, cols, colStats, deserializer); + } else { + cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer)); + } + } + fixDecimalColumnTypeName(cols); + + setConstraintsAndStorageHandlerInfo(table); + handleMaterializedView(table); + // In case the query is served by HiveServer2, don't pad it with spaces, + // as HiveServer2 output is consumed by JDBC/ODBC clients.
+ boolean isOutputPadded = !SessionState.get().isHiveServerQuery(); + context.getFormatter().describeTable(outStream, desc.getColumnPath(), desc.getTableName(), table, part, cols, + desc.isFormatted(), desc.isExtended(), isOutputPadded, colStats); + + LOG.debug("DDLTask: written data for {}", desc.getTableName()); - // describe the table - populate the output stream - Table tbl = context.getDb().getTable(tableName, false); - if (tbl == null) { - throw new HiveException(ErrorMsg.INVALID_TABLE, tableName); + } catch (SQLException e) { + throw new HiveException(e, ErrorMsg.GENERIC_ERROR, desc.getTableName()); + } + + return 0; + } + + private Table getTable() throws HiveException { + Table table = context.getDb().getTable(desc.getTableName(), false); + if (table == null) { + throw new HiveException(ErrorMsg.INVALID_TABLE, desc.getTableName()); } + return table; + } + + private Partition getPartition(Table table) throws HiveException { Partition part = null; - if (desc.getPartSpec() != null) { - part = context.getDb().getPartition(tbl, desc.getPartSpec(), false); + if (desc.getPartitionSpec() != null) { + part = context.getDb().getPartition(table, desc.getPartitionSpec(), false); if (part == null) { throw new HiveException(ErrorMsg.INVALID_PARTITION, - StringUtils.join(desc.getPartSpec().keySet(), ','), tableName); + StringUtils.join(desc.getPartitionSpec().keySet(), ','), desc.getTableName()); } - tbl = part.getTable(); + } + return part; + } + + private Deserializer getDeserializer(Table table) throws SQLException { + Deserializer deserializer = table.getDeserializer(true); + if (deserializer instanceof AbstractSerDe) { + String errorMsgs = ((AbstractSerDe) deserializer).getConfigurationErrors(); + if (StringUtils.isNotEmpty(errorMsgs)) { + throw new SQLException(errorMsgs); + } + } + return deserializer; + } + + private void getColumnsNoColumnPath(Table table, Partition partition, List<FieldSchema> cols) throws HiveException { + cols.addAll(partition == null || table.getTableType() == TableType.VIRTUAL_VIEW ? + table.getCols() : partition.getCols()); + if (!desc.isFormatted()) { + cols.addAll(table.getPartCols()); } - DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context); - try { - LOG.debug("DDLTask: got data for {}", tableName); + if (table.isPartitioned() && partition == null) { + // No partition specified for partitioned table, let's fetch all. + Map<String, String> tblProps = table.getParameters() == null ?
+ new HashMap() : table.getParameters(); - List cols = null; - List colStats = null; + Map valueMap = new HashMap<>(); + Map stateMap = new HashMap<>(); + for (String stat : StatsSetupConst.SUPPORTED_STATS) { + valueMap.put(stat, 0L); + stateMap.put(stat, true); + } - Deserializer deserializer = tbl.getDeserializer(true); - if (deserializer instanceof AbstractSerDe) { - String errorMsgs = ((AbstractSerDe) deserializer).getConfigurationErrors(); - if (errorMsgs != null && !errorMsgs.isEmpty()) { - throw new SQLException(errorMsgs); + PartitionIterable partitions = new PartitionIterable(context.getDb(), table, null, + MetastoreConf.getIntVar(context.getConf(), MetastoreConf.ConfVars.BATCH_RETRIEVE_MAX)); + int numParts = 0; + for (Partition p : partitions) { + Map partitionProps = p.getParameters(); + Boolean state = StatsSetupConst.areBasicStatsUptoDate(partitionProps); + for (String stat : StatsSetupConst.SUPPORTED_STATS) { + stateMap.put(stat, stateMap.get(stat) && state); + if (partitionProps != null && partitionProps.get(stat) != null) { + valueMap.put(stat, valueMap.get(stat) + Long.parseLong(partitionProps.get(stat))); + } } + numParts++; } + tblProps.put(StatsSetupConst.NUM_PARTITIONS, Integer.toString(numParts)); - if (colPath.equals(tableName)) { - cols = (part == null || tbl.getTableType() == TableType.VIRTUAL_VIEW) ? - tbl.getCols() : part.getCols(); + for (String stat : StatsSetupConst.SUPPORTED_STATS) { + StatsSetupConst.setBasicStatsState(tblProps, Boolean.toString(stateMap.get(stat))); + tblProps.put(stat, valueMap.get(stat).toString()); + } + table.setParameters(tblProps); + } + } - if (!desc.isFormatted()) { - cols.addAll(tbl.getPartCols()); - } + private void getColumnDataColPathSpecified(Table table, Partition part, List cols, + List colStats, Deserializer deserializer) + throws SemanticException, HiveException, MetaException { + // when column name is specified in describe table DDL, colPath will be table_name.column_name + String colName = desc.getColumnPath().split("\\.")[1]; + List colNames = Lists.newArrayList(colName.toLowerCase()); - if (tbl.isPartitioned() && part == null) { - // No partitioned specified for partitioned table, lets fetch all. - Map tblProps = tbl.getParameters() == null ? 
- new HashMap() : tbl.getParameters(); - Map valueMap = new HashMap<>(); - Map stateMap = new HashMap<>(); - for (String stat : StatsSetupConst.SUPPORTED_STATS) { - valueMap.put(stat, 0L); - stateMap.put(stat, true); - } - PartitionIterable parts = new PartitionIterable(context.getDb(), tbl, null, - context.getConf().getIntVar(HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_MAX)); - int numParts = 0; - for (Partition partition : parts) { - Map props = partition.getParameters(); - Boolean state = StatsSetupConst.areBasicStatsUptoDate(props); - for (String stat : StatsSetupConst.SUPPORTED_STATS) { - stateMap.put(stat, stateMap.get(stat) && state); - if (props != null && props.get(stat) != null) { - valueMap.put(stat, valueMap.get(stat) + Long.parseLong(props.get(stat))); - } - } - numParts++; - } - for (String stat : StatsSetupConst.SUPPORTED_STATS) { - StatsSetupConst.setBasicStatsState(tblProps, Boolean.toString(stateMap.get(stat))); - tblProps.put(stat, valueMap.get(stat).toString()); - } - tblProps.put(StatsSetupConst.NUM_PARTITIONS, Integer.toString(numParts)); - tbl.setParameters(tblProps); - } - } else { - if (desc.isFormatted()) { - // when column name is specified in describe table DDL, colPath will - // will be table_name.column_name - String colName = colPath.split("\\.")[1]; - String[] dbTab = Utilities.getDbTableName(tableName); - List colNames = new ArrayList(); - colNames.add(colName.toLowerCase()); - if (null == part) { - if (tbl.isPartitioned()) { - Map tblProps = tbl.getParameters() == null ? - new HashMap() : tbl.getParameters(); - if (tbl.isPartitionKey(colNames.get(0))) { - FieldSchema partCol = tbl.getPartColByName(colNames.get(0)); - cols = Collections.singletonList(partCol); - PartitionIterable parts = new PartitionIterable(context.getDb(), tbl, null, - context.getConf().getIntVar(HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_MAX)); - ColumnInfo ci = new ColumnInfo(partCol.getName(), - TypeInfoUtils.getTypeInfoFromTypeString(partCol.getType()), null, false); - ColStatistics cs = StatsUtils.getColStatsForPartCol(ci, parts, context.getConf()); - ColumnStatisticsData data = new ColumnStatisticsData(); - ColStatistics.Range r = cs.getRange(); - StatObjectConverter.fillColumnStatisticsData(partCol.getType(), data, r == null ? null : r.minValue, - r == null ? null : r.maxValue, r == null ? null : r.minValue, r == null ? null : r.maxValue, - r == null ? null : r.minValue.toString(), r == null ? 
null : r.maxValue.toString(), - cs.getNumNulls(), cs.getCountDistint(), null, cs.getAvgColLen(), cs.getAvgColLen(), - cs.getNumTrues(), cs.getNumFalses()); - ColumnStatisticsObj cso = new ColumnStatisticsObj(partCol.getName(), partCol.getType(), data); - colStats = Collections.singletonList(cso); - StatsSetupConst.setColumnStatsState(tblProps, colNames); - } else { - cols = Hive.getFieldsFromDeserializer(colPath, deserializer); - List parts = context.getDb().getPartitionNames(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), - (short) -1); - AggrStats aggrStats = context.getDb().getAggrColStatsFor( - dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, parts, false); - colStats = aggrStats.getColStats(); - if (parts.size() == aggrStats.getPartsFound()) { - StatsSetupConst.setColumnStatsState(tblProps, colNames); - } else { - StatsSetupConst.removeColumnStatsState(tblProps, colNames); - } - } - tbl.setParameters(tblProps); - } else { - cols = Hive.getFieldsFromDeserializer(colPath, deserializer); - colStats = context.getDb().getTableColumnStatistics( - dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, false); - } - } else { - List partitions = new ArrayList(); - partitions.add(part.getName()); - cols = Hive.getFieldsFromDeserializer(colPath, deserializer); - colStats = context.getDb().getPartitionColumnStatistics(dbTab[0].toLowerCase(), - dbTab[1].toLowerCase(), partitions, colNames, false).get(part.getName()); - } + String[] dbTab = Utilities.getDbTableName(desc.getTableName()); + if (null == part) { + if (table.isPartitioned()) { + Map tableProps = table.getParameters() == null ? + new HashMap() : table.getParameters(); + if (table.isPartitionKey(colNames.get(0))) { + getColumnDataForPartitionKeyColumn(table, cols, colStats, colNames, tableProps); } else { - cols = Hive.getFieldsFromDeserializer(colPath, deserializer); + getColumnsForNotPartitionKeyColumn(cols, colStats, deserializer, colNames, dbTab, tableProps); } + table.setParameters(tableProps); + } else { + cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer)); + colStats.addAll( + context.getDb().getTableColumnStatistics(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, false)); } - PrimaryKeyInfo pkInfo = null; - ForeignKeyInfo fkInfo = null; - UniqueConstraint ukInfo = null; - NotNullConstraint nnInfo = null; - DefaultConstraint dInfo = null; - CheckConstraint cInfo = null; - StorageHandlerInfo storageHandlerInfo = null; - if (desc.isExt() || desc.isFormatted()) { - pkInfo = context.getDb().getPrimaryKeys(tbl.getDbName(), tbl.getTableName()); - fkInfo = context.getDb().getForeignKeys(tbl.getDbName(), tbl.getTableName()); - ukInfo = context.getDb().getUniqueConstraints(tbl.getDbName(), tbl.getTableName()); - nnInfo = context.getDb().getNotNullConstraints(tbl.getDbName(), tbl.getTableName()); - dInfo = context.getDb().getDefaultConstraints(tbl.getDbName(), tbl.getTableName()); - cInfo = context.getDb().getCheckConstraints(tbl.getDbName(), tbl.getTableName()); - storageHandlerInfo = context.getDb().getStorageHandlerInfo(tbl); - } - fixDecimalColumnTypeName(cols); - // Information for materialized views - if (tbl.isMaterializedView()) { - final String validTxnsList = context.getDb().getConf().get(ValidTxnList.VALID_TXNS_KEY); - if (validTxnsList != null) { - List tablesUsed = new ArrayList<>(tbl.getCreationMetadata().getTablesUsed()); - ValidTxnWriteIdList currentTxnWriteIds = - SessionState.get().getTxnMgr().getValidWriteIds(tablesUsed, validTxnsList); - long defaultTimeWindow = 
HiveConf.getTimeVar(context.getDb().getConf(), - HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW, TimeUnit.MILLISECONDS); - tbl.setOutdatedForRewriting(Hive.isOutdatedMaterializedView(tbl, - currentTxnWriteIds, defaultTimeWindow, tablesUsed, false)); - } + } else { + List partitions = new ArrayList(); + partitions.add(part.getName()); + cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer)); + List partitionColStat = context.getDb().getPartitionColumnStatistics(dbTab[0].toLowerCase(), + dbTab[1].toLowerCase(), partitions, colNames, false).get(part.getName()); + if (partitionColStat != null) { + colStats.addAll(partitionColStat); } - // In case the query is served by HiveServer2, don't pad it with spaces, - // as HiveServer2 output is consumed by JDBC/ODBC clients. - boolean isOutputPadded = !SessionState.get().isHiveServerQuery(); - context.getFormatter().describeTable(outStream, colPath, tableName, tbl, part, - cols, desc.isFormatted(), desc.isExt(), isOutputPadded, - colStats, pkInfo, fkInfo, ukInfo, nnInfo, dInfo, cInfo, - storageHandlerInfo); + } + } - LOG.debug("DDLTask: written data for {}", tableName); + private void getColumnDataForPartitionKeyColumn(Table table, List cols, + List colStats, List colNames, Map tableProps) + throws HiveException, MetaException { + FieldSchema partCol = table.getPartColByName(colNames.get(0)); + cols.add(partCol); + PartitionIterable parts = new PartitionIterable(context.getDb(), table, null, + MetastoreConf.getIntVar(context.getConf(), MetastoreConf.ConfVars.BATCH_RETRIEVE_MAX)); + ColumnInfo ci = new ColumnInfo(partCol.getName(), + TypeInfoUtils.getTypeInfoFromTypeString(partCol.getType()), null, false); + ColStatistics cs = StatsUtils.getColStatsForPartCol(ci, parts, context.getConf()); + ColumnStatisticsData data = new ColumnStatisticsData(); + ColStatistics.Range r = cs.getRange(); + StatObjectConverter.fillColumnStatisticsData(partCol.getType(), data, r == null ? null : r.minValue, + r == null ? null : r.maxValue, r == null ? null : r.minValue, r == null ? null : r.maxValue, + r == null ? null : r.minValue.toString(), r == null ? 
null : r.maxValue.toString(), + cs.getNumNulls(), cs.getCountDistint(), null, cs.getAvgColLen(), cs.getAvgColLen(), + cs.getNumTrues(), cs.getNumFalses()); + ColumnStatisticsObj cso = new ColumnStatisticsObj(partCol.getName(), partCol.getType(), data); + colStats.add(cso); + StatsSetupConst.setColumnStatsState(tableProps, colNames); + } - } catch (SQLException e) { - throw new HiveException(e, ErrorMsg.GENERIC_ERROR, tableName); - } finally { - IOUtils.closeStream(outStream); + private void getColumnsForNotPartitionKeyColumn(List<FieldSchema> cols, List<ColumnStatisticsObj> colStats, + Deserializer deserializer, List<String> colNames, String[] dbTab, Map<String, String> tableProps) + throws HiveException { + cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer)); + List<String> parts = context.getDb().getPartitionNames(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), + (short) -1); + AggrStats aggrStats = context.getDb().getAggrColStatsFor( + dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, parts, false); + colStats.addAll(aggrStats.getColStats()); + if (parts.size() == aggrStats.getPartsFound()) { + StatsSetupConst.setColumnStatsState(tableProps, colNames); + } else { + StatsSetupConst.removeColumnStatsState(tableProps, colNames); } - - return 0; } /** @@ -274,4 +276,31 @@ private static void fixDecimalColumnTypeName(List<FieldSchema> cols) { } } } + + private void setConstraintsAndStorageHandlerInfo(Table table) throws HiveException { + if (desc.isExtended() || desc.isFormatted()) { + table.setPrimaryKeyInfo(context.getDb().getPrimaryKeys(table.getDbName(), table.getTableName())); + table.setForeignKeyInfo(context.getDb().getForeignKeys(table.getDbName(), table.getTableName())); + table.setUniqueKeyInfo(context.getDb().getUniqueConstraints(table.getDbName(), table.getTableName())); + table.setNotNullConstraint(context.getDb().getNotNullConstraints(table.getDbName(), table.getTableName())); + table.setDefaultConstraint(context.getDb().getDefaultConstraints(table.getDbName(), table.getTableName())); + table.setCheckConstraint(context.getDb().getCheckConstraints(table.getDbName(), table.getTableName())); + table.setStorageHandlerInfo(context.getDb().getStorageHandlerInfo(table)); + } + } + + private void handleMaterializedView(Table table) throws LockException { + if (table.isMaterializedView()) { + String validTxnsList = context.getDb().getConf().get(ValidTxnList.VALID_TXNS_KEY); + if (validTxnsList != null) { + List<String> tablesUsed = new ArrayList<>(table.getCreationMetadata().getTablesUsed()); + ValidTxnWriteIdList currentTxnWriteIds = + SessionState.get().getTxnMgr().getValidWriteIds(tablesUsed, validTxnsList); + long defaultTimeWindow = HiveConf.getTimeVar(context.getDb().getConf(), + HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW, TimeUnit.MILLISECONDS); + table.setOutdatedForRewriting(Hive.isOutdatedMaterializedView(table, + currentTxnWriteIds, defaultTimeWindow, tablesUsed, false)); + } + } + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java index af457883de..5eb986e15e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java @@ -118,4 +118,8 @@ public String toString() { sb.append("]"); return sb.toString(); } + + public static boolean isCheckConstraintNotEmpty(CheckConstraint info) { + return info != null && !info.getCheckConstraints().isEmpty(); + } }
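CheckConstraint above, and each constraint class below, gains its own static null-safe emptiness check. The shape is identical everywhere, so the repetition could arguably be collapsed into one shared helper; a hedged sketch of that alternative follows. ConstraintChecks is a hypothetical class, not part of this patch, and it assumes the payload getters return a Map, which matches the isEmpty() calls the patch makes:

import java.util.Map;
import java.util.function.Function;

public final class ConstraintChecks {
  private ConstraintChecks() {
  }

  // True only when the wrapper object exists and its payload map is non-empty.
  public static <T> boolean isNotEmpty(T info, Function<T, Map<?, ?>> payload) {
    if (info == null) {
      return false;
    }
    Map<?, ?> map = payload.apply(info);
    return map != null && !map.isEmpty();
  }
}

// Usage, equivalent to CheckConstraint.isCheckConstraintNotEmpty(info):
//   ConstraintChecks.isNotEmpty(info, CheckConstraint::getCheckConstraints)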
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java index 59df3daf6e..c101f3dfa6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java @@ -118,4 +118,8 @@ public String toString() { sb.append("]"); return sb.toString(); } + + public static boolean isDefaultConstraintNotEmpty(DefaultConstraint info) { + return info != null && !info.getDefaultConstraints().isEmpty(); + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java index 9ae14cd848..f2c978aa63 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java @@ -133,4 +133,8 @@ public String toString() { sb.append("]"); return sb.toString(); } + + public static boolean isForeignKeyInfoNotEmpty(ForeignKeyInfo info) { + return info != null && !info.getForeignKeys().isEmpty(); + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java index ffd42f2127..8b50b7ce1f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java @@ -83,4 +83,7 @@ public String toString() { return sb.toString(); } + public static boolean isNotNullConstraintNotEmpty(NotNullConstraint info) { + return info != null && !info.getNotNullConstraints().isEmpty(); + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/PartitionIterable.java ql/src/java/org/apache/hadoop/hive/ql/metadata/PartitionIterable.java index e635670932..79c329d19b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/PartitionIterable.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/PartitionIterable.java @@ -92,11 +92,11 @@ public Partition next() { } private void getNextBatch() { - int batch_counter = 0; + int batchCounter = 0; List<String> nameBatch = new ArrayList<String>(); - while (batch_counter < batch_size && partitionNamesIter.hasNext()){ + while (batchCounter < batchSize && partitionNamesIter.hasNext()){ nameBatch.add(partitionNamesIter.next()); - batch_counter++; + batchCounter++; } try { batchIter = db.getPartitionsByNames(table, nameBatch, getColStats).iterator(); @@ -128,7 +128,7 @@ public void remove() { private Table table = null; private Map<String, String> partialPartitionSpec = null; private List<String> partitionNames = null; - private int batch_size; + private int batchSize; private boolean getColStats = false; /** @@ -146,8 +146,8 @@ public PartitionIterable(Collection<Partition> ptnsProvided){ * a Hive object and a table object, and a partial partition spec. */ public PartitionIterable(Hive db, Table table, Map<String, String> partialPartitionSpec, - int batch_size) throws HiveException { - this(db, table, partialPartitionSpec, batch_size, false); + int batchSize) throws HiveException { - this(db, table, partialPartitionSpec, batchSize, false); + this(db, table, partialPartitionSpec, batchSize, false); } /** @@ -155,12 +155,12 @@ public PartitionIterable(Hive db, Table table, Map<String, String> partialPartit * a Hive object and a table object, and a partial partition spec. */ public PartitionIterable(Hive db, Table table, Map<String, String> partialPartitionSpec, - int batch_size, boolean getColStats) throws HiveException { + int batchSize, boolean getColStats) throws HiveException { this.currType = Type.LAZY_FETCH_PARTITIONS; this.db = db; this.table = table; this.partialPartitionSpec = partialPartitionSpec; - this.batch_size = batch_size; + this.batchSize = batchSize; this.getColStats = getColStats; if (this.partialPartitionSpec == null){
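For context, a hedged usage sketch of the batching behavior whose fields were just renamed: PartitionIterable fetches all partition names up front but materializes Partition objects batchSize at a time through getPartitionsByNames. It assumes a live Hive client and metastore connection; countPartitions is an invented helper, not part of this patch:

import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.PartitionIterable;
import org.apache.hadoop.hive.ql.metadata.Table;

public class PartitionScan {
  // Iterates every partition of the table; a null partial spec means "all",
  // and only batchSize Partition objects are fetched per metastore call.
  public static long countPartitions(Hive db, Table table, int batchSize) throws HiveException {
    long count = 0;
    for (Partition ignored : new PartitionIterable(db, table, null, batchSize)) {
      count++;
    }
    return count;
  }
}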
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java index c50bd7dca7..f9348c644e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java @@ -104,4 +104,7 @@ public String toString() { return sb.toString(); } + public static boolean isPrimaryKeyInfoNotEmpty(PrimaryKeyInfo info) { + return info != null && !info.getColNames().isEmpty(); + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java index aba30359ba..33a450585b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java @@ -103,6 +103,7 @@ private Path path; private transient HiveStorageHandler storageHandler; + private transient StorageHandlerInfo storageHandlerInfo; private transient TableSpec tableSpec; @@ -117,6 +118,8 @@ private transient ForeignKeyInfo fki; private transient UniqueConstraint uki; private transient NotNullConstraint nnc; + private transient DefaultConstraint dc; + private transient CheckConstraint cc; /** * Used only for serialization. @@ -321,6 +324,14 @@ public HiveStorageHandler getStorageHandler() { return storageHandler; } + public StorageHandlerInfo getStorageHandlerInfo() { + return storageHandlerInfo; + } + + public void setStorageHandlerInfo(StorageHandlerInfo storageHandlerInfo) { + this.storageHandlerInfo = storageHandlerInfo; + } + final public Class<? extends InputFormat> getInputFormatClass() { if (inputFormatClass == null) { try { @@ -1115,7 +1126,7 @@ public Boolean isOutdatedForRewriting() { return outdatedForRewritingMaterializedView; } - /* These are only populated during optimization */ + /* These are only populated during optimization and describing */ public PrimaryKeyInfo getPrimaryKeyInfo() { return pki; } @@ -1148,6 +1159,22 @@ public void setNotNullConstraint(NotNullConstraint nnc) { this.nnc = nnc; } + public DefaultConstraint getDefaultConstraint() { + return dc; + } + + public void setDefaultConstraint(DefaultConstraint dc) { + this.dc = dc; + } + + public CheckConstraint getCheckConstraint() { + return cc; + } + + public void setCheckConstraint(CheckConstraint cc) { + this.cc = cc; + } + public ColumnStatistics getColStats() { return tTable.isSetColStats() ?
tTable.getColStats() : null; diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java index 24817a2dc4..1fbe76d8b9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java @@ -108,4 +108,8 @@ public String toString() { sb.append("]"); return sb.toString(); } + + public static boolean isUniqueConstraintNotEmpty(UniqueConstraint info) { + return info != null && !info.getUniqueConstraints().isEmpty(); + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java index f28d68fafe..78578dbc60 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java @@ -33,7 +33,6 @@ import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hive.ql.session.SessionState; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; @@ -55,7 +54,6 @@ import org.apache.hadoop.hive.ql.metadata.NotNullConstraint; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo; -import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.metadata.UniqueConstraint; import org.codehaus.jackson.JsonGenerator; @@ -186,13 +184,9 @@ public void showMaterializedViews(DataOutputStream out, List materialized * Describe table. */ @Override - public void describeTable(DataOutputStream out, String colPath, - String tableName, Table tbl, Partition part, List cols, - boolean isFormatted, boolean isExt, - boolean isOutputPadded, List colStats, - PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo, - UniqueConstraint ukInfo, NotNullConstraint nnInfo, DefaultConstraint dInfo, - CheckConstraint cInfo, StorageHandlerInfo storageHandlerInfo) throws HiveException { + public void describeTable(DataOutputStream out, String colPath, String tableName, Table tbl, Partition part, + List cols, boolean isFormatted, boolean isExt, boolean isOutputPadded, + List colStats) throws HiveException { MapBuilder builder = MapBuilder.create(); builder.put("columns", makeColsUnformatted(cols)); @@ -203,26 +197,26 @@ public void describeTable(DataOutputStream out, String colPath, else { builder.put("tableInfo", tbl.getTTable()); } - if (pkInfo != null && !pkInfo.getColNames().isEmpty()) { - builder.put("primaryKeyInfo", pkInfo); + if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(tbl.getPrimaryKeyInfo())) { + builder.put("primaryKeyInfo", tbl.getPrimaryKeyInfo()); } - if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) { - builder.put("foreignKeyInfo", fkInfo); + if (ForeignKeyInfo.isForeignKeyInfoNotEmpty(tbl.getForeignKeyInfo())) { + builder.put("foreignKeyInfo", tbl.getForeignKeyInfo()); } - if (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) { - builder.put("uniqueConstraintInfo", ukInfo); + if (UniqueConstraint.isUniqueConstraintNotEmpty(tbl.getUniqueKeyInfo())) { + builder.put("uniqueConstraintInfo", tbl.getUniqueKeyInfo()); } - if (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty()) { - builder.put("notNullConstraintInfo", nnInfo); + if 
(NotNullConstraint.isNotNullConstraintNotEmpty(tbl.getNotNullConstraint())) { + builder.put("notNullConstraintInfo", tbl.getNotNullConstraint()); } - if (dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) { - builder.put("defaultConstraintInfo", dInfo); + if (DefaultConstraint.isDefaultConstraintNotEmpty(tbl.getDefaultConstraint())) { + builder.put("defaultConstraintInfo", tbl.getDefaultConstraint()); } - if (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) { - builder.put("checkConstraintInfo", cInfo); + if (CheckConstraint.isCheckConstraintNotEmpty(tbl.getCheckConstraint())) { + builder.put("checkConstraintInfo", tbl.getCheckConstraint()); } - if(storageHandlerInfo != null) { - builder.put("storageHandlerInfo", storageHandlerInfo.toString()); + if (tbl.getStorageHandlerInfo() != null) { + builder.put("storageHandlerInfo", tbl.getStorageHandlerInfo().toString()); } }
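Since the JSON formatter now emits each constraint key only when the corresponding info is non-empty, consumers have to treat every one of them as optional. A hedged consumer-side sketch; the payload literal is a stand-in and Jackson databind on the classpath is an assumption, not something this patch provides:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class DescribeJsonReader {
  public static void main(String[] args) throws Exception {
    // Minimal stand-in for describeTable output in JSON mode.
    String payload = "{\"columns\":[],\"tableInfo\":{}}";
    JsonNode root = new ObjectMapper().readTree(payload);
    // primaryKeyInfo (and the other constraint keys) appear only for tables
    // that actually declare the constraint, so probe before reading.
    if (root.has("primaryKeyInfo")) {
      System.out.println("PK: " + root.get("primaryKeyInfo"));
    } else {
      System.out.println("no primary key declared");
    }
  }
}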
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java index 2bf7af7287..0b994651a1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.metadata.formatting; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringEscapeUtils; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -124,7 +125,7 @@ private static String convertToString(byte[] buf) { static ColumnStatisticsObj getColumnStatisticsObject(String colName, String colType, List<ColumnStatisticsObj> colStats) { - if (colStats != null && !colStats.isEmpty()) { + if (CollectionUtils.isNotEmpty(colStats)) { for (ColumnStatisticsObj cso : colStats) { if (cso.getColName().equalsIgnoreCase(colName) && cso.getColType().equalsIgnoreCase(colType)) { @@ -135,34 +136,33 @@ static ColumnStatisticsObj getColumnStatisticsObject(String colName, return null; } - public static String getConstraintsInformation(PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo, - UniqueConstraint ukInfo, NotNullConstraint nnInfo, DefaultConstraint dInfo, CheckConstraint cInfo) { + public static String getConstraintsInformation(Table table) { StringBuilder constraintsInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE); constraintsInfo.append(LINE_DELIM).append("# Constraints").append(LINE_DELIM); - if (pkInfo != null && !pkInfo.getColNames().isEmpty()) { + if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(table.getPrimaryKeyInfo())) { constraintsInfo.append(LINE_DELIM).append("# Primary Key").append(LINE_DELIM); - getPrimaryKeyInformation(constraintsInfo, pkInfo); + getPrimaryKeyInformation(constraintsInfo, table.getPrimaryKeyInfo()); } - if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) { + if (ForeignKeyInfo.isForeignKeyInfoNotEmpty(table.getForeignKeyInfo())) { constraintsInfo.append(LINE_DELIM).append("# Foreign Keys").append(LINE_DELIM); - getForeignKeysInformation(constraintsInfo, fkInfo); + getForeignKeysInformation(constraintsInfo, table.getForeignKeyInfo()); } - if (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) { + if (UniqueConstraint.isUniqueConstraintNotEmpty(table.getUniqueKeyInfo())) { constraintsInfo.append(LINE_DELIM).append("# Unique Constraints").append(LINE_DELIM); - getUniqueConstraintsInformation(constraintsInfo, ukInfo); + getUniqueConstraintsInformation(constraintsInfo, table.getUniqueKeyInfo()); } - if (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty()) { + if (NotNullConstraint.isNotNullConstraintNotEmpty(table.getNotNullConstraint())) { constraintsInfo.append(LINE_DELIM).append("# Not Null Constraints").append(LINE_DELIM); - getNotNullConstraintsInformation(constraintsInfo, nnInfo); + getNotNullConstraintsInformation(constraintsInfo, table.getNotNullConstraint()); } - if (dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) { + if (DefaultConstraint.isDefaultConstraintNotEmpty(table.getDefaultConstraint())) { constraintsInfo.append(LINE_DELIM).append("# Default Constraints").append(LINE_DELIM); - getDefaultConstraintsInformation(constraintsInfo, dInfo); + getDefaultConstraintsInformation(constraintsInfo, table.getDefaultConstraint()); } - if (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) { + if (CheckConstraint.isCheckConstraintNotEmpty(table.getCheckConstraint())) { constraintsInfo.append(LINE_DELIM).append("# Check Constraints").append(LINE_DELIM); - getCheckConstraintsInformation(constraintsInfo, cInfo); + getCheckConstraintsInformation(constraintsInfo, table.getCheckConstraint()); } return constraintsInfo.toString(); } @@ -752,14 +752,6 @@ private static void printPadding(StringBuilder tableInfo, int[] columnWidths) { } } - public static String[] getColumnsHeader(List<ColumnStatisticsObj> colStats) { - boolean showColStats = false; - if (colStats != null) { - showColStats = true; - } - return DescTableDesc.getSchema(showColStats).split("#")[0].split(","); - } - public static MetaDataFormatter getFormatter(HiveConf conf) { if ("json".equals(conf.get(HiveConf.ConfVars.HIVE_DDL_OUTPUT_FORMAT.varname, "text"))) { return new JsonMetaDataFormatter();
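getColumnsHeader() can be removed above because the header is derivable directly from DescTableDesc.getSchema(...): everything before the '#' is the comma-separated column-name list, the remainder the type string. A hedged sketch of that derivation; the schema literal below is an assumed example for illustration, not the exact string Hive produces:

public class HeaderSplit {
  public static void main(String[] args) {
    String schema = "col_name,data_type,comment#string:string:string";
    // Same expression the text formatter now uses inline.
    String[] headers = schema.split("#")[0].split(",");
    for (String header : headers) {
      System.out.println(header);  // col_name, data_type, comment
    }
  }
}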
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java index b7e5ebe7c0..fcce9ec747 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java @@ -31,17 +31,10 @@ import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan; import org.apache.hadoop.hive.metastore.api.WMResourcePlan; import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse; -import org.apache.hadoop.hive.ql.metadata.CheckConstraint; -import org.apache.hadoop.hive.ql.metadata.DefaultConstraint; -import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.NotNullConstraint; import org.apache.hadoop.hive.ql.metadata.Partition; -import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo; -import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.metadata.UniqueConstraint; /** * Interface to format table and index information. We can format it @@ -53,83 +46,58 @@ * Write an error message. * @param sqlState if {@code null}, will be ignored */ - public void error(OutputStream out, String msg, int errorCode, String sqlState) + void error(OutputStream out, String msg, int errorCode, String sqlState) throws HiveException; /** * @param sqlState if {@code null}, will be skipped in output * @param errorDetail usually string version of some Exception, if {@code null}, will be ignored */ - public void error(OutputStream out, String errorMessage, int errorCode, String sqlState, String errorDetail) + void error(OutputStream out, String errorMessage, int errorCode, String sqlState, String errorDetail) throws HiveException; /** * Show a list of tables. */ - public void showTables(DataOutputStream out, Set<String> tables) + void showTables(DataOutputStream out, Set<String> tables) throws HiveException; /** * Show a list of tables including table types. */ - public void showTablesExtended(DataOutputStream out, List<Table> tables) + void showTablesExtended(DataOutputStream out, List<Table> tables) throws HiveException; /** * Show a list of materialized views. */ - public void showMaterializedViews(DataOutputStream out, List<Table> materializedViews) + void showMaterializedViews(DataOutputStream out, List<Table> materializedViews) throws HiveException; /** * Describe table. - * @param out - * @param colPath - * @param tableName - * @param tbl - * @param part - * @param cols - * @param isFormatted - describe with formatted keyword - * @param isExt - * @param isOutputPadded - if true, add spacing and indentation - * @param colStats - * @param fkInfo foreign keys information - * @param pkInfo primary key information - * @param ukInfo unique constraint information - * @param nnInfo not null constraint information - * @throws HiveException */ - public void describeTable(DataOutputStream out, String colPath, - String tableName, Table tbl, Partition part, List<FieldSchema> cols, - boolean isFormatted, boolean isExt, - boolean isOutputPadded, List<ColumnStatisticsObj> colStats, - PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo, - UniqueConstraint ukInfo, NotNullConstraint nnInfo, DefaultConstraint dInfo, CheckConstraint cInfo, - StorageHandlerInfo storageHandlerInfo) - throws HiveException; + void describeTable(DataOutputStream out, String colPath, String tableName, Table tbl, Partition part, + List<FieldSchema> cols, boolean isFormatted, boolean isExtended, boolean isOutputPadded, + List<ColumnStatisticsObj> colStats) throws HiveException; /** * Show the table status. */ - public void showTableStatus(DataOutputStream out, - Hive db, - HiveConf conf, - List<Table> tbls, - Map<String, String> part, + void showTableStatus(DataOutputStream out, Hive db, HiveConf conf, List<Table> tbls, Map<String, String> part, Partition par) throws HiveException; /** * Show the table partitions. */ - public void showTablePartitions(DataOutputStream out, - List<String> parts) + void showTablePartitions(DataOutputStream out, List<String> parts) throws HiveException; /** - * Show the databases + * Show the databases. */ - public void showDatabases(DataOutputStream out, List<String> databases) + void showDatabases(DataOutputStream out, List<String> databases) throws HiveException; /** diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java index f7704bdd13..674644ae43 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java @@ -30,10 +30,10 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; -import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo; import org.apache.hive.common.util.HiveStringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringEscapeUtils; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -47,6 +47,7 @@ import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan; import org.apache.hadoop.hive.metastore.api.WMResourcePlan; import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse; +import org.apache.hadoop.hive.ql.ddl.table.info.DescTableDesc; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.CheckConstraint; import org.apache.hadoop.hive.ql.metadata.DefaultConstraint; @@ -201,26 +202,21 @@ public void showMaterializedViews(DataOutputStream out, List<Table> 
materialized } @Override - public void describeTable(DataOutputStream outStream, String colPath, - String tableName, Table tbl, Partition part, List cols, - boolean isFormatted, boolean isExt, - boolean isOutputPadded, List colStats, - PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo, - UniqueConstraint ukInfo, NotNullConstraint nnInfo, DefaultConstraint dInfo, CheckConstraint cInfo, - StorageHandlerInfo storageHandlerInfo) - throws HiveException { + public void describeTable(DataOutputStream outStream, String colPath, String tableName, Table tbl, Partition part, + List cols, boolean isFormatted, boolean isExt, boolean isOutputPadded, + List colStats) throws HiveException { try { List partCols = tbl.isPartitioned() ? tbl.getPartCols() : null; String output = ""; - boolean isColStatsAvailable = colStats != null; + boolean isColStatsAvailable = CollectionUtils.isNotEmpty(colStats); TextMetaDataTable mdt = new TextMetaDataTable(); if (isFormatted && !isColStatsAvailable) { output = "# "; } if (isFormatted) { - mdt.addRow(MetaDataFormatUtils.getColumnsHeader(colStats)); + mdt.addRow(DescTableDesc.getSchema(isColStatsAvailable).split("#")[0].split(",")); } for (FieldSchema col : cols) { mdt.addRow(MetaDataFormatUtils.extractColumnValues(col, isColStatsAvailable, @@ -231,11 +227,11 @@ public void describeTable(DataOutputStream outStream, String colPath, } output += mdt.renderTable(isOutputPadded); - if (colPath.equals(tableName)) { + if (colPath == null) { if ((partCols != null) && !partCols.isEmpty() && showPartColsSeparately) { mdt = new TextMetaDataTable(); output += MetaDataFormatUtils.LINE_DELIM + "# Partition Information" + MetaDataFormatUtils.LINE_DELIM + "# "; - mdt.addRow(MetaDataFormatUtils.getColumnsHeader(null)); + mdt.addRow(DescTableDesc.getSchema(false).split("#")[0].split(",")); for (FieldSchema col : partCols) { mdt.addRow(MetaDataFormatUtils.extractColumnValues(col)); } @@ -253,7 +249,7 @@ public void describeTable(DataOutputStream outStream, String colPath, } outStream.write(output.getBytes("UTF-8")); - if (tableName.equals(colPath)) { + if (colPath == null) { if (isFormatted) { if (part != null) { output = MetaDataFormatUtils.getPartitionInformation(part); @@ -262,13 +258,13 @@ public void describeTable(DataOutputStream outStream, String colPath, } outStream.write(output.getBytes("UTF-8")); - if ((pkInfo != null && !pkInfo.getColNames().isEmpty()) || - (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) || - (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) || - (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty()) || - cInfo != null && !cInfo.getCheckConstraints().isEmpty() || - dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) { - output = MetaDataFormatUtils.getConstraintsInformation(pkInfo, fkInfo, ukInfo, nnInfo, dInfo, cInfo); + if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(tbl.getPrimaryKeyInfo()) || + ForeignKeyInfo.isForeignKeyInfoNotEmpty(tbl.getForeignKeyInfo()) || + UniqueConstraint.isUniqueConstraintNotEmpty(tbl.getUniqueKeyInfo()) || + NotNullConstraint.isNotNullConstraintNotEmpty(tbl.getNotNullConstraint()) || + tbl.getCheckConstraint() != null && !tbl.getCheckConstraint().getCheckConstraints().isEmpty() || + tbl.getDefaultConstraint() != null && !tbl.getDefaultConstraint().getDefaultConstraints().isEmpty()) { + output = MetaDataFormatUtils.getConstraintsInformation(tbl); outStream.write(output.getBytes("UTF-8")); } } @@ -294,44 +290,44 @@ public void describeTable(DataOutputStream outStream, String colPath, outStream.write(separator); 
outStream.write(terminator); } - if ((pkInfo != null && !pkInfo.getColNames().isEmpty()) || - (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) || - (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) || - (dInfo!= null && !dInfo.getDefaultConstraints().isEmpty()) || - (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) || - (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty())) { + if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(tbl.getPrimaryKeyInfo()) || + ForeignKeyInfo.isForeignKeyInfoNotEmpty(tbl.getForeignKeyInfo()) || + UniqueConstraint.isUniqueConstraintNotEmpty(tbl.getUniqueKeyInfo()) || + NotNullConstraint.isNotNullConstraintNotEmpty(tbl.getNotNullConstraint()) || + DefaultConstraint.isDefaultConstraintNotEmpty(tbl.getDefaultConstraint()) || + CheckConstraint.isCheckConstraintNotEmpty(tbl.getCheckConstraint())) { outStream.write(("Constraints").getBytes("UTF-8")); outStream.write(separator); - if (pkInfo != null && !pkInfo.getColNames().isEmpty()) { - outStream.write(pkInfo.toString().getBytes("UTF-8")); + if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(tbl.getPrimaryKeyInfo())) { + outStream.write(tbl.getPrimaryKeyInfo().toString().getBytes("UTF-8")); outStream.write(terminator); } - if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) { - outStream.write(fkInfo.toString().getBytes("UTF-8")); + if (ForeignKeyInfo.isForeignKeyInfoNotEmpty(tbl.getForeignKeyInfo())) { + outStream.write(tbl.getForeignKeyInfo().toString().getBytes("UTF-8")); outStream.write(terminator); } - if (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) { - outStream.write(ukInfo.toString().getBytes("UTF-8")); + if (UniqueConstraint.isUniqueConstraintNotEmpty(tbl.getUniqueKeyInfo())) { + outStream.write(tbl.getUniqueKeyInfo().toString().getBytes("UTF-8")); outStream.write(terminator); } - if (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty()) { - outStream.write(nnInfo.toString().getBytes("UTF-8")); + if (NotNullConstraint.isNotNullConstraintNotEmpty(tbl.getNotNullConstraint())) { + outStream.write(tbl.getNotNullConstraint().toString().getBytes("UTF-8")); outStream.write(terminator); } - if (dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) { - outStream.write(dInfo.toString().getBytes("UTF-8")); + if (DefaultConstraint.isDefaultConstraintNotEmpty(tbl.getDefaultConstraint())) { + outStream.write(tbl.getDefaultConstraint().toString().getBytes("UTF-8")); outStream.write(terminator); } - if (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) { - outStream.write(cInfo.toString().getBytes("UTF-8")); + if (CheckConstraint.isCheckConstraintNotEmpty(tbl.getCheckConstraint())) { + outStream.write(tbl.getCheckConstraint().toString().getBytes("UTF-8")); outStream.write(terminator); } } - if (storageHandlerInfo!= null) { + if (tbl.getStorageHandlerInfo() != null) { outStream.write(("StorageHandlerInfo").getBytes("UTF-8")); outStream.write(terminator); - outStream.write(storageHandlerInfo.formatAsText().getBytes("UTF-8")); + outStream.write(tbl.getStorageHandlerInfo().formatAsText().getBytes("UTF-8")); outStream.write(terminator); } } }
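The DDLSemanticAnalyzer change below is the other half of the colPath cleanup: getColPath() now returns null when no column is named, so the formatters above can use a plain null check instead of comparing colPath against the table name. A hedged illustration of the convention; the helper name is invented:

public class ColPathSentinel {
  // Convention after this patch: a null colPath means a table-level describe.
  static boolean isTableLevelDescribe(String colPath) {
    return colPath == null;
  }

  public static void main(String[] args) {
    System.out.println(isTableLevelDescribe(null));       // DESCRIBE t1      -> true
    System.out.println(isTableLevelDescribe("t1.key1"));  // DESCRIBE t1 key1 -> false
  }
}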
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index ff7f9a8583..9b7b6aad74 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -2315,7 +2315,7 @@ static public String getColPath( // if this ast has only one child, then no column name specified. if (node.getChildCount() == 1) { - return tableName; + return null; } ASTNode columnNode = null; @@ -2336,7 +2336,7 @@ static public String getColPath( QualifiedNameUtil.getFullyQualifiedName(columnNode); } } else { - return tableName; + return null; } } @@ -2495,7 +2495,7 @@ private void analyzeDescribeTable(ASTNode ast) throws SemanticException { // will contain tablename.column_name. If column_name is not specified - // colPath will be equal to tableName. This is how we can differentiate + // colPath will be null. This is how we can differentiate // if we are describing a table or column - if (!colPath.equalsIgnoreCase(tableName) && isFormatted) { + if (colPath != null && isFormatted) { showColStats = true; } } diff --git ql/src/test/results/clientpositive/alterColumnStats.q.out ql/src/test/results/clientpositive/alterColumnStats.q.out index eb8934be76..ca8d7b40b7 100644 --- ql/src/test/results/clientpositive/alterColumnStats.q.out +++ ql/src/test/results/clientpositive/alterColumnStats.q.out @@ -144,18 +144,8 @@ PREHOOK: Input: default@p_n0 POSTHOOK: query: desc formatted p_n0 c1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: default@p_n0 -col_name c1 -data_type string -min -max -num_nulls -distinct_count -avg_col_len -max_col_len -num_trues -num_falses -bitVector -comment from deserializer +# col_name data_type comment +c1 string from deserializer COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}} PREHOOK: query: desc formatted p_n0 c2 PREHOOK: type: DESCTABLE @@ -163,16 +153,6 @@ PREHOOK: Input: default@p_n0 POSTHOOK: query: desc formatted p_n0 c2 POSTHOOK: type: DESCTABLE POSTHOOK: Input: default@p_n0 -col_name c2 -data_type string -min -max -num_nulls -distinct_count -avg_col_len -max_col_len -num_trues -num_falses -bitVector -comment from deserializer +# col_name data_type comment +c2 string from deserializer COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}} diff --git ql/src/test/results/clientpositive/alter_table_column_stats.q.out ql/src/test/results/clientpositive/alter_table_column_stats.q.out index f8c91d3e14..533bcfe17e 100644 --- ql/src/test/results/clientpositive/alter_table_column_stats.q.out +++ ql/src/test/results/clientpositive/alter_table_column_stats.q.out @@ -368,18 +368,8 @@ PREHOOK: Input: statsdb1@testtable1 POSTHOOK: query: describe formatted statsdb1.testtable1 col4 POSTHOOK: type:
DESCTABLE
 POSTHOOK: Input: statsdb1@testtable1
-col_name	col4
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+col4	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}
 PREHOOK: query: alter table statsdb1.testtable1 rename to statsdb2.testtable2
 PREHOOK: type: ALTERTABLE_RENAME
@@ -535,18 +505,8 @@ PREHOOK: Input: statsdb2@testtable2
 POSTHOOK: query: describe formatted statsdb2.testtable2 col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb2@testtable2
-col_name	col1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+col1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}
 PREHOOK: query: describe formatted statsdb2.testtable2 col2
 PREHOOK: type: DESCTABLE
@@ -573,18 +533,8 @@ PREHOOK: Input: statsdb2@testtable2
 POSTHOOK: query: describe formatted statsdb2.testtable2 col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb2@testtable2
-col_name	col4
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+col4	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}
 PREHOOK: query: analyze table testpart0 compute statistics for columns
 PREHOOK: type: ANALYZE_TABLE
@@ -2249,18 +2199,8 @@ PREHOOK: Input: statsdb1@testtable1
 POSTHOOK: query: describe formatted statsdb1.testtable1 col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testtable1
-col_name	col4
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+col4	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col1\":\"true\",\"col2\":\"true\"}}
 PREHOOK: query: alter table testtable1 change col1 col1 string
 PREHOOK: type: ALTERTABLE_RENAMECOL
@@ -2313,18 +2253,8 @@ PREHOOK: Input: statsdb1@testtable1
 POSTHOOK: query: describe formatted statsdb1.testtable1 col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testtable1
-col_name	col1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+col1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}
 PREHOOK: query: describe formatted statsdb1.testtable1 col2
 PREHOOK: type: DESCTABLE
@@ -2351,18 +2281,8 @@ PREHOOK: Input: statsdb1@testtable1
 POSTHOOK: query: describe formatted statsdb1.testtable1 col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testtable1
-col_name	col4
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+col4	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}
 PREHOOK: query: alter table statsdb1.testtable1 rename to statsdb2.testtable2
 PREHOOK: type: ALTERTABLE_RENAME
@@ -2416,18 +2336,8 @@ PREHOOK: Input: statsdb2@testtable2
 POSTHOOK: query: describe formatted statsdb2.testtable2 col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb2@testtable2
-col_name	col1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+col1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}
 PREHOOK: query: describe formatted statsdb2.testtable2 col2
 PREHOOK: type: DESCTABLE
@@ -2454,18 +2364,8 @@ PREHOOK: Input: statsdb2@testtable2
 POSTHOOK: query: describe formatted statsdb2.testtable2 col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb2@testtable2
-col_name	col4
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+col4	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}
 PREHOOK: query: analyze table testpart0 compute statistics for columns
 PREHOOK: type: ANALYZE_TABLE
diff --git ql/src/test/results/clientpositive/autoColumnStats_10.q.out ql/src/test/results/clientpositive/autoColumnStats_10.q.out
index 8ed8f30c26..41282bff3e 100644
--- ql/src/test/results/clientpositive/autoColumnStats_10.q.out
+++ ql/src/test/results/clientpositive/autoColumnStats_10.q.out
@@ -163,18 +163,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	c1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+c1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}}
 PREHOOK: query: insert into p_n1 values (2,11,111)
 PREHOOK: type: QUERY
@@ -249,18 +239,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	c1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+c1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}}
 PREHOOK: query: drop table p_n1
 PREHOOK: type: DROPTABLE
@@ -412,18 +392,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 insert_num
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	insert_num
-data_type	int
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+insert_num	int	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 PREHOOK: query: desc formatted p_n1 c1
 PREHOOK: type: DESCTABLE
@@ -431,18 +401,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	c1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+c1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 PREHOOK: query: insert into p_n1 values (2,11,111)
 PREHOOK: type: QUERY
@@ -498,18 +458,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 insert_num
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	insert_num
-data_type	int
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+insert_num	int	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 PREHOOK: query: desc formatted p_n1 c1
 PREHOOK: type: DESCTABLE
@@ -517,16 +467,6 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	c1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+c1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
diff --git ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out
index a9206f7070..35b40e9c55 100644
--- ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out
+++ ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out
@@ -48,18 +48,8 @@ PREHOOK: Input: default@space
 POSTHOOK: query: desc formatted space ` left`
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@space
-col_name	 left
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+ left	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\" left\":\"true\",\" middle \":\"true\",\"right \":\"true\"}}
 PREHOOK: query: insert into space values ("1", "2", "3")
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/compustat_avro.q.out ql/src/test/results/clientpositive/compustat_avro.q.out
index 45fd76495b..cd7cdaba0b 100644
--- ql/src/test/results/clientpositive/compustat_avro.q.out
+++ ql/src/test/results/clientpositive/compustat_avro.q.out
@@ -30,18 +30,8 @@ PREHOOK: Input: default@testavro
 POSTHOOK: query: describe formatted testAvro col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@testavro
-col_name	col1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+col1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col1\":\"true\",\"col2\":\"true\",\"col3\":\"true\",\"col4\":\"true\",\"col5\":\"true\",\"col6\":\"true\"}}
 PREHOOK: query: analyze table testAvro compute statistics for columns col1,col3
 PREHOOK: type: ANALYZE_TABLE
diff --git ql/src/test/results/clientpositive/describe_syntax.q.out ql/src/test/results/clientpositive/describe_syntax.q.out
index 79c44dec62..158d8bda90 100644
--- ql/src/test/results/clientpositive/describe_syntax.q.out
+++ ql/src/test/results/clientpositive/describe_syntax.q.out
@@ -205,18 +205,8 @@ PREHOOK: Input: db1@t1
 POSTHOOK: query: DESCRIBE FORMATTED t1 key1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: db1@t1
-col_name	key1
-data_type	int
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+key1	int	from deserializer
 COLUMN_STATS_ACCURATE	{}
 PREHOOK: query: DESCRIBE db1.t1 key1
 PREHOOK: type: DESCTABLE
@@ -238,18 +228,8 @@ PREHOOK: Input: db1@t1
 POSTHOOK: query: DESCRIBE FORMATTED db1.t1 key1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: db1@t1
-col_name	key1
-data_type	int
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+key1	int	from deserializer
 COLUMN_STATS_ACCURATE	{}
 PREHOOK: query: DESCRIBE t1 key1
 PREHOOK: type: DESCTABLE
@@ -271,18 +251,8 @@ PREHOOK: Input: db1@t1
 POSTHOOK: query: DESCRIBE FORMATTED t1 key1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: db1@t1
-col_name	key1
-data_type	int
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+key1	int	from deserializer
 COLUMN_STATS_ACCURATE	{}
 PREHOOK: query: DESCRIBE t1 PARTITION(ds='4', part='5')
 PREHOOK: type: DESCTABLE
diff --git ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
index 386b7d8ea6..d5ab761b83 100644
--- ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
+++ ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
@@ -51,18 +41,8 @@ PREHOOK: Input: default@uservisits_web_text_none_n0
 POSTHOOK: query: desc formatted UserVisits_web_text_none_n0 sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@uservisits_web_text_none_n0
-col_name	sourceIP
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+sourceIP	string	from deserializer
 PREHOOK: query: explain analyze table UserVisits_web_text_none_n0 compute statistics for columns sourceIP, avgTimeOnSite, adRevenue
 PREHOOK: type: ANALYZE_TABLE
@@ -350,18 +340,8 @@ PREHOOK: Input: default@empty_tab_n0
 POSTHOOK: query: desc formatted empty_tab_n0 a
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@empty_tab_n0
-col_name	a
-data_type	int
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+a	int	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\",\"e\":\"true\"}}
 PREHOOK: query: explain analyze table empty_tab_n0 compute statistics for columns a,b,c,d,e
@@ -546,36 +526,16 @@ PREHOOK: Input: test@uservisits_web_text_none_n0
 POSTHOOK: query: desc formatted UserVisits_web_text_none_n0 sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: test@uservisits_web_text_none_n0
-col_name	sourceIP
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+sourceIP	string	from deserializer
 PREHOOK: query: desc formatted test.UserVisits_web_text_none_n0 sourceIP
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: test@uservisits_web_text_none_n0
 POSTHOOK: query: desc formatted test.UserVisits_web_text_none_n0 sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: test@uservisits_web_text_none_n0
-col_name	sourceIP
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+sourceIP	string	from deserializer
 PREHOOK: query: desc formatted default.UserVisits_web_text_none_n0 sourceIP
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@uservisits_web_text_none_n0
diff --git ql/src/test/results/clientpositive/inputddl6.q.out ql/src/test/results/clientpositive/inputddl6.q.out
index 47cc61f10e..155c4b3264 100644
--- ql/src/test/results/clientpositive/inputddl6.q.out
+++ ql/src/test/results/clientpositive/inputddl6.q.out
@@ -95,6 +95,7 @@ STAGE PLANS:
       partition:
         ds 2008-04-09
       table: INPUTDDL6
+      extended: true
 
   Stage: Stage-1
     Fetch Operator
diff --git ql/src/test/results/clientpositive/llap/autoColumnStats_10.q.out ql/src/test/results/clientpositive/llap/autoColumnStats_10.q.out
index 8ed8f30c26..41282bff3e 100644
--- ql/src/test/results/clientpositive/llap/autoColumnStats_10.q.out
+++ ql/src/test/results/clientpositive/llap/autoColumnStats_10.q.out
@@ -163,18 +163,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	c1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+c1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}}
 PREHOOK: query: insert into p_n1 values (2,11,111)
 PREHOOK: type: QUERY
@@ -249,18 +239,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	c1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+c1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}}
 PREHOOK: query: drop table p_n1
 PREHOOK: type: DROPTABLE
@@ -412,18 +392,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 insert_num
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	insert_num
-data_type	int
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+insert_num	int	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 PREHOOK: query: desc formatted p_n1 c1
 PREHOOK: type: DESCTABLE
@@ -431,18 +401,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	c1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+c1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 PREHOOK: query: insert into p_n1 values (2,11,111)
 PREHOOK: type: QUERY
@@ -498,18 +458,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 insert_num
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	insert_num
-data_type	int
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+insert_num	int	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 PREHOOK: query: desc formatted p_n1 c1
 PREHOOK: type: DESCTABLE
@@ -517,16 +467,6 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name	c1
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+c1	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
diff --git ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out
index a9206f7070..35b40e9c55 100644
--- ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out
+++ ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out
@@ -48,18 +48,8 @@ PREHOOK: Input: default@space
 POSTHOOK: query: desc formatted space ` left`
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@space
-col_name	 left
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+ left	string	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\" left\":\"true\",\" middle \":\"true\",\"right \":\"true\"}}
 PREHOOK: query: insert into space values ("1", "2", "3")
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
index f255177f3b..2ec8ee6707 100644
--- ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
+++ ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
@@ -140,18 +140,8 @@ PREHOOK: Input: default@uservisits_web_text_none
 POSTHOOK: query: desc formatted UserVisits_web_text_none sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@uservisits_web_text_none
-col_name	sourceIP
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+sourceIP	string	from deserializer
 PREHOOK: query: explain analyze table UserVisits_web_text_none compute statistics for columns sourceIP, avgTimeOnSite, adRevenue
 PREHOOK: type: ANALYZE_TABLE
@@ -482,18 +472,8 @@ PREHOOK: Input: default@empty_tab
 POSTHOOK: query: desc formatted empty_tab a
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@empty_tab
-col_name	a
-data_type	int
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+a	int	from deserializer
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\",\"e\":\"true\"}}
 PREHOOK: query: explain analyze table empty_tab compute statistics for columns a,b,c,d,e
@@ -678,36 +658,16 @@ PREHOOK: Input: test@uservisits_web_text_none
 POSTHOOK: query: desc formatted UserVisits_web_text_none sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: test@uservisits_web_text_none
-col_name	sourceIP
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+sourceIP	string	from deserializer
 PREHOOK: query: desc formatted test.UserVisits_web_text_none sourceIP
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: test@uservisits_web_text_none
 POSTHOOK: query: desc formatted test.UserVisits_web_text_none sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: test@uservisits_web_text_none
-col_name	sourceIP
-data_type	string
-min
-max
-num_nulls
-distinct_count
-avg_col_len
-max_col_len
-num_trues
-num_falses
-bitVector
-comment	from deserializer
+# col_name	data_type	comment
+sourceIP	string	from deserializer
 PREHOOK: query: desc formatted default.UserVisits_web_text_none sourceIP
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@uservisits_web_text_none