diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
index d2e22c8388..041c25b685 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
@@ -35,9 +35,11 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.BitSet;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;
 
 /**
  * Table Scan Descriptor Currently, data is only read from a base source as part
@@ -153,7 +155,7 @@ public TableScanDesc(final String alias, List<VirtualColumn> vcs, Table tblMetadata) {
 
   @Override
   public Object clone() {
-    List<VirtualColumn> vcs = new ArrayList<VirtualColumn>(getVirtualCols());
+    List<VirtualColumn> vcs = new ArrayList<>(getVirtualCols());
     return new TableScanDesc(getAlias(), vcs, this.tableMetadata);
   }
 
@@ -194,7 +196,7 @@ public boolean isTemporary() {
   public String getTbl() {
     StringBuilder sb = new StringBuilder();
     sb.append(this.tableMetadata.getCompleteName());
-    sb.append("," + alias);
+    sb.append(',').append(alias);
     if (AcidUtils.isFullAcidTable(tableMetadata)) {
       sb.append(", ACID table");
     } else if (isTranscationalTable()) {
@@ -437,14 +439,26 @@ public String getIncludedBucketExplain() {
     }
 
     StringBuilder sb = new StringBuilder();
-    sb.append("[");
-    for (int i = 0; i < this.includedBuckets.size(); i++) {
-      if (this.includedBuckets.get(i)) {
-        sb.append(i);
-        sb.append(',');
-      }
-    }
-    sb.append(String.format("] of %d", numBuckets));
+    sb.append('[');
+    int nextSetBitIdx = includedBuckets.nextSetBit(0);
+    if (nextSetBitIdx != -1) {
+      sb.append(nextSetBitIdx);
+      for (;;) {
+        // Step past the bit just printed before scanning again; calling
+        // nextSetBit on the current index would find (and print) the same
+        // set bit a second time.
+        if (++nextSetBitIdx < 0) {
+          break;
+        }
+        if ((nextSetBitIdx = includedBuckets.nextSetBit(nextSetBitIdx)) < 0) {
+          break;
+        }
+        int endOfRun = includedBuckets.nextClearBit(nextSetBitIdx);
+        do {
+          sb.append(", ").append(nextSetBitIdx);
+        } while (++nextSetBitIdx != endOfRun);
+      }
+    }
+    sb.append(']').append(" of ").append(numBuckets);
     return sb.toString();
   }
 
@@ -457,18 +466,17 @@ public void setNumBuckets(int numBuckets) {
   }
 
   public boolean isNeedSkipHeaderFooters() {
-    boolean rtn = false;
+    int hcount = 0;
+    int fcount = 0;
     if (tableMetadata != null && tableMetadata.getTTable() != null) {
-      Map<String, String> params = tableMetadata.getTTable().getParameters();
-      if (params != null) {
-        String skipHVal = params.get(serdeConstants.HEADER_COUNT);
-        int hcount = skipHVal == null? 0 : Integer.parseInt(skipHVal);
-        String skipFVal = params.get(serdeConstants.FOOTER_COUNT);
-        int fcount = skipFVal == null? 0 : Integer.parseInt(skipFVal);
-        rtn = (hcount != 0 || fcount !=0 );
-      }
+      final Map<String, String> params =
+          Optional.ofNullable(tableMetadata.getTTable().getParameters()).orElse(Collections.emptyMap());
+      // Objects.toString(v, "0") rather than getOrDefault: getOrDefault only
+      // substitutes the default when the key is absent, so an explicitly-null
+      // value would reach Integer.parseInt and throw instead of counting as 0.
+      hcount = Integer.parseInt(Objects.toString(params.get(serdeConstants.HEADER_COUNT), "0"));
+      fcount = Integer.parseInt(Objects.toString(params.get(serdeConstants.FOOTER_COUNT), "0"));
     }
-    return rtn;
+    return hcount != 0 || fcount != 0;
   }
 
   @Override
@@ -485,14 +493,12 @@ public Map<String, String> getOpProps() {
   }
 
   public class TableScanOperatorExplainVectorization extends OperatorExplainVectorization {
 
-    private final TableScanDesc tableScanDesc;
     private final VectorTableScanDesc vectorTableScanDesc;
 
     public TableScanOperatorExplainVectorization(TableScanDesc tableScanDesc,
         VectorTableScanDesc vectorTableScanDesc) {
       // Native vectorization supported.
       super(vectorTableScanDesc, true);
-      this.tableScanDesc = tableScanDesc;
       this.vectorTableScanDesc = vectorTableScanDesc;
     }