diff --git hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyPredicateDecomposer.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyPredicateDecomposer.java
new file mode 100644
index 0000000..0cc21fa
--- /dev/null
+++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyPredicateDecomposer.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.hbase;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
+import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
+import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler.DecomposedPredicate;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+
+/**
+ * Simple abstract class to help with the creation of a {@link DecomposedPredicate}. In order to
+ * create one, consumers should extend this class and override the "getScanRange" method to define
+ * the start/stop keys and/or filters on their HBase scans.
+ * */
+public abstract class AbstractHBaseKeyPredicateDecomposer {
+
+  public static final Log LOG = LogFactory.getLog(AbstractHBaseKeyPredicateDecomposer.class);
+
+  public DecomposedPredicate decomposePredicate(String keyColName, ExprNodeDesc predicate) {
+    IndexPredicateAnalyzer analyzer = IndexPredicateAnalyzer.createAnalyzer(true);
+    analyzer.allowColumnName(keyColName);
+    analyzer.setAcceptsFields(true);
+    analyzer.setFieldValidator(getFieldValidator());
+
+    DecomposedPredicate decomposed = new DecomposedPredicate();
+
+    List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
+    decomposed.residualPredicate =
+        (ExprNodeGenericFuncDesc) analyzer.analyzePredicate(predicate, conditions);
+    if (!conditions.isEmpty()) {
+      decomposed.pushedPredicate = analyzer.translateSearchConditions(conditions);
+      try {
+        decomposed.pushedPredicateObject = getScanRange(conditions);
+      } catch (Exception e) {
+        LOG.warn("Failed to decompose predicates", e);
+        return null;
+      }
+    }
+
+    return decomposed;
+  }
+
+  /**
+   * Get the scan range that specifies the start/stop keys and/or filters to be applied onto the
+   * HBase scan.
+   * */
+  protected abstract HBaseScanRange getScanRange(List<IndexSearchCondition> searchConditions)
+      throws Exception;
+
+  /**
+   * Get an optional {@link IndexPredicateAnalyzer.FieldValidator validator}. A validator can be
+   * used to optionally filter out the predicates which need not be decomposed. By default this
+   * method returns {@code null}, which means that all predicates are pushed, but consumers can
+   * choose to override this to provide a custom validator as well.
+   * */
+  protected IndexPredicateAnalyzer.FieldValidator getFieldValidator() {
+    return null;
+  }
+}
\ No newline at end of file
diff --git hbase-handler/src/java/org/apache/hadoop/hive/hbase/CompositeHBaseKeyFactory.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/CompositeHBaseKeyFactory.java
index 53fa995..9e608d2 100644
--- hbase-handler/src/java/org/apache/hadoop/hive/hbase/CompositeHBaseKeyFactory.java
+++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/CompositeHBaseKeyFactory.java
@@ -18,37 +18,21 @@
 package org.apache.hadoop.hive.hbase;
 
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.util.Properties;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.CompareFilter;
-import org.apache.hadoop.hbase.filter.FamilyFilter;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
-import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.mapred.JobConf;
 
-import java.io.IOException;
-import java.io.Serializable;
-import java.lang.reflect.Constructor;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Properties;
-
-public class CompositeHBaseKeyFactory<T extends HBaseCompositeKey>
-    extends DefaultHBaseKeyFactory implements Configurable {
+public class CompositeHBaseKeyFactory<T extends HBaseCompositeKey> extends DefaultHBaseKeyFactory {
 
   public static final Log LOG = LogFactory.getLog(CompositeHBaseKeyFactory.class);
 
@@ -60,6 +44,7 @@
   public CompositeHBaseKeyFactory(Class<T> keyClass) throws Exception {
     // see javadoc of HBaseCompositeKey
     this.keyClass = keyClass;
+    this.conf = hbaseParams.getBaseConfiguration();
     this.constructor = keyClass.getDeclaredConstructor(
         LazySimpleStructObjectInspector.class, Properties.class, Configuration.class);
   }
@@ -71,16 +56,6 @@ public void configureJobConf(TableDesc tableDesc, JobConf jobConf) throws IOExce
   }
 
   @Override
-  public void setConf(Configuration conf) {
-    this.conf = conf;
-  }
-
-  @Override
-  public Configuration getConf() {
-    return conf;
-  }
-
-  @Override
   public T createKey(ObjectInspector inspector) throws SerDeException {
     try {
       return (T) constructor.newInstance(inspector, properties, conf);
@@ -88,77 +63,4 @@ public T createKey(ObjectInspector inspector) throws SerDeException {
       throw new SerDeException(e);
     }
   }
-
-  @Override
-  public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer,
-      ExprNodeDesc predicate) {
-    String keyColName = hbaseParams.getKeyColumnMapping().columnName;
-
-    IndexPredicateAnalyzer analyzer = IndexPredicateAnalyzer.createAnalyzer(true);
-    analyzer.allowColumnName(keyColName);
-    analyzer.setAcceptsFields(true);
-    analyzer.setFieldValidator(new Validator());
-
-    DecomposedPredicate decomposed = new DecomposedPredicate();
-
-    List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
-    decomposed.residualPredicate =
-        (ExprNodeGenericFuncDesc)analyzer.analyzePredicate(predicate, conditions);
-    if (!conditions.isEmpty()) {
-      decomposed.pushedPredicate = analyzer.translateSearchConditions(conditions);
-      try {
-        decomposed.pushedPredicateObject = setupFilter(keyColName, conditions);
-      } catch (Exception e) {
-        LOG.warn("Failed to decompose predicates", e);
-        return null;
-      }
-    }
-    return decomposed;
-  }
-
-  protected Serializable setupFilter(String keyColName, List<IndexSearchCondition> conditions)
-      throws Exception {
-    HBaseScanRange scanRange = new HBaseScanRange();
-    for (IndexSearchCondition condition : conditions) {
-      if (condition.getFields() == null) {
-        continue;
-      }
-      String field = condition.getFields()[0];
-      Object value = condition.getConstantDesc().getValue();
-      scanRange.addFilter(new FamilyFilter(
-          CompareFilter.CompareOp.EQUAL, new BinaryComparator(field.getBytes())));
-    }
-    return scanRange;
-  }
-
-  private static class Validator implements IndexPredicateAnalyzer.FieldValidator {
-
-    /**
-     * Validates the field in the {@link ExprNodeFieldDesc}. Basically this validates that the given field is the first field in the given struct.
-     * This is important specially in case of structs as order of fields in the structs is important when using for any filter down the line
-     **/
-    public boolean validate(ExprNodeFieldDesc fieldDesc) {
-      String fieldName = fieldDesc.getFieldName();
-
-      ExprNodeDesc nodeDesc = fieldDesc.getDesc();
-
-      TypeInfo typeInfo = nodeDesc.getTypeInfo();
-
-      if (!(typeInfo instanceof StructTypeInfo)) {
-        // since we are working off a ExprNodeFieldDesc which represents a field within a struct, this
-        // should never happen
-        throw new AssertionError("Expected StructTypeInfo. Found:" + typeInfo.getTypeName());
-      }
-
-      List<String> allFieldNames = ((StructTypeInfo) typeInfo).getAllStructFieldNames();
-
-      if (allFieldNames == null || allFieldNames.size() == 0) {
-        return false;
-      }
-
-      String firstElement = allFieldNames.get(0);
-
-      return firstElement.equals(fieldName);
-    }
-  }
 }
\ No newline at end of file
diff --git hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
index 07db3af..8ea4408 100644
--- hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
+++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
@@ -18,6 +18,9 @@
 package org.apache.hadoop.hive.hbase;
 
+import java.util.List;
+import java.util.Properties;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -27,9 +30,6 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import java.util.List;
-import java.util.Properties;
-
 /**
  * HBaseSerDeParameters encapsulates SerDeParameters and additional configurations that are specific for
  * HBaseSerDe.
@@ -37,9 +37,11 @@
  */
 public class HBaseSerDeParameters {
 
-  private final String serdeName;
   private final SerDeParameters serdeParams;
+  private final Configuration job;
+  private final Properties tbl;
+
   private final String columnMappingString;
   private final ColumnMappings columnMappings;
   private final boolean doColumnRegexMatching;
@@ -48,7 +50,8 @@
   private final HBaseKeyFactory keyFactory;
 
   HBaseSerDeParameters(Configuration job, Properties tbl, String serdeName) throws SerDeException {
-    this.serdeName = serdeName;
+    this.job = job;
+    this.tbl = tbl;
     this.serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);
     this.putTimestamp = Long.valueOf(tbl.getProperty(HBaseSerDe.HBASE_PUT_TIMESTAMP, "-1"));
@@ -130,6 +133,10 @@ public HBaseKeyFactory getKeyFactory() {
     return keyFactory;
   }
 
+  public Configuration getBaseConfiguration() {
+    return job;
+  }
+
   public TypeInfo getTypeForName(String columnName) {
     List<String> columnNames = serdeParams.getColumnNames();
     List<TypeInfo> columnTypes = serdeParams.getColumnTypes();
diff --git hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory3.java hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory3.java
new file mode 100644
index 0000000..dfcbaf5
--- /dev/null
+++ hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory3.java
@@ -0,0 +1,112 @@
+package org.apache.hadoop.hive.hbase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
+import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Simple extension of {@link TestHBaseKeyFactory2} with the exception of using filters instead
+ * of start and stop keys.
+ * */
+public class TestHBaseKeyFactory3 extends TestHBaseKeyFactory2 {
+
+  @Override
+  public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer,
+      ExprNodeDesc predicate) {
+    TestHBasePredicateDecomposer decomposedPredicate = new TestHBasePredicateDecomposer(keyMapping);
+    return decomposedPredicate.decomposePredicate(keyMapping.columnName, predicate);
+  }
+}
+
+class TestHBasePredicateDecomposer extends AbstractHBaseKeyPredicateDecomposer {
+
+  private static final int FIXED_LENGTH = 10;
+
+  private ColumnMapping keyMapping;
+
+  TestHBasePredicateDecomposer(ColumnMapping keyMapping) {
+    this.keyMapping = keyMapping;
+  }
+
+  @Override
+  public HBaseScanRange getScanRange(List<IndexSearchCondition> searchConditions)
+      throws Exception {
+    Map<String, List<IndexSearchCondition>> fieldConds =
+        new HashMap<String, List<IndexSearchCondition>>();
+    for (IndexSearchCondition condition : searchConditions) {
+      String fieldName = condition.getFields()[0];
+      List<IndexSearchCondition> fieldCond = fieldConds.get(fieldName);
+      if (fieldCond == null) {
+        fieldConds.put(fieldName, fieldCond = new ArrayList<IndexSearchCondition>());
+      }
+      fieldCond.add(condition);
+    }
+    Filter filter = null;
+    HBaseScanRange range = new HBaseScanRange();
+
+    StructTypeInfo type = (StructTypeInfo) keyMapping.columnType;
+    for (String name : type.getAllStructFieldNames()) {
+      List<IndexSearchCondition> fieldCond = fieldConds.get(name);
+      if (fieldCond == null || fieldCond.size() > 2) {
+        continue;
+      }
+      for (IndexSearchCondition condition : fieldCond) {
+        if (condition.getConstantDesc().getValue() == null) {
+          continue;
+        }
+        String comparisonOp = condition.getComparisonOp();
+        String constantVal = String.valueOf(condition.getConstantDesc().getValue());
+
+        byte[] valueAsBytes = toBinary(constantVal, FIXED_LENGTH, false, false);
+
+        if (comparisonOp.endsWith("UDFOPEqual")) {
+          filter = new RowFilter(CompareOp.EQUAL, new BinaryComparator(valueAsBytes));
+        } else if (comparisonOp.endsWith("UDFOPEqualOrGreaterThan")) {
+          filter = new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryComparator(valueAsBytes));
+        } else if (comparisonOp.endsWith("UDFOPGreaterThan")) {
+          filter = new RowFilter(CompareOp.GREATER, new BinaryComparator(valueAsBytes));
+        } else if (comparisonOp.endsWith("UDFOPEqualOrLessThan")) {
+          filter = new RowFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(valueAsBytes));
+        } else if (comparisonOp.endsWith("UDFOPLessThan")) {
+          filter = new RowFilter(CompareOp.LESS, new BinaryComparator(valueAsBytes));
+        } else {
+          throw new IOException(comparisonOp + " is not a supported comparison operator");
+        }
+      }
+    }
+    if (filter != null) {
+      range.addFilter(filter);
+    }
+    return range;
+  }
+
+  private byte[] toBinary(String value, int max, boolean end, boolean nextBA) {
+    return toBinary(value.getBytes(), max, end, nextBA);
+  }
+
+  private byte[] toBinary(byte[] value, int max, boolean end, boolean nextBA) {
+    byte[] bytes = new byte[max + 1];
+    System.arraycopy(value, 0, bytes, 0, Math.min(value.length, max));
+    if (end) {
+      Arrays.fill(bytes, value.length, max, (byte) 0xff);
+    }
+    if (nextBA) {
+      bytes[max] = 0x01;
+    }
+    return bytes;
+  }
+}
\ No newline at end of file
diff --git hbase-handler/src/test/queries/positive/hbase_custom_key3.q hbase-handler/src/test/queries/positive/hbase_custom_key3.q
new file mode 100644
index 0000000..3d4a687
--- /dev/null
+++ hbase-handler/src/test/queries/positive/hbase_custom_key3.q
@@ -0,0 +1,38 @@
+CREATE TABLE hbase_ck_5(key struct<col1:string,col2:string,col3:string>, value string)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES (
+    "hbase.table.name" = "hbase_custom3",
+    "hbase.columns.mapping" = ":key,cf:string",
+    "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory3");
+
+from src tablesample (5 rows)
+insert into table hbase_ck_5 select
+struct(
+  cast(key as string),
+  cast(cast(key + 1000 as int) as string),
+  cast(cast(key + 2000 as int) as string)),
+value;
+
+set hive.fetch.task.conversion=more;
+
+-- 165,238,27,311,86
+select * from hbase_ck_5;
+
+-- 238
+explain
+select * from hbase_ck_5 where key.col1 = '238' AND key.col2 = '1238';
+select * from hbase_ck_5 where key.col1 = '238' AND key.col2 = '1238';
+
+-- 165,238
+explain
+select * from hbase_ck_5 where key.col1 >= '165' AND key.col1 < '27';
+select * from hbase_ck_5 where key.col1 >= '165' AND key.col1 < '27';
+
+-- 238,311
+explain
+select * from hbase_ck_5 where key.col1 > '100' AND key.col2 >= '1238';
+select * from hbase_ck_5 where key.col1 > '100' AND key.col2 >= '1238';
+
+explain
+select * from hbase_ck_5 where key.col1 < '50' AND key.col2 >= '3238';
+select * from hbase_ck_5 where key.col1 < '50' AND key.col2 >= '3238';
diff --git hbase-handler/src/test/results/positive/hbase_custom_key3.q.out hbase-handler/src/test/results/positive/hbase_custom_key3.q.out
new file mode 100644
index 0000000..1777caf
--- /dev/null
+++ hbase-handler/src/test/results/positive/hbase_custom_key3.q.out
@@ -0,0 +1,200 @@
+PREHOOK: query: CREATE TABLE hbase_ck_5(key struct<col1:string,col2:string,col3:string>, value string)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES (
+    "hbase.table.name" = "hbase_custom3",
+    "hbase.columns.mapping" = ":key,cf:string",
+    "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory3")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: CREATE TABLE hbase_ck_5(key struct<col1:string,col2:string,col3:string>, value string)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES (
+    "hbase.table.name" = "hbase_custom3",
+    "hbase.columns.mapping" = ":key,cf:string",
+    "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory3")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hbase_ck_5
+PREHOOK: query: from src tablesample (5 rows)
+insert into table hbase_ck_5 select
+struct(
+  cast(key as string),
+  cast(cast(key + 1000 as int) as string),
+  cast(cast(key + 2000 as int) as string)),
+value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@hbase_ck_5
+POSTHOOK: query: from src tablesample (5 rows)
+insert into table hbase_ck_5 select
+struct(
+  cast(key as string),
+  cast(cast(key + 1000 as int) as string),
+  cast(cast(key + 2000 as int) as string)),
+value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@hbase_ck_5
+PREHOOK: query: -- 165,238,27,311,86
+select * from hbase_ck_5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hbase_ck_5
+#### A masked pattern was here ####
+POSTHOOK: query: -- 165,238,27,311,86
+select * from hbase_ck_5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hbase_ck_5
+#### A masked pattern was here ####
+{"col1":"165","col2":"1165","col3":"2165"}	val_165
+{"col1":"238","col2":"1238","col3":"2238"}	val_238
+{"col1":"27","col2":"1027","col3":"2027"}	val_27
+{"col1":"311","col2":"1311","col3":"2311"}	val_311
+{"col1":"86","col2":"1086","col3":"2086"}	val_86
+PREHOOK: query: -- 238
+explain
+select * from hbase_ck_5 where key.col1 = '238' AND key.col2 = '1238'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- 238
+explain
+select * from hbase_ck_5 where key.col1 = '238' AND key.col2 = '1238'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: hbase_ck_5
+          filterExpr: ((key.col1 = '238') and (key.col2 = '1238')) (type: boolean)
+          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Filter Operator
+            predicate: ((key.col1 = '238') and (key.col2 = '1238')) (type: boolean)
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Select Operator
+              expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from hbase_ck_5 where key.col1 = '238' AND key.col2 = '1238'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hbase_ck_5
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hbase_ck_5 where key.col1 = '238' AND key.col2 = '1238'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hbase_ck_5
+#### A masked pattern was here ####
+{"col1":"238","col2":"1238","col3":"2238"}	val_238
+PREHOOK: query: -- 165,238
+explain
+select * from hbase_ck_5 where key.col1 >= '165' AND key.col1 < '27'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- 165,238
+explain
+select * from hbase_ck_5 where key.col1 >= '165' AND key.col1 < '27'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: hbase_ck_5
+          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Filter Operator
+            predicate: ((key.col1 >= '165') and (key.col1 < '27')) (type: boolean)
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Select Operator
+              expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from hbase_ck_5 where key.col1 >= '165' AND key.col1 < '27'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hbase_ck_5
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hbase_ck_5 where key.col1 >= '165' AND key.col1 < '27'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hbase_ck_5
+#### A masked pattern was here ####
+{"col1":"165","col2":"1165","col3":"2165"}	val_165
+{"col1":"238","col2":"1238","col3":"2238"}	val_238
+PREHOOK: query: -- 238,311
+explain
+select * from hbase_ck_5 where key.col1 > '100' AND key.col2 >= '1238'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- 238,311
+explain
+select * from hbase_ck_5 where key.col1 > '100' AND key.col2 >= '1238'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: hbase_ck_5
+          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Filter Operator
+            predicate: ((key.col1 > '100') and (key.col2 >= '1238')) (type: boolean)
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Select Operator
+              expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from hbase_ck_5 where key.col1 > '100' AND key.col2 >= '1238'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hbase_ck_5
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hbase_ck_5 where key.col1 > '100' AND key.col2 >= '1238'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hbase_ck_5
+#### A masked pattern was here ####
+{"col1":"238","col2":"1238","col3":"2238"}	val_238
+{"col1":"311","col2":"1311","col3":"2311"}	val_311
+PREHOOK: query: explain
+select * from hbase_ck_5 where key.col1 < '50' AND key.col2 >= '3238'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from hbase_ck_5 where key.col1 < '50' AND key.col2 >= '3238'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: hbase_ck_5
+          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Filter Operator
+            predicate: ((key.col1 < '50') and (key.col2 >= '3238')) (type: boolean)
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Select Operator
+              expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from hbase_ck_5 where key.col1 < '50' AND key.col2 >= '3238'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hbase_ck_5
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hbase_ck_5 where key.col1 < '50' AND key.col2 >= '3238'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hbase_ck_5
+#### A masked pattern was here ####
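
Note (not part of the patch): the sketch below is a minimal, hypothetical consumer of the new AbstractHBaseKeyPredicateDecomposer API, added only to illustrate the intended extension pattern. It uses only members that appear in this patch (getScanRange, getFieldValidator, HBaseScanRange.addFilter, IndexSearchCondition accessors, RowFilter/BinaryComparator); the class name ExampleKeyPredicateDecomposer, the hardcoded "col1" field, and the assumption that the row key bytes equal the UTF-8 encoding of the pushed constant are illustrative assumptions, not anything the patch defines.

package org.apache.hadoop.hive.hbase;

import java.util.List;

import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;

/**
 * Illustrative decomposer (hypothetical, not shipped with the patch): pushes equality conditions
 * on the key down to HBase as a RowFilter. Hive still re-applies the full predicate in its Filter
 * Operator (as the explain plans in hbase_custom_key3.q.out show), so conditions skipped here are
 * not lost, only not pushed to the scan.
 */
public class ExampleKeyPredicateDecomposer extends AbstractHBaseKeyPredicateDecomposer {

  @Override
  protected HBaseScanRange getScanRange(List<IndexSearchCondition> searchConditions)
      throws Exception {
    HBaseScanRange range = new HBaseScanRange();
    for (IndexSearchCondition condition : searchConditions) {
      // Only equality comparisons on a non-null constant are translated into a filter here.
      if (condition.getConstantDesc() == null || condition.getConstantDesc().getValue() == null) {
        continue;
      }
      if (condition.getComparisonOp().endsWith("UDFOPEqual")) {
        // Assumption: the row key is the plain UTF-8 encoding of the pushed constant.
        byte[] value = String.valueOf(condition.getConstantDesc().getValue()).getBytes("UTF-8");
        range.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryComparator(value)));
      }
    }
    return range;
  }

  @Override
  protected IndexPredicateAnalyzer.FieldValidator getFieldValidator() {
    // Accept only predicates on the first key field ("col1" is a hypothetical field name),
    // mirroring the Validator that this patch removes from CompositeHBaseKeyFactory.
    return new IndexPredicateAnalyzer.FieldValidator() {
      public boolean validate(ExprNodeFieldDesc fieldDesc) {
        return "col1".equals(fieldDesc.getFieldName());
      }
    };
  }
}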