diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/SampleURLHook.java b/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/SampleURLHook.java new file mode 100644 index 0000000..7f41f57 --- /dev/null +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/SampleURLHook.java @@ -0,0 +1,49 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.contrib.metastore.hooks; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.hooks.JDOConnectionURLHook; + +/** + * First returns a url for a blank DB, then returns a URL for the original DB. + * For testing the feature in url_hook.q + */ +public class SampleURLHook implements JDOConnectionURLHook { + + private String originalUrl; + + @Override + public String getJdoConnectionUrl(Configuration conf) throws Exception { + if (originalUrl == null) { + originalUrl = conf.get(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, ""); + return "jdbc:derby:;databaseName=target/tmp/junit_metastore_db_blank;create=true"; + } else { + return originalUrl; + } + + } + + @Override + public void notifyBadConnectionUrl(String url) { + + } + +} diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java b/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java deleted file mode 100644 index 07c5d98..0000000 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.contrib.metastore.hooks; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.hooks.JDOConnectionURLHook; - -/** - * First returns a url for a blank DB, then returns a URL for the original DB. 
- * For testing the feature in url_hook.q - */ -public class TestURLHook implements JDOConnectionURLHook { - - private String originalUrl; - - @Override - public String getJdoConnectionUrl(Configuration conf) throws Exception { - if (originalUrl == null) { - originalUrl = conf.get(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, ""); - return "jdbc:derby:;databaseName=target/tmp/junit_metastore_db_blank;create=true"; - } else { - return originalUrl; - } - - } - - @Override - public void notifyBadConnectionUrl(String url) { - - } - -} diff --git a/contrib/src/test/queries/clientpositive/url_hook.q b/contrib/src/test/queries/clientpositive/url_hook.q index db21432..1d56408 100644 --- a/contrib/src/test/queries/clientpositive/url_hook.q +++ b/contrib/src/test/queries/clientpositive/url_hook.q @@ -1,6 +1,6 @@ add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; SHOW TABLES 'src'; -set hive.metastore.ds.connection.url.hook=org.apache.hadoop.hive.contrib.metastore.hooks.TestURLHook; +set hive.metastore.ds.connection.url.hook=org.apache.hadoop.hive.contrib.metastore.hooks.SampleURLHook; -- changes to dummy derby store.. should return empty result SHOW TABLES 'src'; diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/SampleHBaseKeyFactory.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/SampleHBaseKeyFactory.java new file mode 100644 index 0000000..2d647e2 --- /dev/null +++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/SampleHBaseKeyFactory.java @@ -0,0 +1,109 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.hbase; + +import org.apache.hadoop.hive.serde2.BaseStructObjectInspector; +import org.apache.hadoop.hive.serde2.ByteStream; +import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; +import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.StructField; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class SampleHBaseKeyFactory extends DefaultHBaseKeyFactory { + + private static final String DELIMITER_PATTERN = "\\$\\$"; + private static final byte[] DELIMITER_BINARY = "$$".getBytes(); + + @Override + public ObjectInspector createKeyObjectInspector(TypeInfo type) { + return new SlashSeparatedOI((StructTypeInfo)type); + } + + @Override + public LazyObjectBase createKey(ObjectInspector inspector) throws SerDeException { + return new DoubleDollarSeparated(); + } + + private final ByteStream.Output output = new ByteStream.Output(); + + @Override + public byte[] serializeKey(Object object, StructField field) throws IOException { + ObjectInspector inspector = field.getFieldObjectInspector(); + if (inspector.getCategory() != ObjectInspector.Category.STRUCT) { + throw new IllegalStateException("invalid type value " + inspector.getTypeName()); + } + output.reset(); + for (Object element : ((StructObjectInspector)inspector).getStructFieldsDataAsList(object)) { + if (output.getLength() > 0) { + output.write(DELIMITER_BINARY); + } + output.write(String.valueOf(element).getBytes()); + } + return output.getLength() > 0 ? output.toByteArray() : null; + } + + private static class DoubleDollarSeparated implements LazyObjectBase { + + private Object[] fields; + + @Override + public void init(ByteArrayRef bytes, int start, int length) { + fields = new String(bytes.getData(), start, length).split(DELIMITER_PATTERN); + } + + @Override + public Object getObject() { + return this; + } + } + + private static class SlashSeparatedOI extends BaseStructObjectInspector { + + private int length; + + private SlashSeparatedOI(StructTypeInfo type) { + List names = type.getAllStructFieldNames(); + List ois = new ArrayList(); + for (int i = 0; i < names.size(); i++) { + ois.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); + } + init(names, ois, null); + } + + @Override + public Object getStructFieldData(Object data, StructField fieldRef) { + return ((DoubleDollarSeparated)data).fields[((MyField)fieldRef).getFieldID()]; + } + + @Override + public List getStructFieldsDataAsList(Object data) { + return Arrays.asList(((DoubleDollarSeparated)data).fields); + } + } +} diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/SampleHBaseKeyFactory2.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/SampleHBaseKeyFactory2.java new file mode 100644 index 0000000..7086d57 --- /dev/null +++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/SampleHBaseKeyFactory2.java @@ -0,0 +1,247 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.hbase; + +import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer; +import org.apache.hadoop.hive.ql.index.IndexSearchCondition; +import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; +import org.apache.hadoop.hive.serde2.BaseStructObjectInspector; +import org.apache.hadoop.hive.serde2.ByteStream; +import org.apache.hadoop.hive.serde2.Deserializer; +import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; +import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.StructField; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.mapred.JobConf; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class SampleHBaseKeyFactory2 extends AbstractHBaseKeyFactory { + + private static final int FIXED_LENGTH = 10; + + @Override + public ObjectInspector createKeyObjectInspector(TypeInfo type) { + return new StringArrayOI((StructTypeInfo)type); + } + + @Override + public LazyObjectBase createKey(ObjectInspector inspector) throws SerDeException { + return new FixedLengthed(FIXED_LENGTH); + } + + private final ByteStream.Output output = new ByteStream.Output(); + + @Override + public byte[] serializeKey(Object object, StructField field) throws IOException { + ObjectInspector inspector = field.getFieldObjectInspector(); + if (inspector.getCategory() != ObjectInspector.Category.STRUCT) { + throw new IllegalStateException("invalid type value " + inspector.getTypeName()); + } + output.reset(); + for (Object element : ((StructObjectInspector)inspector).getStructFieldsDataAsList(object)) { + output.write(toBinary(String.valueOf(element).getBytes(), FIXED_LENGTH, false, false)); + } + return output.getLength() > 0 ? 
output.toByteArray() : null; + } + + private byte[] toBinary(String value, int max, boolean end, boolean nextBA) { + return toBinary(value.getBytes(), max, end, nextBA); + } + + private byte[] toBinary(byte[] value, int max, boolean end, boolean nextBA) { + byte[] bytes = new byte[max + 1]; + System.arraycopy(value, 0, bytes, 0, Math.min(value.length, max)); + if (end) { + Arrays.fill(bytes, value.length, max, (byte)0xff); + } + if (nextBA) { + bytes[max] = 0x01; + } + return bytes; + } + + @Override + public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer, + ExprNodeDesc predicate) { + String keyColName = keyMapping.columnName; + + IndexPredicateAnalyzer analyzer = IndexPredicateAnalyzer.createAnalyzer(false); + analyzer.allowColumnName(keyColName); + analyzer.setAcceptsFields(true); + + DecomposedPredicate decomposed = new DecomposedPredicate(); + + List searchConditions = new ArrayList(); + decomposed.residualPredicate = + (ExprNodeGenericFuncDesc)analyzer.analyzePredicate(predicate, searchConditions); + if (!searchConditions.isEmpty()) { + decomposed.pushedPredicate = analyzer.translateSearchConditions(searchConditions); + try { + decomposed.pushedPredicateObject = setupFilter(keyColName, searchConditions); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + return decomposed; + } + + private HBaseScanRange setupFilter(String keyColName, List conditions) + throws IOException { + Map> fieldConds = + new HashMap>(); + for (IndexSearchCondition condition : conditions) { + assert keyColName.equals(condition.getColumnDesc().getColumn()); + String fieldName = condition.getFields()[0]; + List fieldCond = fieldConds.get(fieldName); + if (fieldCond == null) { + fieldConds.put(fieldName, fieldCond = new ArrayList()); + } + fieldCond.add(condition); + } + HBaseScanRange range = new HBaseScanRange(); + + ByteArrayOutputStream startRow = new ByteArrayOutputStream(); + ByteArrayOutputStream stopRow = new ByteArrayOutputStream(); + + StructTypeInfo type = (StructTypeInfo) keyMapping.columnType; + for (String name : type.getAllStructFieldNames()) { + List fieldCond = fieldConds.get(name); + if (fieldCond == null || fieldCond.size() > 2) { + continue; + } + byte[] startElement = null; + byte[] stopElement = null; + for (IndexSearchCondition condition : fieldCond) { + if (condition.getConstantDesc().getValue() == null) { + continue; + } + String comparisonOp = condition.getComparisonOp(); + String constantVal = String.valueOf(condition.getConstantDesc().getValue()); + + if (comparisonOp.endsWith("UDFOPEqual")) { + startElement = toBinary(constantVal, FIXED_LENGTH, false, false); + stopElement = toBinary(constantVal, FIXED_LENGTH, true, true); + } else if (comparisonOp.endsWith("UDFOPEqualOrGreaterThan")) { + startElement = toBinary(constantVal, FIXED_LENGTH, false, false); + } else if (comparisonOp.endsWith("UDFOPGreaterThan")) { + startElement = toBinary(constantVal, FIXED_LENGTH, false, true); + } else if (comparisonOp.endsWith("UDFOPEqualOrLessThan")) { + stopElement = toBinary(constantVal, FIXED_LENGTH, true, false); + } else if (comparisonOp.endsWith("UDFOPLessThan")) { + stopElement = toBinary(constantVal, FIXED_LENGTH, true, true); + } else { + throw new IOException(comparisonOp + " is not a supported comparison operator"); + } + } + if (startRow != null) { + if (startElement != null) { + startRow.write(startElement); + } else { + if (startRow.size() > 0) { + range.setStartRow(startRow.toByteArray()); + } + startRow = null; + } + } + if 
(stopRow != null) { + if (stopElement != null) { + stopRow.write(stopElement); + } else { + if (stopRow.size() > 0) { + range.setStopRow(stopRow.toByteArray()); + } + stopRow = null; + } + } + if (startElement == null && stopElement == null) { + break; + } + } + if (startRow != null && startRow.size() > 0) { + range.setStartRow(startRow.toByteArray()); + } + if (stopRow != null && stopRow.size() > 0) { + range.setStopRow(stopRow.toByteArray()); + } + return range; + } + + private static class FixedLengthed implements LazyObjectBase { + + private final int fixedLength; + private final List fields = new ArrayList(); + + public FixedLengthed(int fixedLength) { + this.fixedLength = fixedLength; + } + + @Override + public void init(ByteArrayRef bytes, int start, int length) { + fields.clear(); + byte[] data = bytes.getData(); + int rowStart = start; + int rowStop = rowStart + fixedLength; + for (; rowStart < length; rowStart = rowStop + 1, rowStop = rowStart + fixedLength) { + fields.add(new String(data, rowStart, rowStop - rowStart).trim()); + } + } + + @Override + public Object getObject() { + return this; + } + } + + private static class StringArrayOI extends BaseStructObjectInspector { + + private int length; + + private StringArrayOI(StructTypeInfo type) { + List names = type.getAllStructFieldNames(); + List ois = new ArrayList(); + for (int i = 0; i < names.size(); i++) { + ois.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); + } + init(names, ois, null); + } + + @Override + public Object getStructFieldData(Object data, StructField fieldRef) { + return ((FixedLengthed)data).fields.get(((MyField)fieldRef).getFieldID()); + } + + @Override + public List getStructFieldsDataAsList(Object data) { + return ((FixedLengthed)data).fields; + } + } +} diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/SampleHBaseKeyFactory3.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/SampleHBaseKeyFactory3.java new file mode 100644 index 0000000..712725f --- /dev/null +++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/SampleHBaseKeyFactory3.java @@ -0,0 +1,130 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.hbase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hbase.filter.BinaryComparator; +import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.RowFilter; +import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping; +import org.apache.hadoop.hive.ql.index.IndexSearchCondition; +import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; +import org.apache.hadoop.hive.serde2.Deserializer; +import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; +import org.apache.hadoop.mapred.JobConf; + +/** + * Simple extension of {@link SampleHBaseKeyFactory2} with exception of using filters instead of start + * and stop keys + * */ +public class SampleHBaseKeyFactory3 extends SampleHBaseKeyFactory2 { + + @Override + public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer, + ExprNodeDesc predicate) { + SampleHBasePredicateDecomposer decomposedPredicate = new SampleHBasePredicateDecomposer(keyMapping); + return decomposedPredicate.decomposePredicate(keyMapping.columnName, predicate); + } +} + +class SampleHBasePredicateDecomposer extends AbstractHBaseKeyPredicateDecomposer { + + private static final int FIXED_LENGTH = 10; + + private ColumnMapping keyMapping; + + SampleHBasePredicateDecomposer(ColumnMapping keyMapping) { + this.keyMapping = keyMapping; + } + + @Override + public HBaseScanRange getScanRange(List searchConditions) + throws Exception { + Map> fieldConds = + new HashMap>(); + for (IndexSearchCondition condition : searchConditions) { + String fieldName = condition.getFields()[0]; + List fieldCond = fieldConds.get(fieldName); + if (fieldCond == null) { + fieldConds.put(fieldName, fieldCond = new ArrayList()); + } + fieldCond.add(condition); + } + Filter filter = null; + HBaseScanRange range = new HBaseScanRange(); + + StructTypeInfo type = (StructTypeInfo) keyMapping.columnType; + for (String name : type.getAllStructFieldNames()) { + List fieldCond = fieldConds.get(name); + if (fieldCond == null || fieldCond.size() > 2) { + continue; + } + for (IndexSearchCondition condition : fieldCond) { + if (condition.getConstantDesc().getValue() == null) { + continue; + } + String comparisonOp = condition.getComparisonOp(); + String constantVal = String.valueOf(condition.getConstantDesc().getValue()); + + byte[] valueAsBytes = toBinary(constantVal, FIXED_LENGTH, false, false); + + if (comparisonOp.endsWith("UDFOPEqual")) { + filter = new RowFilter(CompareOp.EQUAL, new BinaryComparator(valueAsBytes)); + } else if (comparisonOp.endsWith("UDFOPEqualOrGreaterThan")) { + filter = new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryComparator(valueAsBytes)); + } else if (comparisonOp.endsWith("UDFOPGreaterThan")) { + filter = new RowFilter(CompareOp.GREATER, new BinaryComparator(valueAsBytes)); + } else if (comparisonOp.endsWith("UDFOPEqualOrLessThan")) { + filter = new RowFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(valueAsBytes)); + } else if (comparisonOp.endsWith("UDFOPLessThan")) { + filter = new RowFilter(CompareOp.LESS, new BinaryComparator(valueAsBytes)); + } else { + throw new IOException(comparisonOp + " is not a supported comparison operator"); + } + } + } + if (filter != null) { + range.addFilter(filter); + } + return range; + } + + private byte[] toBinary(String value, int max, boolean 
end, boolean nextBA) { + return toBinary(value.getBytes(), max, end, nextBA); + } + + private byte[] toBinary(byte[] value, int max, boolean end, boolean nextBA) { + byte[] bytes = new byte[max + 1]; + System.arraycopy(value, 0, bytes, 0, Math.min(value.length, max)); + if (end) { + Arrays.fill(bytes, value.length, max, (byte) 0xff); + } + if (nextBA) { + bytes[max] = 0x01; + } + return bytes; + } +} \ No newline at end of file diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory.java deleted file mode 100644 index 8962533..0000000 --- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory.java +++ /dev/null @@ -1,109 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.hbase; - -import org.apache.hadoop.hive.serde2.BaseStructObjectInspector; -import org.apache.hadoop.hive.serde2.ByteStream; -import org.apache.hadoop.hive.serde2.SerDeException; -import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; -import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.StructField; -import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; -import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public class TestHBaseKeyFactory extends DefaultHBaseKeyFactory { - - private static final String DELIMITER_PATTERN = "\\$\\$"; - private static final byte[] DELIMITER_BINARY = "$$".getBytes(); - - @Override - public ObjectInspector createKeyObjectInspector(TypeInfo type) { - return new SlashSeparatedOI((StructTypeInfo)type); - } - - @Override - public LazyObjectBase createKey(ObjectInspector inspector) throws SerDeException { - return new DoubleDollarSeparated(); - } - - private final ByteStream.Output output = new ByteStream.Output(); - - @Override - public byte[] serializeKey(Object object, StructField field) throws IOException { - ObjectInspector inspector = field.getFieldObjectInspector(); - if (inspector.getCategory() != ObjectInspector.Category.STRUCT) { - throw new IllegalStateException("invalid type value " + inspector.getTypeName()); - } - output.reset(); - for (Object element : ((StructObjectInspector)inspector).getStructFieldsDataAsList(object)) { - if (output.getLength() > 0) { - output.write(DELIMITER_BINARY); - } - output.write(String.valueOf(element).getBytes()); - } 
- return output.getLength() > 0 ? output.toByteArray() : null; - } - - private static class DoubleDollarSeparated implements LazyObjectBase { - - private Object[] fields; - - @Override - public void init(ByteArrayRef bytes, int start, int length) { - fields = new String(bytes.getData(), start, length).split(DELIMITER_PATTERN); - } - - @Override - public Object getObject() { - return this; - } - } - - private static class SlashSeparatedOI extends BaseStructObjectInspector { - - private int length; - - private SlashSeparatedOI(StructTypeInfo type) { - List names = type.getAllStructFieldNames(); - List ois = new ArrayList(); - for (int i = 0; i < names.size(); i++) { - ois.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); - } - init(names, ois, null); - } - - @Override - public Object getStructFieldData(Object data, StructField fieldRef) { - return ((DoubleDollarSeparated)data).fields[((MyField)fieldRef).getFieldID()]; - } - - @Override - public List getStructFieldsDataAsList(Object data) { - return Arrays.asList(((DoubleDollarSeparated)data).fields); - } - } -} diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory2.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory2.java deleted file mode 100644 index ecd5061..0000000 --- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory2.java +++ /dev/null @@ -1,247 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.hbase; - -import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer; -import org.apache.hadoop.hive.ql.index.IndexSearchCondition; -import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; -import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; -import org.apache.hadoop.hive.serde2.BaseStructObjectInspector; -import org.apache.hadoop.hive.serde2.ByteStream; -import org.apache.hadoop.hive.serde2.Deserializer; -import org.apache.hadoop.hive.serde2.SerDeException; -import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; -import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.StructField; -import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; -import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; -import org.apache.hadoop.mapred.JobConf; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class TestHBaseKeyFactory2 extends AbstractHBaseKeyFactory { - - private static final int FIXED_LENGTH = 10; - - @Override - public ObjectInspector createKeyObjectInspector(TypeInfo type) { - return new StringArrayOI((StructTypeInfo)type); - } - - @Override - public LazyObjectBase createKey(ObjectInspector inspector) throws SerDeException { - return new FixedLengthed(FIXED_LENGTH); - } - - private final ByteStream.Output output = new ByteStream.Output(); - - @Override - public byte[] serializeKey(Object object, StructField field) throws IOException { - ObjectInspector inspector = field.getFieldObjectInspector(); - if (inspector.getCategory() != ObjectInspector.Category.STRUCT) { - throw new IllegalStateException("invalid type value " + inspector.getTypeName()); - } - output.reset(); - for (Object element : ((StructObjectInspector)inspector).getStructFieldsDataAsList(object)) { - output.write(toBinary(String.valueOf(element).getBytes(), FIXED_LENGTH, false, false)); - } - return output.getLength() > 0 ? 
output.toByteArray() : null; - } - - private byte[] toBinary(String value, int max, boolean end, boolean nextBA) { - return toBinary(value.getBytes(), max, end, nextBA); - } - - private byte[] toBinary(byte[] value, int max, boolean end, boolean nextBA) { - byte[] bytes = new byte[max + 1]; - System.arraycopy(value, 0, bytes, 0, Math.min(value.length, max)); - if (end) { - Arrays.fill(bytes, value.length, max, (byte)0xff); - } - if (nextBA) { - bytes[max] = 0x01; - } - return bytes; - } - - @Override - public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer, - ExprNodeDesc predicate) { - String keyColName = keyMapping.columnName; - - IndexPredicateAnalyzer analyzer = IndexPredicateAnalyzer.createAnalyzer(false); - analyzer.allowColumnName(keyColName); - analyzer.setAcceptsFields(true); - - DecomposedPredicate decomposed = new DecomposedPredicate(); - - List searchConditions = new ArrayList(); - decomposed.residualPredicate = - (ExprNodeGenericFuncDesc)analyzer.analyzePredicate(predicate, searchConditions); - if (!searchConditions.isEmpty()) { - decomposed.pushedPredicate = analyzer.translateSearchConditions(searchConditions); - try { - decomposed.pushedPredicateObject = setupFilter(keyColName, searchConditions); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - return decomposed; - } - - private HBaseScanRange setupFilter(String keyColName, List conditions) - throws IOException { - Map> fieldConds = - new HashMap>(); - for (IndexSearchCondition condition : conditions) { - assert keyColName.equals(condition.getColumnDesc().getColumn()); - String fieldName = condition.getFields()[0]; - List fieldCond = fieldConds.get(fieldName); - if (fieldCond == null) { - fieldConds.put(fieldName, fieldCond = new ArrayList()); - } - fieldCond.add(condition); - } - HBaseScanRange range = new HBaseScanRange(); - - ByteArrayOutputStream startRow = new ByteArrayOutputStream(); - ByteArrayOutputStream stopRow = new ByteArrayOutputStream(); - - StructTypeInfo type = (StructTypeInfo) keyMapping.columnType; - for (String name : type.getAllStructFieldNames()) { - List fieldCond = fieldConds.get(name); - if (fieldCond == null || fieldCond.size() > 2) { - continue; - } - byte[] startElement = null; - byte[] stopElement = null; - for (IndexSearchCondition condition : fieldCond) { - if (condition.getConstantDesc().getValue() == null) { - continue; - } - String comparisonOp = condition.getComparisonOp(); - String constantVal = String.valueOf(condition.getConstantDesc().getValue()); - - if (comparisonOp.endsWith("UDFOPEqual")) { - startElement = toBinary(constantVal, FIXED_LENGTH, false, false); - stopElement = toBinary(constantVal, FIXED_LENGTH, true, true); - } else if (comparisonOp.endsWith("UDFOPEqualOrGreaterThan")) { - startElement = toBinary(constantVal, FIXED_LENGTH, false, false); - } else if (comparisonOp.endsWith("UDFOPGreaterThan")) { - startElement = toBinary(constantVal, FIXED_LENGTH, false, true); - } else if (comparisonOp.endsWith("UDFOPEqualOrLessThan")) { - stopElement = toBinary(constantVal, FIXED_LENGTH, true, false); - } else if (comparisonOp.endsWith("UDFOPLessThan")) { - stopElement = toBinary(constantVal, FIXED_LENGTH, true, true); - } else { - throw new IOException(comparisonOp + " is not a supported comparison operator"); - } - } - if (startRow != null) { - if (startElement != null) { - startRow.write(startElement); - } else { - if (startRow.size() > 0) { - range.setStartRow(startRow.toByteArray()); - } - startRow = null; - } - } - if 
(stopRow != null) { - if (stopElement != null) { - stopRow.write(stopElement); - } else { - if (stopRow.size() > 0) { - range.setStopRow(stopRow.toByteArray()); - } - stopRow = null; - } - } - if (startElement == null && stopElement == null) { - break; - } - } - if (startRow != null && startRow.size() > 0) { - range.setStartRow(startRow.toByteArray()); - } - if (stopRow != null && stopRow.size() > 0) { - range.setStopRow(stopRow.toByteArray()); - } - return range; - } - - private static class FixedLengthed implements LazyObjectBase { - - private final int fixedLength; - private final List fields = new ArrayList(); - - public FixedLengthed(int fixedLength) { - this.fixedLength = fixedLength; - } - - @Override - public void init(ByteArrayRef bytes, int start, int length) { - fields.clear(); - byte[] data = bytes.getData(); - int rowStart = start; - int rowStop = rowStart + fixedLength; - for (; rowStart < length; rowStart = rowStop + 1, rowStop = rowStart + fixedLength) { - fields.add(new String(data, rowStart, rowStop - rowStart).trim()); - } - } - - @Override - public Object getObject() { - return this; - } - } - - private static class StringArrayOI extends BaseStructObjectInspector { - - private int length; - - private StringArrayOI(StructTypeInfo type) { - List names = type.getAllStructFieldNames(); - List ois = new ArrayList(); - for (int i = 0; i < names.size(); i++) { - ois.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); - } - init(names, ois, null); - } - - @Override - public Object getStructFieldData(Object data, StructField fieldRef) { - return ((FixedLengthed)data).fields.get(((MyField)fieldRef).getFieldID()); - } - - @Override - public List getStructFieldsDataAsList(Object data) { - return ((FixedLengthed)data).fields; - } - } -} diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory3.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory3.java deleted file mode 100644 index 2784767..0000000 --- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory3.java +++ /dev/null @@ -1,130 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.hbase; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.hadoop.hbase.filter.BinaryComparator; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; -import org.apache.hadoop.hbase.filter.Filter; -import org.apache.hadoop.hbase.filter.RowFilter; -import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping; -import org.apache.hadoop.hive.ql.index.IndexSearchCondition; -import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; -import org.apache.hadoop.hive.serde2.Deserializer; -import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; -import org.apache.hadoop.mapred.JobConf; - -/** - * Simple extension of {@link TestHBaseKeyFactory2} with exception of using filters instead of start - * and stop keys - * */ -public class TestHBaseKeyFactory3 extends TestHBaseKeyFactory2 { - - @Override - public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer, - ExprNodeDesc predicate) { - TestHBasePredicateDecomposer decomposedPredicate = new TestHBasePredicateDecomposer(keyMapping); - return decomposedPredicate.decomposePredicate(keyMapping.columnName, predicate); - } -} - -class TestHBasePredicateDecomposer extends AbstractHBaseKeyPredicateDecomposer { - - private static final int FIXED_LENGTH = 10; - - private ColumnMapping keyMapping; - - TestHBasePredicateDecomposer(ColumnMapping keyMapping) { - this.keyMapping = keyMapping; - } - - @Override - public HBaseScanRange getScanRange(List searchConditions) - throws Exception { - Map> fieldConds = - new HashMap>(); - for (IndexSearchCondition condition : searchConditions) { - String fieldName = condition.getFields()[0]; - List fieldCond = fieldConds.get(fieldName); - if (fieldCond == null) { - fieldConds.put(fieldName, fieldCond = new ArrayList()); - } - fieldCond.add(condition); - } - Filter filter = null; - HBaseScanRange range = new HBaseScanRange(); - - StructTypeInfo type = (StructTypeInfo) keyMapping.columnType; - for (String name : type.getAllStructFieldNames()) { - List fieldCond = fieldConds.get(name); - if (fieldCond == null || fieldCond.size() > 2) { - continue; - } - for (IndexSearchCondition condition : fieldCond) { - if (condition.getConstantDesc().getValue() == null) { - continue; - } - String comparisonOp = condition.getComparisonOp(); - String constantVal = String.valueOf(condition.getConstantDesc().getValue()); - - byte[] valueAsBytes = toBinary(constantVal, FIXED_LENGTH, false, false); - - if (comparisonOp.endsWith("UDFOPEqual")) { - filter = new RowFilter(CompareOp.EQUAL, new BinaryComparator(valueAsBytes)); - } else if (comparisonOp.endsWith("UDFOPEqualOrGreaterThan")) { - filter = new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryComparator(valueAsBytes)); - } else if (comparisonOp.endsWith("UDFOPGreaterThan")) { - filter = new RowFilter(CompareOp.GREATER, new BinaryComparator(valueAsBytes)); - } else if (comparisonOp.endsWith("UDFOPEqualOrLessThan")) { - filter = new RowFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(valueAsBytes)); - } else if (comparisonOp.endsWith("UDFOPLessThan")) { - filter = new RowFilter(CompareOp.LESS, new BinaryComparator(valueAsBytes)); - } else { - throw new IOException(comparisonOp + " is not a supported comparison operator"); - } - } - } - if (filter != null) { - range.addFilter(filter); - } - return range; - } - - private byte[] toBinary(String value, int max, boolean end, boolean 
nextBA) { - return toBinary(value.getBytes(), max, end, nextBA); - } - - private byte[] toBinary(byte[] value, int max, boolean end, boolean nextBA) { - byte[] bytes = new byte[max + 1]; - System.arraycopy(value, 0, bytes, 0, Math.min(value.length, max)); - if (end) { - Arrays.fill(bytes, value.length, max, (byte) 0xff); - } - if (nextBA) { - bytes[max] = 0x01; - } - return bytes; - } -} \ No newline at end of file diff --git a/hbase-handler/src/test/queries/positive/hbase_custom_key.q b/hbase-handler/src/test/queries/positive/hbase_custom_key.q index b7c00c7..9dbb2a0 100644 --- a/hbase-handler/src/test/queries/positive/hbase_custom_key.q +++ b/hbase-handler/src/test/queries/positive/hbase_custom_key.q @@ -3,7 +3,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ( "hbase.table.name" = "hbase_custom", "hbase.columns.mapping" = ":key,cf:string", - "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory"); + "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.SampleHBaseKeyFactory"); CREATE EXTERNAL TABLE hbase_ck_2(key string, value string) STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' diff --git a/hbase-handler/src/test/queries/positive/hbase_custom_key2.q b/hbase-handler/src/test/queries/positive/hbase_custom_key2.q index 583de06..9fba4f6 100644 --- a/hbase-handler/src/test/queries/positive/hbase_custom_key2.q +++ b/hbase-handler/src/test/queries/positive/hbase_custom_key2.q @@ -3,7 +3,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ( "hbase.table.name" = "hbase_custom2", "hbase.columns.mapping" = ":key,cf:string", - "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory2"); + "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.SampleHBaseKeyFactory2"); from src tablesample (5 rows) insert into table hbase_ck_4 select diff --git a/hbase-handler/src/test/queries/positive/hbase_custom_key3.q b/hbase-handler/src/test/queries/positive/hbase_custom_key3.q index 3d4a687..22d2c9e 100644 --- a/hbase-handler/src/test/queries/positive/hbase_custom_key3.q +++ b/hbase-handler/src/test/queries/positive/hbase_custom_key3.q @@ -3,7 +3,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ( "hbase.table.name" = "hbase_custom3", "hbase.columns.mapping" = ":key,cf:string", - "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory3"); + "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.SampleHBaseKeyFactory3"); from src tablesample (5 rows) insert into table hbase_ck_5 select diff --git a/hbase-handler/src/test/results/positive/hbase_custom_key.q.out b/hbase-handler/src/test/results/positive/hbase_custom_key.q.out index b47fed2..e5bc947 100644 --- a/hbase-handler/src/test/results/positive/hbase_custom_key.q.out +++ b/hbase-handler/src/test/results/positive/hbase_custom_key.q.out @@ -3,7 +3,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ( "hbase.table.name" = "hbase_custom", "hbase.columns.mapping" = ":key,cf:string", - "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory") + "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.SampleHBaseKeyFactory") PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@hbase_ck_1 @@ -12,7 +12,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ( "hbase.table.name" = "hbase_custom", "hbase.columns.mapping" = 
":key,cf:string", - "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory") + "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.SampleHBaseKeyFactory") POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@hbase_ck_1 diff --git a/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out b/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out index 53d58ab..c9b5a84 100644 --- a/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out +++ b/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out @@ -3,7 +3,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ( "hbase.table.name" = "hbase_custom2", "hbase.columns.mapping" = ":key,cf:string", - "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory2") + "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.SampleHBaseKeyFactory2") PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@hbase_ck_4 @@ -12,7 +12,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ( "hbase.table.name" = "hbase_custom2", "hbase.columns.mapping" = ":key,cf:string", - "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory2") + "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.SampleHBaseKeyFactory2") POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@hbase_ck_4 diff --git a/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out b/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out index c8aad2b..76848e0 100644 --- a/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out +++ b/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out @@ -3,7 +3,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ( "hbase.table.name" = "hbase_custom3", "hbase.columns.mapping" = ":key,cf:string", - "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory3") + "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.SampleHBaseKeyFactory3") PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@hbase_ck_5 @@ -12,7 +12,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ( "hbase.table.name" = "hbase_custom3", "hbase.columns.mapping" = ":key,cf:string", - "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory3") + "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.SampleHBaseKeyFactory3") POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@hbase_ck_5 diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java new file mode 100644 index 0000000..f79372d --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java @@ -0,0 +1,211 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.metastore; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; +import org.apache.hadoop.hive.metastore.api.HiveObjectRef; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.Role; +import org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener; +import org.apache.hadoop.hive.shims.ShimLoader; +import org.junit.Test; + +/** + * Test case for {@link MetaStoreAuthzAPIAuthorizerEmbedOnly} The authorizer is + * supposed to allow api calls for metastore in embedded mode while disallowing + * them in remote metastore mode. Note that this is an abstract class, the + * subclasses that set the mode and the tests here get run as part of their + * testing. + */ +public abstract class AbstractTestAuthorizationApiAuthorizer { + protected static boolean isRemoteMetastoreMode; + private static HiveConf hiveConf; + private static HiveMetaStoreClient msc; + + protected static void setup() throws Exception { + System.err.println("Running with remoteMode = " + isRemoteMetastoreMode); + System.setProperty("hive.metastore.pre.event.listeners", + AuthorizationPreEventListener.class.getName()); + System.setProperty("hive.security.metastore.authorization.manager", + MetaStoreAuthzAPIAuthorizerEmbedOnly.class.getName()); + + hiveConf = new HiveConf(); + if (isRemoteMetastoreMode) { + int port = MetaStoreUtils.findFreePort(); + MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge()); + hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); + } + hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); + hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); + hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); + hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + + msc = new HiveMetaStoreClient(hiveConf, null); + + } + + interface FunctionInvoker { + public void invoke() throws Exception; + } + + /** + * Test the if authorization failed/passed for FunctionInvoker that invokes a metastore client + * api call + * @param mscFunctionInvoker + * @throws Exception + */ + private void testFunction(FunctionInvoker mscFunctionInvoker) throws Exception { + boolean caughtEx = false; + try { + try { + mscFunctionInvoker.invoke(); + } catch (RuntimeException e) { + // A hack to verify that authorization check passed. Exception can be thrown be cause + // the functions are not being called with valid params. + // verify that exception has come from ObjectStore code, which means that the + // authorization checks passed. 
+ String exStackString = ExceptionUtils.getStackTrace(e); + assertTrue("Verifying this exception came after authorization check", + exStackString.contains("org.apache.hadoop.hive.metastore.ObjectStore")); + // If its not an exception caused by auth check, ignore it + } + assertFalse("Authz Exception should have been thrown in remote mode", isRemoteMetastoreMode); + System.err.println("No auth exception thrown"); + } catch (MetaException e) { + System.err.println("Caught exception"); + caughtEx = true; + assertTrue(e.getMessage().contains(MetaStoreAuthzAPIAuthorizerEmbedOnly.errMsg)); + } + if (!isRemoteMetastoreMode) { + assertFalse("No exception should be thrown in embedded mode", caughtEx); + } + } + + @Test + public void testGrantPriv() throws Exception { + FunctionInvoker invoker = new FunctionInvoker() { + @Override + public void invoke() throws Exception { + msc.grant_privileges(new PrivilegeBag(new ArrayList())); + } + }; + testFunction(invoker); + } + + @Test + public void testRevokePriv() throws Exception { + FunctionInvoker invoker = new FunctionInvoker() { + @Override + public void invoke() throws Exception { + msc.revoke_privileges(new PrivilegeBag(new ArrayList()), false); + } + }; + testFunction(invoker); + } + + @Test + public void testGrantRole() throws Exception { + FunctionInvoker invoker = new FunctionInvoker() { + @Override + public void invoke() throws Exception { + msc.grant_role(null, null, null, null, null, true); + } + }; + testFunction(invoker); + } + + @Test + public void testRevokeRole() throws Exception { + FunctionInvoker invoker = new FunctionInvoker() { + @Override + public void invoke() throws Exception { + msc.revoke_role(null, null, null, false); + } + }; + testFunction(invoker); + } + + @Test + public void testCreateRole() throws Exception { + FunctionInvoker invoker = new FunctionInvoker() { + @Override + public void invoke() throws Exception { + msc.create_role(new Role("role1", 0, "owner")); + } + }; + testFunction(invoker); + } + + @Test + public void testDropRole() throws Exception { + FunctionInvoker invoker = new FunctionInvoker() { + @Override + public void invoke() throws Exception { + msc.drop_role(null); + } + }; + testFunction(invoker); + } + + @Test + public void testListRoles() throws Exception { + FunctionInvoker invoker = new FunctionInvoker() { + @Override + public void invoke() throws Exception { + msc.list_roles(null, null); + } + }; + testFunction(invoker); + } + + @Test + public void testGetPrivSet() throws Exception { + FunctionInvoker invoker = new FunctionInvoker() { + @Override + public void invoke() throws Exception { + msc.get_privilege_set(new HiveObjectRef(), null, new ArrayList()); + } + }; + testFunction(invoker); + } + + @Test + public void testListPriv() throws Exception { + FunctionInvoker invoker = new FunctionInvoker() { + @Override + public void invoke() throws Exception { + msc.list_privileges(null, PrincipalType.USER, new HiveObjectRef()); + } + }; + testFunction(invoker); + } + + + +} diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java deleted file mode 100644 index e5182b2..0000000 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java +++ /dev/null @@ -1,211 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.metastore; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.util.ArrayList; - -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; -import org.apache.hadoop.hive.metastore.api.HiveObjectRef; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.PrincipalType; -import org.apache.hadoop.hive.metastore.api.PrivilegeBag; -import org.apache.hadoop.hive.metastore.api.Role; -import org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly; -import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener; -import org.apache.hadoop.hive.shims.ShimLoader; -import org.junit.Test; - -/** - * Test case for {@link MetaStoreAuthzAPIAuthorizerEmbedOnly} The authorizer is - * supposed to allow api calls for metastore in embedded mode while disallowing - * them in remote metastore mode. Note that this is an abstract class, the - * subclasses that set the mode and the tests here get run as part of their - * testing. 
- */ -public abstract class TestAuthorizationApiAuthorizer { - protected static boolean isRemoteMetastoreMode; - private static HiveConf hiveConf; - private static HiveMetaStoreClient msc; - - protected static void setup() throws Exception { - System.err.println("Running with remoteMode = " + isRemoteMetastoreMode); - System.setProperty("hive.metastore.pre.event.listeners", - AuthorizationPreEventListener.class.getName()); - System.setProperty("hive.security.metastore.authorization.manager", - MetaStoreAuthzAPIAuthorizerEmbedOnly.class.getName()); - - hiveConf = new HiveConf(); - if (isRemoteMetastoreMode) { - int port = MetaStoreUtils.findFreePort(); - MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge()); - hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); - } - hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); - - msc = new HiveMetaStoreClient(hiveConf, null); - - } - - interface FunctionInvoker { - public void invoke() throws Exception; - } - - /** - * Test the if authorization failed/passed for FunctionInvoker that invokes a metastore client - * api call - * @param mscFunctionInvoker - * @throws Exception - */ - private void testFunction(FunctionInvoker mscFunctionInvoker) throws Exception { - boolean caughtEx = false; - try { - try { - mscFunctionInvoker.invoke(); - } catch (RuntimeException e) { - // A hack to verify that authorization check passed. Exception can be thrown be cause - // the functions are not being called with valid params. - // verify that exception has come from ObjectStore code, which means that the - // authorization checks passed. 
- String exStackString = ExceptionUtils.getStackTrace(e); - assertTrue("Verifying this exception came after authorization check", - exStackString.contains("org.apache.hadoop.hive.metastore.ObjectStore")); - // If it's not an exception caused by the auth check, ignore it - } - assertFalse("Authz Exception should have been thrown in remote mode", isRemoteMetastoreMode); - System.err.println("No auth exception thrown"); - } catch (MetaException e) { - System.err.println("Caught exception"); - caughtEx = true; - assertTrue(e.getMessage().contains(MetaStoreAuthzAPIAuthorizerEmbedOnly.errMsg)); - } - if (!isRemoteMetastoreMode) { - assertFalse("No exception should be thrown in embedded mode", caughtEx); - } - } - - @Test - public void testGrantPriv() throws Exception { - FunctionInvoker invoker = new FunctionInvoker() { - @Override - public void invoke() throws Exception { - msc.grant_privileges(new PrivilegeBag(new ArrayList())); - } - }; - testFunction(invoker); - } - - @Test - public void testRevokePriv() throws Exception { - FunctionInvoker invoker = new FunctionInvoker() { - @Override - public void invoke() throws Exception { - msc.revoke_privileges(new PrivilegeBag(new ArrayList()), false); - } - }; - testFunction(invoker); - } - - @Test - public void testGrantRole() throws Exception { - FunctionInvoker invoker = new FunctionInvoker() { - @Override - public void invoke() throws Exception { - msc.grant_role(null, null, null, null, null, true); - } - }; - testFunction(invoker); - } - - @Test - public void testRevokeRole() throws Exception { - FunctionInvoker invoker = new FunctionInvoker() { - @Override - public void invoke() throws Exception { - msc.revoke_role(null, null, null, false); - } - }; - testFunction(invoker); - } - - @Test - public void testCreateRole() throws Exception { - FunctionInvoker invoker = new FunctionInvoker() { - @Override - public void invoke() throws Exception { - msc.create_role(new Role("role1", 0, "owner")); - } - }; - testFunction(invoker); - } - - @Test - public void testDropRole() throws Exception { - FunctionInvoker invoker = new FunctionInvoker() { - @Override - public void invoke() throws Exception { - msc.drop_role(null); - } - }; - testFunction(invoker); - } - - @Test - public void testListRoles() throws Exception { - FunctionInvoker invoker = new FunctionInvoker() { - @Override - public void invoke() throws Exception { - msc.list_roles(null, null); - } - }; - testFunction(invoker); - } - - @Test - public void testGetPrivSet() throws Exception { - FunctionInvoker invoker = new FunctionInvoker() { - @Override - public void invoke() throws Exception { - msc.get_privilege_set(new HiveObjectRef(), null, new ArrayList()); - } - }; - testFunction(invoker); - } - - @Test - public void testListPriv() throws Exception { - FunctionInvoker invoker = new FunctionInvoker() { - @Override - public void invoke() throws Exception { - msc.list_privileges(null, PrincipalType.USER, new HiveObjectRef()); - } - }; - testFunction(invoker); - } - - - -} diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java index b7d3cfa..90d732d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java @@ -3,14 +3,14 @@ import org.junit.BeforeClass; /** - * Test {@link
TestAuthorizationApiAuthorizer} in embedded mode of metastore + * Test {@link AbstractTestAuthorizationApiAuthorizer} in embedded mode of metastore */ -public class TestAuthzApiEmbedAuthorizerInEmbed extends TestAuthorizationApiAuthorizer { +public class TestAuthzApiEmbedAuthorizerInEmbed extends AbstractTestAuthorizationApiAuthorizer { @BeforeClass public static void setup() throws Exception { isRemoteMetastoreMode = false; // embedded metastore mode - TestAuthorizationApiAuthorizer.setup(); + AbstractTestAuthorizationApiAuthorizer.setup(); } } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java index d775e72..ecba4c9 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java @@ -3,14 +3,14 @@ import org.junit.BeforeClass; /** - * Test {@link TestAuthorizationApiAuthorizer} in remote mode of metastore + * Test {@link AbstractTestAuthorizationApiAuthorizer} in remote mode of metastore */ -public class TestAuthzApiEmbedAuthorizerInRemote extends TestAuthorizationApiAuthorizer { +public class TestAuthzApiEmbedAuthorizerInRemote extends AbstractTestAuthorizationApiAuthorizer { @BeforeClass public static void setup() throws Exception { isRemoteMetastoreMode = true; // remote metastore mode - TestAuthorizationApiAuthorizer.setup(); + AbstractTestAuthorizationApiAuthorizer.setup(); } } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java new file mode 100644 index 0000000..dc9315f --- /dev/null +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java @@ -0,0 +1,93 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.tez; + +import java.io.IOException; +import java.net.URISyntaxException; + +import javax.security.auth.login.LoginException; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.tez.dag.api.TezException; + + +/** + * This class is needed for writing JUnit tests. To test the multi-session + * use case from HiveServer2, we need a session simulation.
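
Once this class is in place, a pool test can drive the simulated lifecycle without a Tez cluster. A hypothetical fragment (the flow is an assumption; only SampleTezSessionState's own methods, defined below, are taken from the patch):

    SampleTezSessionState session = new SampleTezSessionState("session-0");
    // open() resolves the current user through ShimLoader, so it assumes a
    // local Hadoop login context; it does not mark the session open.
    session.open(new HiveConf());
    session.setOpen(true);   // tests toggle the open flag explicitly
    session.close(true);     // keepTmpDir=true leaves the session marked open for reuse
    assert session.isOpen();
    session.close(false);    // keepTmpDir=false marks it closed
    assert !session.isOpen();
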
+ * + */ +public class SampleTezSessionState extends TezSessionState { + + private boolean open; + private final String sessionId; + private HiveConf hiveConf; + private String user; + private boolean doAsEnabled; + + public SampleTezSessionState(String sessionId) { + super(sessionId); + this.sessionId = sessionId; + } + + @Override + public boolean isOpen() { + return open; + } + + public void setOpen(boolean open) { + this.open = open; + } + + @Override + public void open(HiveConf conf) throws IOException, LoginException, URISyntaxException, + TezException { + this.hiveConf = conf; + UserGroupInformation ugi; + ugi = ShimLoader.getHadoopShims().getUGIForConf(conf); + user = ShimLoader.getHadoopShims().getShortUserName(ugi); + this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS); + } + + @Override + public void close(boolean keepTmpDir) throws TezException, IOException { + open = keepTmpDir; + } + + @Override + public HiveConf getConf() { + return this.hiveConf; + } + + @Override + public String getSessionId() { + return sessionId; + } + + @Override + public String getUser() { + return user; + } + + @Override + public boolean getDoAsEnabled() { + return this.doAsEnabled; + } +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java index b6834be..37a84aa 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java @@ -42,7 +42,7 @@ public TestTezSessionPoolManager() { @Override public TezSessionState createSession(String sessionId) { - return new TestTezSessionState(sessionId); + return new SampleTezSessionState(sessionId); } } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionState.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionState.java deleted file mode 100644 index 63687eb..0000000 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionState.java +++ /dev/null @@ -1,93 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.exec.tez; - -import java.io.IOException; -import java.net.URISyntaxException; - -import javax.security.auth.login.LoginException; - -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.shims.ShimLoader; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.tez.dag.api.TezException; - - -/** - * This class is needed for writing junit tests. For testing the multi-session - * use case from hive server 2, we need a session simulation. 
- * - */ -public class TestTezSessionState extends TezSessionState { - - private boolean open; - private final String sessionId; - private HiveConf hiveConf; - private String user; - private boolean doAsEnabled; - - public TestTezSessionState(String sessionId) { - super(sessionId); - this.sessionId = sessionId; - } - - @Override - public boolean isOpen() { - return open; - } - - public void setOpen(boolean open) { - this.open = open; - } - - @Override - public void open(HiveConf conf) throws IOException, LoginException, URISyntaxException, - TezException { - this.hiveConf = conf; - UserGroupInformation ugi; - ugi = ShimLoader.getHadoopShims().getUGIForConf(conf); - user = ShimLoader.getHadoopShims().getShortUserName(ugi); - this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS); - } - - @Override - public void close(boolean keepTmpDir) throws TezException, IOException { - open = keepTmpDir; - } - - @Override - public HiveConf getConf() { - return this.hiveConf; - } - - @Override - public String getSessionId() { - return sessionId; - } - - @Override - public String getUser() { - return user; - } - - @Override - public boolean getDoAsEnabled() { - return this.doAsEnabled; - } -} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/AbstractTestGenericUDFOPNumeric.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/AbstractTestGenericUDFOPNumeric.java new file mode 100644 index 0000000..252bdfd --- /dev/null +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/AbstractTestGenericUDFOPNumeric.java @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.udf.generic; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.junit.Assert; + +public abstract class AbstractTestGenericUDFOPNumeric { + public AbstractTestGenericUDFOPNumeric() { + // Arithmetic operations rely on getting conf from SessionState, need to initialize here. 
+ SessionState ss = new SessionState(new HiveConf()); + ss.getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest"); + SessionState.setCurrentSessionState(ss); + } + + protected void verifyReturnType(GenericUDF udf, + String typeStr1, String typeStr2, String expectedTypeStr) throws HiveException { + // Look up type infos for our input types and the expected return type + PrimitiveTypeInfo type1 = TypeInfoFactory.getPrimitiveTypeInfo(typeStr1); + PrimitiveTypeInfo type2 = TypeInfoFactory.getPrimitiveTypeInfo(typeStr2); + PrimitiveTypeInfo expectedType = TypeInfoFactory.getPrimitiveTypeInfo(expectedTypeStr); + + // Initialize the UDF, which determines the return type for the given inputs. + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type1), + PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type2) + }; + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + + Assert.assertEquals("Return type for " + udf.getDisplayString(new String[] {typeStr1, typeStr2}), + expectedType, oi.getTypeInfo()); + } +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java index 4c5b3a5..6fa3b3f 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java @@ -40,7 +40,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFOPDivide extends TestGenericUDFOPNumeric { +public class TestGenericUDFOPDivide extends AbstractTestGenericUDFOPNumeric { @Test public void testByteDivideShort() throws HiveException { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java index 2f430a6..24618c9 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java @@ -39,7 +39,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFOPMinus extends TestGenericUDFOPNumeric { +public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric { @Test public void testByteMinusShort() throws HiveException { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java index abc0069..e0c290e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java @@ -36,7 +36,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFOPMod extends TestGenericUDFOPNumeric { +public class TestGenericUDFOPMod extends AbstractTestGenericUDFOPNumeric { @Test public void testModByZero1() throws HiveException { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java index 4cd2574..696682f 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java @@ -39,7 +39,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFOPMultiply extends TestGenericUDFOPNumeric { +public class
TestGenericUDFOPMultiply extends AbstractTestGenericUDFOPNumeric { @Test public void testByteTimesShort() throws HiveException { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java deleted file mode 100644 index 9a56ac1..0000000 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java +++ /dev/null @@ -1,56 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.udf.generic; - -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; -import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; -import org.junit.Assert; - -public abstract class TestGenericUDFOPNumeric { - public TestGenericUDFOPNumeric() { - // Arithmetic operations rely on getting conf from SessionState, need to initialize here. - SessionState ss = new SessionState(new HiveConf()); - ss.getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest"); - SessionState.setCurrentSessionState(ss); - } - - protected void verifyReturnType(GenericUDF udf, - String typeStr1, String typeStr2, String expectedTypeStr) throws HiveException { - // Lookup type infos for our input types and expected return type - PrimitiveTypeInfo type1 = TypeInfoFactory.getPrimitiveTypeInfo(typeStr1); - PrimitiveTypeInfo type2 = TypeInfoFactory.getPrimitiveTypeInfo(typeStr2); - PrimitiveTypeInfo expectedType = TypeInfoFactory.getPrimitiveTypeInfo(expectedTypeStr); - - // Initialize UDF which will output the return type for the UDF. 
- ObjectInspector[] inputOIs = { - PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type1), - PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type2) - }; - PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); - - Assert.assertEquals("Return type for " + udf.getDisplayString(new String[] {typeStr1, typeStr2}), - expectedType, oi.getTypeInfo()); - } -} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java index 754b539..4b2f7fb 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java @@ -39,7 +39,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFOPPlus extends TestGenericUDFOPNumeric { +public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric { @Test public void testBytePlusShort() throws HiveException { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java index 30f04d9..57e9176 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java @@ -36,7 +36,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFPosMod extends TestGenericUDFOPNumeric { +public class TestGenericUDFPosMod extends AbstractTestGenericUDFOPNumeric { @Test public void testPosModByZero1() throws HiveException {
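
The remaining renames above rebind the arithmetic UDF tests to the new abstract base without touching their test bodies. For illustration, a hypothetical subclass in the same style (imports as in the sibling tests above; the expected smallint promotion is an assumption about hive.compat=latest behavior, not something stated in this patch):

    public class TestGenericUDFOPPlusSketch extends AbstractTestGenericUDFOPNumeric {
      @Test
      public void testBytePlusShortSketch() throws HiveException {
        // verifyReturnType initializes the UDF against writable object inspectors
        // for the two input types and asserts on the inferred return type.
        verifyReturnType(new GenericUDFOPPlus(), "tinyint", "smallint", "smallint");
      }
    }
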