Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java =================================================================== --- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java (revision 1143958) +++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java (working copy) @@ -30,8 +30,8 @@ * */ public class HiveResultSetMetaData implements java.sql.ResultSetMetaData { - private List<String> columnNames; - private List<String> columnTypes; + private final List<String> columnNames; + private final List<String> columnTypes; public HiveResultSetMetaData(List<String> columnNames, List<String> columnTypes) { @@ -117,6 +117,8 @@ return Types.INTEGER; } else if ("bigint".equalsIgnoreCase(type)) { return Types.BIGINT; + } else if ("ip".equalsIgnoreCase(type)) { + return Types.VARCHAR; } else if (type.startsWith("map<")) { return Types.VARCHAR; } else if (type.startsWith("array<")) { @@ -157,6 +159,8 @@ return Constants.INT_TYPE_NAME; } else if ("bigint".equalsIgnoreCase(type)) { return Constants.BIGINT_TYPE_NAME; + } else if ("ip".equalsIgnoreCase(type)) { + return Constants.IP_TYPE_NAME; } else if (type.startsWith("map<")) { return Constants.STRING_TYPE_NAME; } else if (type.startsWith("array<")) { Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (working copy) @@ -22,12 +22,14 @@ import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; +import org.apache.hadoop.hive.serde2.io.IpWritable; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.IpObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; @@ -41,7 +43,7 @@ /** * Returns the digit represented by character b. - * + * * @param b * The ascii code of the character * @param radix @@ -95,7 +97,7 @@ /** * Convert a UTF-8 byte array to String. - * + * * @param bytes * The byte[] containing the UTF-8 String. * @param start @@ -117,7 +119,7 @@ /** * Write the bytes with special characters escaped. - * + * * @param escaped * Whether the data should be written out in an escaped way. * @param escapeChar @@ -151,7 +153,7 @@ /** * Write out the text representation of a Primitive Object to a UTF8 byte * stream.
- * + * * @param out * The UTF8 byte OutputStream * @param o @@ -208,6 +210,11 @@ needsEscape); break; } + case IP: { + IpWritable i = ((IpObjectInspector) oi).getPrimitiveWritableObject(o); + out.write(i.toString().getBytes()); + break; + } default: { throw new RuntimeException("Hive internal error."); } Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (working copy) @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDoubleObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyFloatObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector; +import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIpObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyLongObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector; @@ -75,6 +76,8 @@ return new LazyDouble((LazyDoubleObjectInspector) oi); case STRING: return new LazyString((LazyStringObjectInspector) oi); + case IP: + return new LazyIp((LazyIpObjectInspector) oi); default: throw new RuntimeException("Internal error: no LazyObject for " + p); } @@ -104,7 +107,7 @@ /** * Create a hierarchical ObjectInspector for LazyObject with the given * typeInfo. - * + * * @param typeInfo * The type information for the LazyObject * @param separator @@ -174,7 +177,7 @@ /** * Create a hierarchical ObjectInspector for LazyStruct with the given * columnNames and columnTypeInfos. - * + * * @param lastColumnTakesRest * whether the last column of the struct should take the rest of the * row if there are extra fields. @@ -199,7 +202,7 @@ /** * Create a hierarchical ObjectInspector for ColumnarStruct with the given * columnNames and columnTypeInfos. - * + * * @see LazyFactory#createLazyObjectInspector(TypeInfo, byte[], int, Text, * boolean, byte) */ Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyIp.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyIp.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyIp.java (revision 0) @@ -0,0 +1,48 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.serde2.lazy; + +import org.apache.hadoop.hive.serde2.io.IpWritable; +import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIpObjectInspector; + +public class LazyIp extends LazyPrimitive<LazyIpObjectInspector, IpWritable> { + + private final StringBuilder sb; + + public LazyIp(LazyIpObjectInspector oi) { + super(oi); + data = new IpWritable(); + sb = new StringBuilder(); + } + + public LazyIp(LazyIp copy) { + super(copy); + data = new IpWritable(copy.data); + sb = new StringBuilder(); + } + + @Override + public void init(ByteArrayRef bytes, int start, int length) { + sb.setLength(0); + byte[] b = bytes.getData(); + for (int i = 0; i < length; i++) { + sb.append((char) b[start + i]); + } + data.set(sb.toString()); + } +} Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyIpObjectInspector.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyIpObjectInspector.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyIpObjectInspector.java (revision 0) @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive; + +import org.apache.hadoop.hive.serde2.io.IpWritable; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.IpObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; + +public class LazyIpObjectInspector extends + AbstractPrimitiveLazyObjectInspector<IpWritable> implements + IpObjectInspector { + + LazyIpObjectInspector() { + super(PrimitiveObjectInspectorUtils.ipTypeEntry); + } + + public Object copyObject(Object o) { + return o; + } + + public byte[] getPrimitiveJavaObject(Object o) { + return o == null ? null : getPrimitiveWritableObject(o).get(); + } +} Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java (working copy) @@ -53,6 +53,8 @@ new LazyDoubleObjectInspector(); public static final LazyVoidObjectInspector LAZY_VOID_OBJECT_INSPECTOR = new LazyVoidObjectInspector(); + public static final LazyIpObjectInspector LAZY_IP_OBJECT_INSPECTOR = + new LazyIpObjectInspector(); static HashMap<ArrayList<Object>, LazyStringObjectInspector> cachedLazyStringObjectInspector = new HashMap<ArrayList<Object>, LazyStringObjectInspector>(); @@ -93,6 +95,8 @@ return getLazyStringObjectInspector(escaped, escapeChar); case VOID: return LAZY_VOID_OBJECT_INSPECTOR; + case IP: + return LAZY_IP_OBJECT_INSPECTOR; default: throw new RuntimeException("Internal error: Cannot find ObjectInspector " + " for " + primitiveCategory); Index: serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (working copy) @@ -35,6 +35,7 @@ import org.apache.hadoop.hive.serde2.SerDeStats; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; +import org.apache.hadoop.hive.serde2.io.IpWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; @@ -49,6 +50,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.IpObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; @@ -72,17 +74,23 @@ * BinarySortableSerDe can be used to write data in a way that the data can be * compared byte-by-byte with the same order.
* - * The data format: NULL: a single byte \0 NON-NULL Primitives: ALWAYS prepend a - * single byte \1, and then: Boolean: FALSE = \1, TRUE = \2 Byte: flip the - * sign-bit to make sure negative comes before positive Short: flip the sign-bit - * to make sure negative comes before positive Int: flip the sign-bit to make - * sure negative comes before positive Long: flip the sign-bit to make sure - * negative comes before positive Double: flip the sign-bit for positive double, - * and all bits for negative double values String: NULL-terminated UTF-8 string, - * with NULL escaped to \1 \1, and \1 escaped to \1 \2 NON-NULL Complex Types: - * ALWAYS prepend a single byte \1, and then: Struct: one field by one field. - * List: \1 followed by each element, and \0 to terminate Map: \1 followed by - * each key and then each value, and \0 to terminate + * The data format: + * NULL: a single byte \0 + * NON-NULL Primitives: ALWAYS prepend a single byte \1, and then: + * Boolean: FALSE = \1, TRUE = \2 + * Byte: flip the sign-bit to make sure negative comes before positive + * Short: flip the sign-bit to make sure negative comes before positive + * Int: flip the sign-bit to make sure negative comes before positive + * Long: flip the sign-bit to make sure negative comes before positive + * Double: flip the sign-bit for positive double, + * and all bits for negative double values + * String: NULL-terminated UTF-8 string, with NULL escaped to \1 \1, + * and \1 escaped to \1 \2 + * IP: prepend \0 for IPv4, \1 for IPv6, then the raw address bytes + * NON-NULL Complex Types: ALWAYS prepend a single byte \1, and then: + * Struct: one field by one field. + * List: \1 followed by each element, and \0 to terminate + * Map: \1 followed by each key and then each value, and \0 to terminate * * This SerDe takes an additional parameter SERIALIZATION_SORT_ORDER which is a * string containing only "+" and "-". The length of the string should equal to @@ -310,6 +318,23 @@ } return r; } + case IP: { + IpWritable r = reuse == null ?
new IpWritable() : (IpWritable) reuse; + byte b = buffer.read(invert); + byte[] bytes; + if (b == 1) { + // IPv6 + bytes = new byte[16]; + } else { + // IPv4: any other header byte means a raw 4-byte address + bytes = new byte[4]; + } + for (int i = 0; i < bytes.length; i++) { + bytes[i] = buffer.read(invert); + } + r.set(bytes); + return r; + } default: { throw new RuntimeException("Unrecognized type: " + ptype.getPrimitiveCategory()); @@ -539,6 +564,19 @@ buffer.write((byte) 0, invert); return; } + case IP: { + IpWritable iw = ((IpObjectInspector) poi).getPrimitiveWritableObject(o); + if (iw.getVersion() == 4) { + buffer.write((byte) 0, invert); + } else if (iw.getVersion() == 6) { + buffer.write((byte) 1, invert); + } + byte[] data = iw.get(); + for (int i = 0; i < data.length; i++) { + buffer.write(data[i], invert); + } + return; + } default: { throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory()); Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java (working copy) @@ -24,6 +24,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.serde2.ByteStream.Output; +import org.apache.hadoop.hive.serde2.io.IpWritable; import org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; @@ -186,6 +187,10 @@ recordInfo.elementOffset = vInt.length; recordInfo.elementSize = vInt.value; break; + case IP: + recordInfo.elementOffset = 0; + recordInfo.elementSize = IpWritable.getIpLength(bytes[offset]); + break; default: { throw new RuntimeException("Unrecognized primitive type: " + primitiveCategory); Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (working copy) @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.serde2.SerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeStats; +import org.apache.hadoop.hive.serde2.io.IpWritable; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; @@ -46,6 +47,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.IpObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; @@ -343,6 +345,12 @@ byteStream.write(data, 0, length); return; } + case IP: { + IpObjectInspector ioi = (IpObjectInspector) poi; + IpWritable i =
ioi.getPrimitiveWritableObject(obj); + i.writeToByteStream(byteStream); + return; + } default: { throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory()); Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java (working copy) @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDoubleObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableFloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIpObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector; @@ -64,6 +65,8 @@ return new LazyBinaryString((WritableStringObjectInspector) oi); case VOID: // for NULL return new LazyBinaryVoid((WritableVoidObjectInspector) oi); + case IP: + return new LazyBinaryIp((WritableIpObjectInspector) oi); default: throw new RuntimeException("Internal error: no LazyBinaryObject for " + p); } Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryIp.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryIp.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryIp.java (revision 0) @@ -0,0 +1,42 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.serde2.lazybinary; + +import org.apache.hadoop.hive.serde2.io.IpWritable; +import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIpObjectInspector; + +public class LazyBinaryIp extends + LazyBinaryPrimitive<WritableIpObjectInspector, IpWritable> { + + LazyBinaryIp(WritableIpObjectInspector oi) { + super(oi); + data = new IpWritable(); + } + + public LazyBinaryIp(LazyBinaryIp copy) { + super(copy); + data = new IpWritable(copy.data); + } + + @Override + public void init(ByteArrayRef bytes, int start, int length) { + assert (length > -1); + data.setSerialized(bytes.getData(), start, length); + } +} Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (working copy) @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableDoubleObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableFloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIpObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector; @@ -107,6 +108,10 @@ return new PrimitiveObjectInspectorConverter.StringConverter( (PrimitiveObjectInspector) inputOI); } + case IP: + return new PrimitiveObjectInspectorConverter.IpConverter( + (PrimitiveObjectInspector) inputOI, + (SettableIpObjectInspector) outputOI); default: throw new RuntimeException("Hive internal error: conversion of " + inputOI.getTypeName() + " to " + outputOI.getTypeName() Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (working copy) @@ -21,6 +21,8 @@ import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.lang.reflect.Type; +import java.net.InetAddress; +import java.net.UnknownHostException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; @@ -30,6 +32,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde2.io.IpWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions; import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector; @@ -37,6 +40,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; +import
org.apache.hadoop.hive.serde2.objectinspector.primitive.IpObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; @@ -440,6 +444,15 @@ } return r; } + case IP: + try { + InetAddress ia = InetAddress + .getByAddress(((IpObjectInspector) poi).getPrimitiveJavaObject(o)); + return ia.hashCode(); + } catch (UnknownHostException e) { + LOG.error("Unknown Host, setting hashcode = 0"); + return 0; + } default: { throw new RuntimeException("Unknown type: " + poi.getPrimitiveCategory()); @@ -584,6 +597,10 @@ .compareTo(s2)); } } + case IP: + IpWritable i1 = ((IpObjectInspector) poi1).getPrimitiveWritableObject(o1); + IpWritable i2 = ((IpObjectInspector) poi2).getPrimitiveWritableObject(o2); + return i1.compareTo(i2); default: { throw new RuntimeException("Unknown type: " + poi1.getPrimitiveCategory()); Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java (working copy) @@ -27,7 +27,7 @@ * The primitive types supported by Hive. */ public static enum PrimitiveCategory { - VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, UNKNOWN + VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, IP, UNKNOWN }; /** @@ -61,7 +61,7 @@ /** * Get a copy of the Object in the same class, so the return value can be * stored independently of the parameter. - * + * * If the Object is a Primitive Java Object, we just return the parameter * since Primitive Java Object is immutable. */ Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableIpObjectInspector.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableIpObjectInspector.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableIpObjectInspector.java (revision 0) @@ -0,0 +1,26 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + + +public interface SettableIpObjectInspector extends IpObjectInspector { + + Object set(Object o, byte[] bytes); + + Object create(byte[] bytes); +} Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/IpObjectInspector.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/IpObjectInspector.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/IpObjectInspector.java (revision 0) @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.serde2.io.IpWritable; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + + +public interface IpObjectInspector extends PrimitiveObjectInspector { + + IpWritable getPrimitiveWritableObject(Object o); + + byte[] getPrimitiveJavaObject(Object o); +} Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaIpObjectInspector.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaIpObjectInspector.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaIpObjectInspector.java (revision 0) @@ -0,0 +1,52 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.serde2.io.IpWritable; + + +public class JavaIpObjectInspector extends + AbstractPrimitiveJavaObjectInspector implements + SettableIpObjectInspector { + + protected JavaIpObjectInspector() { + super(PrimitiveObjectInspectorUtils.ipTypeEntry); + } + + @Override + public byte[] getPrimitiveJavaObject(Object o) { + return o == null ? 
null : (byte[]) o; + } + + public IpWritable getPrimitiveWritableObject(Object o) { + if (o == null) { + return null; + } + IpWritable i = new IpWritable(); + i.set((byte[]) o); + return i; + } + + public Object set(Object o, byte[] bytes) { + return bytes.clone(); + } + + public Object create(byte[] bytes) { + return bytes.clone(); + } +} \ No newline at end of file Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java (working copy) @@ -27,6 +27,7 @@ import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; +import org.apache.hadoop.hive.serde2.io.IpWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.lazy.LazyInteger; import org.apache.hadoop.hive.serde2.lazy.LazyLong; @@ -44,7 +45,7 @@ /** * ObjectInspectorFactory is the primary way to create new ObjectInspector * instances. - * + * * SerDe classes should call the static functions in this library to create an * ObjectInspector to return to the caller of SerDe2.getObjectInspector(). */ @@ -167,6 +168,9 @@ public static final PrimitiveTypeEntry shortTypeEntry = new PrimitiveTypeEntry( PrimitiveCategory.SHORT, Constants.SMALLINT_TYPE_NAME, Short.TYPE, Short.class, ShortWritable.class); + public static final PrimitiveTypeEntry ipTypeEntry = new PrimitiveTypeEntry( + PrimitiveCategory.IP, Constants.IP_TYPE_NAME, null, Object.class, + IpWritable.class); // The following is a complex type for special handling public static final PrimitiveTypeEntry unknownTypeEntry = new PrimitiveTypeEntry( @@ -182,6 +186,7 @@ registerType(doubleTypeEntry); registerType(byteTypeEntry); registerType(shortTypeEntry); + registerType(ipTypeEntry); registerType(unknownTypeEntry); } @@ -341,6 +346,10 @@ .getPrimitiveWritableObject(o2); return t1.equals(t2); } + case IP: + IpWritable t1 = ((IpObjectInspector) oi1).getPrimitiveWritableObject(o1); + IpWritable t2 = ((IpObjectInspector) oi2).getPrimitiveWritableObject(o2); + return t1.equals(t2); default: return false; } @@ -367,6 +376,8 @@ return ((DoubleObjectInspector) oi).get(o); case STRING: return Double.valueOf(((StringObjectInspector) oi).getPrimitiveJavaObject(o)); + case IP: + return 0; default: throw new NumberFormatException(); } @@ -437,6 +448,9 @@ result = s.length() != 0; } break; + case IP: + result = ((IpObjectInspector) oi).getPrimitiveWritableObject(o).getInt() != 0; + break; default: throw new RuntimeException("Hive 2 Internal error: unknown type: " + oi.getTypeName()); @@ -513,6 +527,9 @@ } break; } + case IP: + result = ((IpObjectInspector) oi).getPrimitiveWritableObject(o).getInt(); + break; default: { throw new RuntimeException("Hive 2 Internal error: unknown type: " + oi.getTypeName()); @@ -563,6 +580,9 @@ result = Long.parseLong(s); } break; + case IP: + result = ((IpObjectInspector) oi).getPrimitiveWritableObject(o).getInt(); + break; default: throw new RuntimeException("Hive 2 Internal error: unknown type: " + oi.getTypeName()); @@ -607,6 +627,8 @@ String s = soi.getPrimitiveJavaObject(o); result = Double.parseDouble(s); break; + case IP: + break; default: throw new RuntimeException("Hive 2
Internal error: unknown type: " + oi.getTypeName()); @@ -664,6 +686,9 @@ StringObjectInspector soi = (StringObjectInspector) oi; result = soi.getPrimitiveJavaObject(o); break; + case IP: + result = ((IpObjectInspector) oi).getPrimitiveWritableObject(o).toString(); + break; default: throw new RuntimeException("Hive 2 Internal error: unknown type: " + oi.getTypeName()); @@ -671,6 +696,50 @@ return result; } + public static byte[] getIp(Object o, PrimitiveObjectInspector oi) { + if (o == null) { + return null; + } + byte[] result = null; + switch (oi.getPrimitiveCategory()) { + case VOID: + result = null; + break; + case BOOLEAN: + result = IpWritable.getByteArray((((BooleanObjectInspector) oi).get(o) ? 1 : 0)); + break; + case BYTE: + result = IpWritable.getByteArray((long) ((ByteObjectInspector) oi).get(o)); + break; + case SHORT: + result = IpWritable.getByteArray((long) ((ShortObjectInspector) oi).get(o)); + break; + case INT: + result = IpWritable.getByteArray((long) ((IntObjectInspector) oi).get(o)); + break; + case LONG: + result = IpWritable.getByteArray(((LongObjectInspector) oi).get(o)); + break; + case FLOAT: + result = IpWritable.getByteArray((long) ((FloatObjectInspector) oi).get(o)); + break; + case DOUBLE: + result = IpWritable.getByteArray((long) ((DoubleObjectInspector) oi).get(o)); + break; + case STRING: + result = IpWritable.getByteArray(((StringObjectInspector) oi) + .getPrimitiveJavaObject(o)); + break; + case IP: + result = ((IpObjectInspector) oi).getPrimitiveJavaObject(o); + break; + default: + throw new RuntimeException("Hive 2 Internal error: unknown type: " + + oi.getTypeName()); + } + return result; + } + private PrimitiveObjectInspectorUtils() { // prevent instantiation } Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java (working copy) @@ -28,7 +28,7 @@ /** * PrimitiveObjectInspectorFactory is the primary way to create new * PrimitiveObjectInspector instances. 
- * + * The reason of having caches here is that ObjectInspector is because * ObjectInspectors do not have an internal state - so ObjectInspectors with the * same construction parameters should result in exactly the same @@ -54,6 +54,8 @@ new JavaStringObjectInspector(); public static final JavaVoidObjectInspector javaVoidObjectInspector = new JavaVoidObjectInspector(); + public static final JavaIpObjectInspector javaIpObjectInspector = + new JavaIpObjectInspector(); public static final WritableBooleanObjectInspector writableBooleanObjectInspector = new WritableBooleanObjectInspector(); @@ -73,6 +75,8 @@ new WritableStringObjectInspector(); public static final WritableVoidObjectInspector writableVoidObjectInspector = new WritableVoidObjectInspector(); + public static final WritableIpObjectInspector writableIpObjectInspector = + new WritableIpObjectInspector(); private static HashMap<PrimitiveCategory, AbstractPrimitiveWritableObjectInspector> cachedPrimitiveWritableInspectorCache = new HashMap<PrimitiveCategory, AbstractPrimitiveWritableObjectInspector>(); @@ -95,6 +99,8 @@ writableStringObjectInspector); cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.VOID, writableVoidObjectInspector); + cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.IP, + writableIpObjectInspector); } private static HashMap<PrimitiveCategory, AbstractPrimitiveJavaObjectInspector> cachedPrimitiveJavaInspectorCache = @@ -118,11 +124,13 @@ javaStringObjectInspector); cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.VOID, javaVoidObjectInspector); + cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.IP, + javaIpObjectInspector); } /** * Returns the PrimitiveWritableObjectInspector for the PrimitiveCategory. - * + * * @param primitiveCategory */ public static AbstractPrimitiveWritableObjectInspector getPrimitiveWritableObjectInspector( @@ -138,7 +146,7 @@ /** * Returns the PrimitiveJavaObjectInspector for the PrimitiveCategory. - * + * * @param primitiveCategory */ public static AbstractPrimitiveJavaObjectInspector getPrimitiveJavaObjectInspector( @@ -155,7 +163,7 @@ /** * Returns an ObjectInspector for a primitive Class. The Class can be a Hive * Writable class, or a Java Primitive Class. - * + * * A runtimeException will be thrown if the class is not recognized as a * primitive type by Hive. */ Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java (working copy) @@ -21,8 +21,8 @@ import org.apache.hadoop.hive.serde2.ByteStream; import org.apache.hadoop.hive.serde2.lazy.LazyInteger; import org.apache.hadoop.hive.serde2.lazy.LazyLong; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; import org.apache.hadoop.io.Text; @@ -238,9 +238,9 @@ * A helper class to convert any primitive to Text.
*/ public static class TextConverter implements Converter { - private PrimitiveObjectInspector inputOI; - private Text t = new Text(); - private ByteStream.Output out = new ByteStream.Output(); + private final PrimitiveObjectInspector inputOI; + private final Text t = new Text(); + private final ByteStream.Output out = new ByteStream.Output(); private static byte[] trueBytes = {'T', 'R', 'U', 'E'}; private static byte[] falseBytes = {'F', 'A', 'L', 'S', 'E'}; @@ -314,4 +314,21 @@ } } + public static class IpConverter implements Converter { + private final PrimitiveObjectInspector inputOI; + private final SettableIpObjectInspector outputOI; + private final Object o; + + public IpConverter(PrimitiveObjectInspector inputOI, + SettableIpObjectInspector outputOI) { + this.inputOI = inputOI; + this.outputOI = outputOI; + o = outputOI.create(new byte[4]); + } + + public Object convert(Object input) { + return outputOI.set(o, PrimitiveObjectInspectorUtils.getIp(input, inputOI)); + } + } + } Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableIpObjectInspector.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableIpObjectInspector.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableIpObjectInspector.java (revision 0) @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.serde2.io.IpWritable; + + +public class WritableIpObjectInspector extends + AbstractPrimitiveWritableObjectInspector implements + SettableIpObjectInspector { + + public WritableIpObjectInspector() { + super(PrimitiveObjectInspectorUtils.ipTypeEntry); + } + + @Override + public IpWritable getPrimitiveWritableObject(Object o) { + return o == null ? null : (IpWritable) o; + } + + public byte[] getPrimitiveJavaObject(Object o) { + return o == null ? null : ((IpWritable) o).get(); + } + + public Object copyObject(Object o) { + return new IpWritable((IpWritable) o); + } + + public Object set(Object o, byte[] bytes) { + ((IpWritable) o).set(bytes); + return o; + } + + public Object create(byte[] bytes) { + IpWritable o = new IpWritable(); + o.set(bytes); + return o; + } +} Index: serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java (revision 1143958) +++ serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java (working copy) @@ -27,7 +27,7 @@ /** * TypeInfoFactory can be used to create the TypeInfo object for any types.
- * + * * TypeInfo objects are all read-only so we can reuse them easily. * TypeInfoFactory has internal cache to make sure we don't create 2 TypeInfo * objects that represents the same type. @@ -62,7 +62,8 @@ public static final TypeInfo doubleTypeInfo = getPrimitiveTypeInfo(Constants.DOUBLE_TYPE_NAME); public static final TypeInfo byteTypeInfo = getPrimitiveTypeInfo(Constants.TINYINT_TYPE_NAME); public static final TypeInfo shortTypeInfo = getPrimitiveTypeInfo(Constants.SMALLINT_TYPE_NAME); - + public static final TypeInfo ipTypeInfo = getPrimitiveTypeInfo( + Constants.IP_TYPE_NAME); public static final TypeInfo unknownTypeInfo = getPrimitiveTypeInfo("unknown"); public static TypeInfo getPrimitiveTypeInfoFromPrimitiveWritable( Index: serde/src/java/org/apache/hadoop/hive/serde2/io/IpWritable.java =================================================================== --- serde/src/java/org/apache/hadoop/hive/serde2/io/IpWritable.java (revision 0) +++ serde/src/java/org/apache/hadoop/hive/serde2/io/IpWritable.java (revision 0) @@ -0,0 +1,395 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.serde2.io; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.io.OutputStream; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.serde2.ByteStream.Output; +import org.apache.hadoop.io.WritableComparable; + +/** + * + * IpWritable + * Stores both IPv4 and IPv6 addresses + * + * To avoid using a byte to store the length, we use a schema similar + * to that used in the Hadoop VInt: + * + * First byte -> length mapping + * 0xFB -> 5 + * 0xFF -> 17 + * + * If the first byte is none of the above, then assume 4 byte IPv4 address + * + * We avoid using 0xFC, 0xFD, 0xFE since they include IPv6 unicast addresses + * which may be present extensively for addresses on a local network. 
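+ * + * A worked example of the scheme above: "1.2.3.4" is stored as the four raw + * bytes {1, 2, 3, 4} with no header, "251.2.3.4" begins with 0xFB and so is + * stored with a leading 0xFB header byte (5 bytes total) so that the length + * can still be recovered from the first byte, and an IPv6 address is always + * stored as 0xFF followed by its 16 bytes.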
+ * + * http://www.iana.org/assignments/ipv6-address-space/ipv6-address-space.xml + */ +public class IpWritable implements WritableComparable { + private static final Log LOG = LogFactory.getLog(IpWritable.class); + + static final String IPV4_DELIMITER = "."; + static final String IPV6_DELIMITER = ":"; + + static final byte IPV4_HEADER = (byte) 0xFB; + static final byte IPV6_HEADER = (byte) 0xFF; + + static final int IPV4_LEN = 4; + static final int IPV4_LEN_WITH_HEADER = 5; + static final int IPV6_LEN_WITH_HEADER = 17; + + static final int IPV4_UNSERIALIZED_LEN = 4; + static final int IPV6_UNSERIALIZED_LEN = 16; + + private byte[] currentBytes; + private final byte[] internalBytes = new byte[IPV6_LEN_WITH_HEADER]; + private byte[] externalBytes; + private int offset; + + public IpWritable() { + currentBytes = internalBytes; + } + + public IpWritable(IpWritable copy) { + if (copy.usesInternal()) { + for (int i = 0; i < internalBytes.length; i++) { + internalBytes[i] = copy.internalBytes[i]; + } + currentBytes = internalBytes; + offset = 0; + } else { + // copy uses some external byte[] so we can share + externalBytes = copy.externalBytes; + currentBytes = externalBytes; + offset = copy.offset; + } + } + + private boolean usesInternal() { + return currentBytes == internalBytes; + } + + /** + * Data in bytes is already serialized, so just copy the pointers + * @param bytes + * @param offset + * @param length + */ + public void setSerialized(byte[] bytes, int offset, int length) { + externalBytes = bytes; + this.offset = offset; + currentBytes = externalBytes; + } + + /** + * For set(...) methods, data in bytes is NOT serialized + * so we must convert it + * @param bytes + */ + public void set(byte[] bytes) { + if (bytes == null) { + clearInternal(); + return; + } + set(bytes, 0, bytes.length); + } + + /** + * For set(...) 
methods, data in bytes is NOT serialized + * so we must convert it + * @param bytes + * @param offset + * @param length + */ + public void set(byte[] bytes, int offset, int length) { + int overhead = 0; + // Resolve collisions + if (length == IPV4_UNSERIALIZED_LEN) { + if (bytes[offset] == IPV4_HEADER || bytes[offset] == IPV6_HEADER) { + internalBytes[0] = IPV4_HEADER; + overhead = 1; + } + } else if (length == IPV6_UNSERIALIZED_LEN) { + internalBytes[0] = IPV6_HEADER; + overhead = 1; + } + + for (int i = 0; i < length; i++) { + internalBytes[i + overhead] = bytes[offset + i]; + } + currentBytes = internalBytes; + this.offset = 0; + } + + /** + * Converts a String representation of an IP address to byte array and set + * @param s + */ + public void set(String s) { + set(getByteArray(s)); + } + + /** + * + * @return Non-serialized byte[] with the address + */ + public byte[] get() { + int overhead = 0; + int len = IPV4_UNSERIALIZED_LEN; + if (currentBytes[offset] == IPV4_HEADER) { + overhead = 1; + } else if (currentBytes[offset] == IPV6_HEADER) { + len = IPV6_UNSERIALIZED_LEN; + overhead = 1; + } + byte[] b = createBytes(len); + + for (int i = 0; i < len; i++) { + b[i] = currentBytes[offset + overhead + i]; + } + return b; + } + + /** + * + * @return bottom 4 bytes of address as an integer + */ + public Integer getInt() { + int len = getLength(); + int retVal = 0; + int overhead = 0; + if (len == IPV4_LEN_WITH_HEADER || len == IPV6_LEN_WITH_HEADER) { + overhead = 1; + } + for (int i = overhead; i < len; i++) { + retVal <<= 8; + retVal |= 0xFF & currentBytes[offset + i]; + } + return retVal; + } + + private void clearInternal() { + for (int i = 0; i < internalBytes.length; i++) { + internalBytes[i] = (byte) 0x0; + } + } + + public void writeToByteStream(Output o) { + o.write(currentBytes, offset, getLength()); + } + + public void writeToByteStream(OutputStream o) throws IOException { + o.write(currentBytes, offset, getLength()); + } + + /** + * + * @return 6 if IPv6 address, 4 if IPv4 + */ + public int getVersion() { + if (getLength() == IPV6_LEN_WITH_HEADER) { + return 6; + } + return 4; + } + + private static byte[] createBytes(int length) { + return new byte[length]; + } + + /** + * + * @return length of byte array, including overhead byte if present + */ + private int getLength() { + return getIpLength(currentBytes[offset]); + } + + // Interpret long as IPv4 address + public static byte[] getByteArray(long value) { + return IpWritable.getByteArray(value, false); + } + + public static byte[] getByteArray(long value, boolean ipv6) { + byte[] b; + if (ipv6) { + b = createBytes(IPV6_UNSERIALIZED_LEN); + } else { + b = createBytes(IPV4_UNSERIALIZED_LEN); + } + + for (int i = 0; i < Math.min(b.length, 8); i++) { + b[b.length - i - 1] = (byte) ((value >> (8 * i)) & 0xFF); + } + return b; + } + + /** + * Converts a string to unserialized byte array + * + * IPv6 addresses are formed as 8 sets of 4 hexadecimal digits, 16 bytes total + * xxxx:yyyy:zzzz:...
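+ * + * For example, "0102:0304:0506:0708:090a:0b0c:0d0e:0f10" parses to the 16 + * bytes 0x01 through 0x10, and "1.2.3.4" parses to {1, 2, 3, 4}. Note that + * each IPv6 group must be exactly four hex digits: the parser reads + * charAt(0) through charAt(3) of every group, so compressed forms such as + * "2001:db8::1" are not handled.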
+ * + * @param value + * @return unserialized address bytes, or null if the string cannot be parsed + */ + public static byte[] getByteArray(String value) { + byte[] result = null; + if (value.indexOf('.') != -1) { + // Treat as IPv4 + String[] split = value.split("\\."); + if (split.length != 4) { + return null; + } + + result = createBytes(IPV4_UNSERIALIZED_LEN); + for (int i = 0; i < split.length; i++) { + result[i] = (byte) Integer.parseInt(split[i]); + } + } else if (value.indexOf(':') != -1) { + // Treat as IPv6 + String[] split = value.split(":"); + if (split.length != 8) { + return null; + } + result = createBytes(IPV6_UNSERIALIZED_LEN); + for (int i = 0; i < split.length; i++) { + // First two characters into a byte + result[i*2] = (byte) ((Character.digit(split[i].charAt(0), 16) << 4) + + Character.digit(split[i].charAt(1), 16)); + + // Second two characters into a byte + result[i*2+1] = (byte) ((Character.digit(split[i].charAt(2), 16) << 4) + + Character.digit(split[i].charAt(3), 16)); + } + } + return result; + } + + public void readFields(DataInput in) throws IOException { + in.readFully(internalBytes, 0, 1); + // Default: a raw header-less IPv4 address, of which the first byte + // has already been read, so 3 bytes remain. + int len = 3; + if (internalBytes[0] == IPV4_HEADER) { + len = IPV4_UNSERIALIZED_LEN; + } else if (internalBytes[0] == IPV6_HEADER) { + len = IPV6_UNSERIALIZED_LEN; + } + in.readFully(internalBytes, 1, len); + currentBytes = internalBytes; + offset = 0; + } + + public void write(DataOutput out) throws IOException { + out.write(currentBytes, offset, getLength()); + } + + /** + * Compares addresses as unsigned bytes; all IPv4 addresses sort before + * IPv6 addresses since shorter arrays compare as smaller. + * @param o the IpWritable to compare against + * @return a negative, zero, or positive value + */ + public int compareTo(Object o) { + IpWritable iw = (IpWritable) o; + byte[] thisBytes = this.get(); + byte[] thatBytes = iw.get(); + int lenDiff = thisBytes.length - thatBytes.length; + if (lenDiff != 0) { + return lenDiff; + } + for (int i = 0; i < thisBytes.length; i++) { + if (thisBytes[i] != thatBytes[i]) { + // Compare as unsigned so that e.g. 200.0.0.0 sorts after 100.0.0.0 + return (thisBytes[i] & 0xFF) - (thatBytes[i] & 0xFF); + } + } + return 0; + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof IpWritable)) { + return false; + } + return compareTo(o) == 0; + } + + @Override + public int hashCode() { + // Required to stay consistent with equals() + int hash = 0; + for (byte b : get()) { + hash = 31 * hash + b; + } + return hash; + } + + @Override + public String toString() { + int len = getLength(); + int overhead = 0; + String delimiter = IPV4_DELIMITER; + StringBuilder sb = new StringBuilder(); + if (len == IPV6_LEN_WITH_HEADER) { + delimiter = IPV6_DELIMITER; + + appendByteHexString(sb, currentBytes[offset+1]); + appendByteHexString(sb, currentBytes[offset+2]); + for (int i = 2; i < len-1; i+=2) { + sb.append(delimiter); + appendByteHexString(sb, currentBytes[offset+i+1]); + appendByteHexString(sb, currentBytes[offset+i+2]); + } + return sb.toString(); + } + + if (len == IPV4_LEN_WITH_HEADER) { + overhead = 1; + } + + sb.append((int) (currentBytes[offset+overhead] & 0xFF)); + for (int i = 1+overhead; i < len; i++) { + sb.append(delimiter); + sb.append((int) (currentBytes[offset+i] & 0xFF)); + } + return sb.toString(); + } + + /** + * Takes a byte and appends two hexadecimal digits to the StringBuilder + * corresponding to the byte + * + * @param sb + * @param b + */ + private void appendByteHexString(StringBuilder sb, byte b) { + String hex = Integer.toHexString(0xFF & b); + if (hex.length() == 1) { + sb.append('0'); + } + sb.append(hex); + } + + /** + * Returns the serialized length (header byte included, if any) implied by + * the first serialized byte. + * @param b the first serialized byte + * @return the total serialized length in bytes + */ + public static int getIpLength(byte b) { + if (b == IPV4_HEADER) { + return IPV4_LEN_WITH_HEADER; + } + if (b == IPV6_HEADER) { + return IPV6_LEN_WITH_HEADER; + } + return IPV4_LEN; + } + +} + Index: serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py =================================================================== --- serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py (revision
1143958) +++ serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py (working copy) @@ -33,6 +33,7 @@ DATE_TYPE_NAME = "date" DATETIME_TYPE_NAME = "datetime" TIMESTAMP_TYPE_NAME = "timestamp" +IP_TYPE_NAME = "ip" LIST_TYPE_NAME = "array" MAP_TYPE_NAME = "map" STRUCT_TYPE_NAME = "struct" @@ -52,6 +53,7 @@ "date", "datetime", "timestamp", + "ip", ]) CollectionTypes = set([ "array", Index: serde/src/gen/thrift/gen-cpp/serde_constants.cpp =================================================================== --- serde/src/gen/thrift/gen-cpp/serde_constants.cpp (revision 1143958) +++ serde/src/gen/thrift/gen-cpp/serde_constants.cpp (working copy) @@ -62,6 +62,8 @@ TIMESTAMP_TYPE_NAME = "timestamp"; + IP_TYPE_NAME = "ip"; + LIST_TYPE_NAME = "array"; MAP_TYPE_NAME = "map"; @@ -86,6 +88,7 @@ PrimitiveTypes.insert("date"); PrimitiveTypes.insert("datetime"); PrimitiveTypes.insert("timestamp"); + PrimitiveTypes.insert("ip"); CollectionTypes.insert("array"); CollectionTypes.insert("map"); Index: serde/src/gen/thrift/gen-cpp/serde_constants.h =================================================================== --- serde/src/gen/thrift/gen-cpp/serde_constants.h (revision 1143958) +++ serde/src/gen/thrift/gen-cpp/serde_constants.h (working copy) @@ -40,6 +40,7 @@ std::string DATE_TYPE_NAME; std::string DATETIME_TYPE_NAME; std::string TIMESTAMP_TYPE_NAME; + std::string IP_TYPE_NAME; std::string LIST_TYPE_NAME; std::string MAP_TYPE_NAME; std::string STRUCT_TYPE_NAME; Index: serde/src/gen/thrift/gen-rb/serde_constants.rb =================================================================== --- serde/src/gen/thrift/gen-rb/serde_constants.rb (revision 1143958) +++ serde/src/gen/thrift/gen-rb/serde_constants.rb (working copy) @@ -58,6 +58,8 @@ TIMESTAMP_TYPE_NAME = %q"timestamp" +IP_TYPE_NAME = %q"ip" + LIST_TYPE_NAME = %q"array" MAP_TYPE_NAME = %q"map" @@ -83,6 +85,7 @@ %q"date", %q"datetime", %q"timestamp", + %q"ip", ]) CollectionTypes = Set.new([ Index: serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/Constants.java =================================================================== --- serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/Constants.java (revision 1143958) +++ serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/Constants.java (working copy) @@ -74,6 +74,8 @@ public static final String TIMESTAMP_TYPE_NAME = "timestamp"; + public static final String IP_TYPE_NAME = "ip"; + public static final String LIST_TYPE_NAME = "array"; public static final String MAP_TYPE_NAME = "map"; @@ -100,6 +102,7 @@ PrimitiveTypes.add("date"); PrimitiveTypes.add("datetime"); PrimitiveTypes.add("timestamp"); + PrimitiveTypes.add("ip"); } public static final Set<String> CollectionTypes = new HashSet<String>(); Index: serde/src/gen/thrift/gen-php/serde/serde_constants.php =================================================================== --- serde/src/gen/thrift/gen-php/serde/serde_constants.php (revision 1143958) +++ serde/src/gen/thrift/gen-php/serde/serde_constants.php (working copy) @@ -60,6 +60,8 @@ $GLOBALS['serde_CONSTANTS']['TIMESTAMP_TYPE_NAME'] = "timestamp"; +$GLOBALS['serde_CONSTANTS']['IP_TYPE_NAME'] = "ip"; + $GLOBALS['serde_CONSTANTS']['LIST_TYPE_NAME'] = "array"; $GLOBALS['serde_CONSTANTS']['MAP_TYPE_NAME'] = "map"; @@ -85,6 +87,7 @@ "date" => true, "datetime" => true, "timestamp" => true, + "ip" => true, ); $GLOBALS['serde_CONSTANTS']['CollectionTypes'] = array( Index: serde/if/serde.thrift =================================================================== ---
serde/if/serde.thrift (revision 1143958) +++ serde/if/serde.thrift (working copy) @@ -36,6 +36,7 @@ const string DATE_TYPE_NAME = "date"; const string DATETIME_TYPE_NAME = "datetime"; const string TIMESTAMP_TYPE_NAME = "timestamp"; +const string IP_TYPE_NAME = "ip"; const string LIST_TYPE_NAME = "array"; const string MAP_TYPE_NAME = "map"; @@ -45,7 +46,7 @@ const string LIST_COLUMNS = "columns"; const string LIST_COLUMN_TYPES = "columns.types"; -const set PrimitiveTypes = [ VOID_TYPE_NAME BOOLEAN_TYPE_NAME TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME ], +const set PrimitiveTypes = [ VOID_TYPE_NAME BOOLEAN_TYPE_NAME TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME IP_TYPE_NAME ], const set CollectionTypes = [ LIST_TYPE_NAME MAP_TYPE_NAME ], Index: ql/src/test/results/clientpositive/ip2.q.out =================================================================== --- ql/src/test/results/clientpositive/ip2.q.out (revision 0) +++ ql/src/test/results/clientpositive/ip2.q.out (revision 0) @@ -0,0 +1,322 @@ +PREHOOK: query: drop table ip2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table ip2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table ip2 (value ip) stored as sequencefile +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table ip2 (value ip) stored as sequencefile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@ip2 +PREHOOK: query: alter table ip2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe' +PREHOOK: type: ALTERTABLE_SERIALIZER +PREHOOK: Input: default@ip2 +PREHOOK: Output: default@ip2 +POSTHOOK: query: alter table ip2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe' +POSTHOOK: type: ALTERTABLE_SERIALIZER +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: default@ip2 +PREHOOK: query: insert overwrite table ip2 select '0.255.255.0' from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@ip2 +POSTHOOK: query: insert overwrite table ip2 select '0.255.255.0' from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@ip2 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +PREHOOK: query: select cast(value as boolean) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-13_483_6722843075855207453/-mr-10000 +POSTHOOK: query: select cast(value as boolean) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-13_483_6722843075855207453/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +true +PREHOOK: query: select cast(value as tinyint) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-16_561_359193108896411480/-mr-10000 +POSTHOOK: query: select cast(value as tinyint) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-16_561_359193108896411480/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +0 +PREHOOK: query: select cast(value as smallint) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-19_625_6065177075200260785/-mr-10000 +POSTHOOK: query: select cast(value as smallint) from 
ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-19_625_6065177075200260785/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-256 +PREHOOK: query: select cast(value as int) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-22_602_6522251719943579850/-mr-10000 +POSTHOOK: query: select cast(value as int) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-22_602_6522251719943579850/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +16776960 +PREHOOK: query: select cast(value as bigint) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-25_628_3036862961033812112/-mr-10000 +POSTHOOK: query: select cast(value as bigint) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-25_628_3036862961033812112/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +16776960 +PREHOOK: query: select cast(value as float) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-28_629_8488694284374953723/-mr-10000 +POSTHOOK: query: select cast(value as float) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-28_629_8488694284374953723/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +1.677696E7 +PREHOOK: query: select cast(value as double) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-31_562_2273091854326276562/-mr-10000 +POSTHOOK: query: select cast(value as double) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-31_562_2273091854326276562/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +1.677696E7 +PREHOOK: query: select cast(value as string) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-34_946_6006870627253329212/-mr-10000 +POSTHOOK: query: select cast(value as string) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-34_946_6006870627253329212/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +0.255.255.0 +PREHOOK: query: insert overwrite table ip2 select '255.255.255.0' from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@ip2 +POSTHOOK: query: insert overwrite table ip2 select '255.255.255.0' from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@ip2 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +PREHOOK: query: select cast(value as boolean) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-42_881_6605296776417490310/-mr-10000 +POSTHOOK: query: select cast(value as boolean) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-42_881_6605296776417490310/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +true +PREHOOK: query: select cast(value as tinyint) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-45_850_4101957595226266778/-mr-10000 +POSTHOOK: query: select cast(value as tinyint) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-45_850_4101957595226266778/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +0 +PREHOOK: query: select cast(value as smallint) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-48_795_8160550940800749977/-mr-10000 +POSTHOOK: query: select cast(value as smallint) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-48_795_8160550940800749977/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-256 +PREHOOK: query: select cast(value as int) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-51_765_6595576051423533321/-mr-10000 +POSTHOOK: query: select cast(value as int) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-51_765_6595576051423533321/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-256 +PREHOOK: query: select cast(value as bigint) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-54_719_8190735345989880546/-mr-10000 +POSTHOOK: query: select cast(value as bigint) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-54_719_8190735345989880546/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-256 +PREHOOK: query: select cast(value as float) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-57_671_4458347041684582069/-mr-10000 +POSTHOOK: query: select cast(value as float) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-23-57_671_4458347041684582069/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-256.0 +PREHOOK: query: select cast(value as double) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-00_656_5698929239941086342/-mr-10000 +POSTHOOK: query: select cast(value as double) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-00_656_5698929239941086342/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-256.0 +PREHOOK: query: select cast(value as string) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-03_817_35917480720251808/-mr-10000 +POSTHOOK: query: select cast(value as string) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-03_817_35917480720251808/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +255.255.255.0 +PREHOOK: query: insert overwrite table ip2 select '251.255.255.0' from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src 
+PREHOOK: Output: default@ip2 +POSTHOOK: query: insert overwrite table ip2 select '251.255.255.0' from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@ip2 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +PREHOOK: query: select cast(value as boolean) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-11_160_2289409062533686625/-mr-10000 +POSTHOOK: query: select cast(value as boolean) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-11_160_2289409062533686625/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +true +PREHOOK: query: select cast(value as tinyint) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-14_187_6216680304674200079/-mr-10000 +POSTHOOK: query: select cast(value as tinyint) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-14_187_6216680304674200079/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +0 +PREHOOK: query: select cast(value as smallint) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-17_158_6577596780311860621/-mr-10000 +POSTHOOK: query: select cast(value as smallint) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-17_158_6577596780311860621/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-256 +PREHOOK: query: select cast(value as int) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-20_140_5091499598413900324/-mr-10000 +POSTHOOK: query: select cast(value as int) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-20_140_5091499598413900324/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-67109120 +PREHOOK: query: select cast(value as bigint) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-23_109_7288581838090920332/-mr-10000 +POSTHOOK: query: select cast(value as bigint) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-23_109_7288581838090920332/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-67109120 +PREHOOK: query: select cast(value as float) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-26_033_3031155754565526984/-mr-10000 +POSTHOOK: query: select cast(value as float) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-26_033_3031155754565526984/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value 
EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-6.710912E7 +PREHOOK: query: select cast(value as double) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-29_107_5839252842917538718/-mr-10000 +POSTHOOK: query: select cast(value as double) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-29_107_5839252842917538718/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +-6.710912E7 +PREHOOK: query: select cast(value as string) from ip2 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip2 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-32_211_3855430210596695823/-mr-10000 +POSTHOOK: query: select cast(value as string) from ip2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-32_211_3855430210596695823/-mr-10000 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +251.255.255.0 +PREHOOK: query: drop table ip2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@ip2 +PREHOOK: Output: default@ip2 +POSTHOOK: query: drop table ip2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@ip2 +POSTHOOK: Output: default@ip2 +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] +POSTHOOK: Lineage: ip2.value EXPRESSION [] Index: ql/src/test/results/clientpositive/ip3.q.out =================================================================== --- ql/src/test/results/clientpositive/ip3.q.out (revision 0) +++ ql/src/test/results/clientpositive/ip3.q.out (revision 0) @@ -0,0 +1,328 @@ +PREHOOK: query: drop table ip3 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table ip3 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table ip3 (value ip) stored as sequencefile +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table ip3 (value ip) stored as sequencefile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@ip3 +PREHOOK: query: alter table ip3 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe' +PREHOOK: type: ALTERTABLE_SERIALIZER +PREHOOK: Input: default@ip3 +PREHOOK: Output: default@ip3 +POSTHOOK: query: alter table ip3 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe' +POSTHOOK: type: ALTERTABLE_SERIALIZER +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: default@ip3 +PREHOOK: query: insert overwrite table ip3 select '2001:0db8:85a3:0000:0000:8a2e:0370:7334' + from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@ip3 +POSTHOOK: query: insert overwrite table ip3 select '2001:0db8:85a3:0000:0000:8a2e:0370:7334' + from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@ip3 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +PREHOOK: query: select cast(value as boolean) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-42_763_4923551784101377388/-mr-10000 +POSTHOOK: query: select cast(value as boolean) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-42_763_4923551784101377388/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +true +PREHOOK: query: select cast(value as tinyint) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: 
default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-45_852_5612919787054253039/-mr-10000 +POSTHOOK: query: select cast(value as tinyint) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-45_852_5612919787054253039/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +52 +PREHOOK: query: select cast(value as smallint) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-48_895_953407145799502004/-mr-10000 +POSTHOOK: query: select cast(value as smallint) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-48_895_953407145799502004/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +29492 +PREHOOK: query: select cast(value as int) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-51_914_2155498541839702441/-mr-10000 +POSTHOOK: query: select cast(value as int) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-51_914_2155498541839702441/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +57701172 +PREHOOK: query: select cast(value as bigint) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-54_828_4428546680142362788/-mr-10000 +POSTHOOK: query: select cast(value as bigint) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-54_828_4428546680142362788/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +57701172 +PREHOOK: query: select cast(value as float) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-57_813_4982281941354946558/-mr-10000 +POSTHOOK: query: select cast(value as float) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-24-57_813_4982281941354946558/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +5.7701172E7 +PREHOOK: query: select cast(value as double) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-00_725_1458005224706921311/-mr-10000 +POSTHOOK: query: select cast(value as double) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-00_725_1458005224706921311/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +5.7701172E7 +PREHOOK: query: select cast(value as string) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-03_998_4411776860659926746/-mr-10000 +POSTHOOK: query: select cast(value as string) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-03_998_4411776860659926746/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +2001:0db8:85a3:0000:0000:8a2e:0370:7334 +PREHOOK: query: insert overwrite table ip3 select 'fb01:0db8:85a3:0000:0000:8a2e:0370:7334' + from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@ip3 +POSTHOOK: query: insert overwrite table ip3 select 'fb01:0db8:85a3:0000:0000:8a2e:0370:7334' + from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@ip3 
+POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +PREHOOK: query: select cast(value as boolean) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-11_285_3574469332168903132/-mr-10000 +POSTHOOK: query: select cast(value as boolean) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-11_285_3574469332168903132/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +true +PREHOOK: query: select cast(value as tinyint) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-14_242_7617760127674689328/-mr-10000 +POSTHOOK: query: select cast(value as tinyint) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-14_242_7617760127674689328/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +52 +PREHOOK: query: select cast(value as smallint) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-17_196_3785040320069725335/-mr-10000 +POSTHOOK: query: select cast(value as smallint) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-17_196_3785040320069725335/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +29492 +PREHOOK: query: select cast(value as int) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-20_111_350465386569151568/-mr-10000 +POSTHOOK: query: select cast(value as int) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-20_111_350465386569151568/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +57701172 +PREHOOK: query: select cast(value as bigint) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-23_018_3855645137186997832/-mr-10000 +POSTHOOK: query: select cast(value as bigint) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-23_018_3855645137186997832/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +57701172 +PREHOOK: query: select cast(value as float) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-25_912_6003595455476195782/-mr-10000 +POSTHOOK: query: select cast(value as float) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-25_912_6003595455476195782/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +5.7701172E7 +PREHOOK: query: select cast(value as double) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-28_879_7464684333949981028/-mr-10000 +POSTHOOK: query: select cast(value as double) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-28_879_7464684333949981028/-mr-10000 +POSTHOOK: Lineage: ip3.value 
EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +5.7701172E7 +PREHOOK: query: select cast(value as string) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-31_792_7560669262818391034/-mr-10000 +POSTHOOK: query: select cast(value as string) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-31_792_7560669262818391034/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +fb01:0db8:85a3:0000:0000:8a2e:0370:7334 +PREHOOK: query: insert overwrite table ip3 select 'ff01:0db8:85a3:0000:0000:8a2e:0370:7334' + from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@ip3 +POSTHOOK: query: insert overwrite table ip3 select 'ff01:0db8:85a3:0000:0000:8a2e:0370:7334' + from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@ip3 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +PREHOOK: query: select cast(value as boolean) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-39_126_6963523891562493521/-mr-10000 +POSTHOOK: query: select cast(value as boolean) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-39_126_6963523891562493521/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +true +PREHOOK: query: select cast(value as tinyint) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-42_082_6997433524166043276/-mr-10000 +POSTHOOK: query: select cast(value as tinyint) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-42_082_6997433524166043276/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +52 +PREHOOK: query: select cast(value as smallint) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-45_041_6921557198304483112/-mr-10000 +POSTHOOK: query: select cast(value as smallint) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-45_041_6921557198304483112/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +29492 +PREHOOK: query: select cast(value as int) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-48_054_4551343596392311360/-mr-10000 +POSTHOOK: query: select cast(value as int) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-48_054_4551343596392311360/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +57701172 +PREHOOK: query: select cast(value as bigint) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-51_069_7022436699093023013/-mr-10000 +POSTHOOK: query: select 
cast(value as bigint) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-51_069_7022436699093023013/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +57701172 +PREHOOK: query: select cast(value as float) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-53_969_836081872226510821/-mr-10000 +POSTHOOK: query: select cast(value as float) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-53_969_836081872226510821/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +5.7701172E7 +PREHOOK: query: select cast(value as double) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-56_923_3240577205141553338/-mr-10000 +POSTHOOK: query: select cast(value as double) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-56_923_3240577205141553338/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +5.7701172E7 +PREHOOK: query: select cast(value as string) from ip3 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip3 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-59_851_8036738756949978458/-mr-10000 +POSTHOOK: query: select cast(value as string) from ip3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_16-25-59_851_8036738756949978458/-mr-10000 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +ff01:0db8:85a3:0000:0000:8a2e:0370:7334 +PREHOOK: query: drop table ip3 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@ip3 +PREHOOK: Output: default@ip3 +POSTHOOK: query: drop table ip3 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@ip3 +POSTHOOK: Output: default@ip3 +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] +POSTHOOK: Lineage: ip3.value EXPRESSION [] Index: ql/src/test/results/clientpositive/ip1.q.out =================================================================== --- ql/src/test/results/clientpositive/ip1.q.out (revision 0) +++ ql/src/test/results/clientpositive/ip1.q.out (revision 0) @@ -0,0 +1,214 @@ +PREHOOK: query: drop table ip1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table ip1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table ip1 (value ip) +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table ip1 (value ip) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@ip1 +PREHOOK: query: alter table ip1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +PREHOOK: type: ALTERTABLE_SERIALIZER +PREHOOK: Input: default@ip1 +PREHOOK: Output: default@ip1 +POSTHOOK: query: alter table ip1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +POSTHOOK: type: ALTERTABLE_SERIALIZER +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: default@ip1 +PREHOOK: query: insert overwrite table ip1 select '123.234.1.23' from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@ip1 +POSTHOOK: query: insert overwrite table ip1 select 
'123.234.1.23' from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@ip1 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +PREHOOK: query: select cast(value as boolean) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-35_959_8897646043223729150/-mr-10000 +POSTHOOK: query: select cast(value as boolean) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-35_959_8897646043223729150/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +true +PREHOOK: query: select cast(value as tinyint) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-39_080_4639518214673449946/-mr-10000 +POSTHOOK: query: select cast(value as tinyint) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-39_080_4639518214673449946/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +23 +PREHOOK: query: select cast(value as smallint) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-42_056_4119350614929529618/-mr-10000 +POSTHOOK: query: select cast(value as smallint) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-42_056_4119350614929529618/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +279 +PREHOOK: query: select cast(value as int) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-45_069_3713408288489617905/-mr-10000 +POSTHOOK: query: select cast(value as int) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-45_069_3713408288489617905/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +2078933271 +PREHOOK: query: select cast(value as bigint) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-48_233_4466707818982656697/-mr-10000 +POSTHOOK: query: select cast(value as bigint) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-48_233_4466707818982656697/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +2078933271 +PREHOOK: query: select cast(value as float) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-51_250_8976267099740342342/-mr-10000 +POSTHOOK: query: select cast(value as float) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-51_250_8976267099740342342/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +2.07893325E9 +PREHOOK: query: select cast(value as double) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-54_244_5209333105445017553/-mr-10000 +POSTHOOK: query: select cast(value as double) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-54_244_5209333105445017553/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +2.078933271E9 +PREHOOK: query: select cast(value as string) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: 
file:/tmp/franklin/hive_2011-07-06_14-13-57_262_4002362827603391458/-mr-10000 +POSTHOOK: query: select cast(value as string) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-13-57_262_4002362827603391458/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +123.234.1.23 +PREHOOK: query: insert overwrite table ip1 select '2001:0db8:85a3:0000:0000:8a2e:0370:7334' from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@ip1 +POSTHOOK: query: insert overwrite table ip1 select '2001:0db8:85a3:0000:0000:8a2e:0370:7334' from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@ip1 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +POSTHOOK: Lineage: ip1.value EXPRESSION [] +PREHOOK: query: select cast(value as boolean) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-04_800_1026910735303428924/-mr-10000 +POSTHOOK: query: select cast(value as boolean) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-04_800_1026910735303428924/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +POSTHOOK: Lineage: ip1.value EXPRESSION [] +true +PREHOOK: query: select cast(value as tinyint) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-07_816_2321761790174534037/-mr-10000 +POSTHOOK: query: select cast(value as tinyint) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-07_816_2321761790174534037/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +POSTHOOK: Lineage: ip1.value EXPRESSION [] +52 +PREHOOK: query: select cast(value as smallint) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-10_801_1663939840001264403/-mr-10000 +POSTHOOK: query: select cast(value as smallint) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-10_801_1663939840001264403/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +POSTHOOK: Lineage: ip1.value EXPRESSION [] +29492 +PREHOOK: query: select cast(value as int) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-13_784_3803676264909706413/-mr-10000 +POSTHOOK: query: select cast(value as int) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-13_784_3803676264909706413/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +POSTHOOK: Lineage: ip1.value EXPRESSION [] +57701172 +PREHOOK: query: select cast(value as bigint) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-16_776_368722932371264782/-mr-10000 +POSTHOOK: query: select cast(value as bigint) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-16_776_368722932371264782/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +POSTHOOK: Lineage: ip1.value EXPRESSION [] +57701172 +PREHOOK: query: select cast(value as float) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-19_762_2735526179466937695/-mr-10000 +POSTHOOK: query: 
select cast(value as float) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-19_762_2735526179466937695/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +POSTHOOK: Lineage: ip1.value EXPRESSION [] +5.7701172E7 +PREHOOK: query: select cast(value as double) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-22_748_2038975317800236410/-mr-10000 +POSTHOOK: query: select cast(value as double) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-22_748_2038975317800236410/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +POSTHOOK: Lineage: ip1.value EXPRESSION [] +5.7701172E7 +PREHOOK: query: select cast(value as string) from ip1 +PREHOOK: type: QUERY +PREHOOK: Input: default@ip1 +PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-25_679_2892168504869146211/-mr-10000 +POSTHOOK: query: select cast(value as string) from ip1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_14-14-25_679_2892168504869146211/-mr-10000 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +POSTHOOK: Lineage: ip1.value EXPRESSION [] +2001:0db8:85a3:0000:0000:8a2e:0370:7334 +PREHOOK: query: drop table ip1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@ip1 +PREHOOK: Output: default@ip1 +POSTHOOK: query: drop table ip1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@ip1 +POSTHOOK: Output: default@ip1 +POSTHOOK: Lineage: ip1.value EXPRESSION [] +POSTHOOK: Lineage: ip1.value EXPRESSION [] Index: ql/src/test/queries/clientpositive/ip1.q =================================================================== --- ql/src/test/queries/clientpositive/ip1.q (revision 0) +++ ql/src/test/queries/clientpositive/ip1.q (revision 0) @@ -0,0 +1,27 @@ +drop table ip1; + +create table ip1 (value ip); +alter table ip1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'; + +insert overwrite table ip1 select '123.234.1.23' from src limit 1; +select cast(value as boolean) from ip1; +select cast(value as tinyint) from ip1; +select cast(value as smallint) from ip1; +select cast(value as int) from ip1; +select cast(value as bigint) from ip1; +select cast(value as float) from ip1; +select cast(value as double) from ip1; +select cast(value as string) from ip1; + +insert overwrite table ip1 select '2001:0db8:85a3:0000:0000:8a2e:0370:7334' from src limit 1; +select cast(value as boolean) from ip1; +select cast(value as tinyint) from ip1; +select cast(value as smallint) from ip1; +select cast(value as int) from ip1; +select cast(value as bigint) from ip1; +select cast(value as float) from ip1; +select cast(value as double) from ip1; +select cast(value as string) from ip1; + + +drop table ip1; Index: ql/src/test/queries/clientpositive/ip2.q =================================================================== --- ql/src/test/queries/clientpositive/ip2.q (revision 0) +++ ql/src/test/queries/clientpositive/ip2.q (revision 0) @@ -0,0 +1,36 @@ +drop table ip2; + +create table ip2 (value ip) stored as sequencefile; +alter table ip2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe'; + +insert overwrite table ip2 select '0.255.255.0' from src limit 1; +select cast(value as boolean) from ip2; +select cast(value as tinyint) from ip2; +select cast(value as smallint) from ip2; +select cast(value as int) from ip2; +select cast(value as bigint) from ip2; +select 
cast(value as float) from ip2; +select cast(value as double) from ip2; +select cast(value as string) from ip2; + +insert overwrite table ip2 select '255.255.255.0' from src limit 1; +select cast(value as boolean) from ip2; +select cast(value as tinyint) from ip2; +select cast(value as smallint) from ip2; +select cast(value as int) from ip2; +select cast(value as bigint) from ip2; +select cast(value as float) from ip2; +select cast(value as double) from ip2; +select cast(value as string) from ip2; + +insert overwrite table ip2 select '251.255.255.0' from src limit 1; +select cast(value as boolean) from ip2; +select cast(value as tinyint) from ip2; +select cast(value as smallint) from ip2; +select cast(value as int) from ip2; +select cast(value as bigint) from ip2; +select cast(value as float) from ip2; +select cast(value as double) from ip2; +select cast(value as string) from ip2; + +drop table ip2; Index: ql/src/test/queries/clientpositive/ip3.q =================================================================== --- ql/src/test/queries/clientpositive/ip3.q (revision 0) +++ ql/src/test/queries/clientpositive/ip3.q (revision 0) @@ -0,0 +1,39 @@ +drop table ip3; + +create table ip3 (value ip) stored as sequencefile; +alter table ip3 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe'; + +insert overwrite table ip3 select '2001:0db8:85a3:0000:0000:8a2e:0370:7334' + from src limit 1; +select cast(value as boolean) from ip3; +select cast(value as tinyint) from ip3; +select cast(value as smallint) from ip3; +select cast(value as int) from ip3; +select cast(value as bigint) from ip3; +select cast(value as float) from ip3; +select cast(value as double) from ip3; +select cast(value as string) from ip3; + +insert overwrite table ip3 select 'fb01:0db8:85a3:0000:0000:8a2e:0370:7334' + from src limit 1; +select cast(value as boolean) from ip3; +select cast(value as tinyint) from ip3; +select cast(value as smallint) from ip3; +select cast(value as int) from ip3; +select cast(value as bigint) from ip3; +select cast(value as float) from ip3; +select cast(value as double) from ip3; +select cast(value as string) from ip3; + +insert overwrite table ip3 select 'ff01:0db8:85a3:0000:0000:8a2e:0370:7334' + from src limit 1; +select cast(value as boolean) from ip3; +select cast(value as tinyint) from ip3; +select cast(value as smallint) from ip3; +select cast(value as int) from ip3; +select cast(value as bigint) from ip3; +select cast(value as float) from ip3; +select cast(value as double) from ip3; +select cast(value as string) from ip3; + +drop table ip3; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision 1143958) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (working copy) @@ -122,7 +122,6 @@ import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear; import org.apache.hadoop.hive.ql.udf.UDFYear; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEWAHBitmap; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFBridge; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCollectSet; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFContextNGrams; @@ -130,6 +129,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCovariance; import 
org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCovarianceSample; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEWAHBitmap; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFHistogramNumeric; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax; @@ -147,13 +147,13 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArray; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArrayContains; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapAnd; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapOr; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapEmpty; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCase; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCoalesce; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcatWS; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapAnd; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapEmpty; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapOr; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFElt; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFField; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash; @@ -161,6 +161,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIndex; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFInstr; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIp; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLocate; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMap; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd; @@ -367,6 +368,8 @@ registerUDF(Constants.STRING_TYPE_NAME, UDFToString.class, false, UDFToString.class.getSimpleName()); + registerGenericUDF(Constants.IP_TYPE_NAME, GenericUDFIp.class); + // Aggregate functions registerGenericUDAF("max", new GenericUDAFMax()); registerGenericUDAF("min", new GenericUDAFMin()); Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (revision 1143958) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (working copy) @@ -447,6 +447,8 @@ Constants.DOUBLE_TYPE_NAME); conversionFunctionTextHashMap.put(HiveParser.TOK_STRING, Constants.STRING_TYPE_NAME); + conversionFunctionTextHashMap.put(HiveParser.TOK_IP, + Constants.IP_TYPE_NAME); } public static boolean isRedundantConversionFunction(ASTNode expr, Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 1143958) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (working copy) @@ -100,6 +100,7 @@ TOK_DATE; TOK_DATETIME; TOK_TIMESTAMP; +TOK_IP; TOK_STRING; TOK_LIST; TOK_STRUCT; @@ -1279,6 +1280,7 @@ | KW_DATETIME -> TOK_DATETIME | KW_TIMESTAMP -> TOK_TIMESTAMP | KW_STRING -> TOK_STRING + | KW_IP -> TOK_IP ; listType @@ -2159,6 +2161,7 @@ KW_DATE: 'DATE'; KW_DATETIME: 'DATETIME'; KW_TIMESTAMP: 'TIMESTAMP'; +KW_IP: 'IP'; KW_STRING: 'STRING'; KW_ARRAY: 'ARRAY'; KW_STRUCT: 'STRUCT'; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java 
=================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1143958) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy) @@ -26,7 +26,6 @@ import java.io.Serializable; import java.net.URI; -import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -34,9 +33,9 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Properties; import java.util.Set; -import java.util.Map.Entry; import org.antlr.runtime.tree.CommonTree; import org.antlr.runtime.tree.Tree; @@ -59,8 +58,8 @@ import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.index.HiveIndex; +import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType; import org.apache.hadoop.hive.ql.index.HiveIndexHandler; -import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; import org.apache.hadoop.hive.ql.metadata.Hive; @@ -71,7 +70,9 @@ import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.AlterIndexDesc; +import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc; import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.CreateIndexDesc; @@ -106,8 +107,6 @@ import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; -import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry; import org.apache.hadoop.hive.ql.session.SessionState; @@ -137,6 +136,7 @@ TokenToTypeName.put(HiveParser.TOK_DATE, Constants.DATE_TYPE_NAME); TokenToTypeName.put(HiveParser.TOK_DATETIME, Constants.DATETIME_TYPE_NAME); TokenToTypeName.put(HiveParser.TOK_TIMESTAMP, Constants.TIMESTAMP_TYPE_NAME); + TokenToTypeName.put(HiveParser.TOK_IP, Constants.IP_TYPE_NAME); } public static String getTypeName(int token) throws SemanticException {
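NOTE (not part of the patch): the evaluate(IpWritable) overloads added to the UDFTo* classes below all read the address through IpWritable.getInt(), so the narrower integral casts behave as ordinary Java narrowing conversions of that int. That is why 0.255.255.0 (folded to 0x00FFFF00 = 16776960) shows up in ip2.q.out as tinyint 0 but smallint -256. A minimal sketch of the arithmetic, with an illustrative class name of my own:

    public class IpCastSketch {
      public static void main(String[] args) {
        int ip = 0x00FFFF00;            // 0.255.255.0 folded big-endian (16776960)
        System.out.println((byte) ip);  // 0: keeps the low 8 bits, the tinyint row
        System.out.println((short) ip); // -256: keeps the low 16 bits (0xFF00), the smallint row
        System.out.println((long) ip);  // 16776960: sign-extends the int, the bigint row
      }
    }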
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java (revision 1143958) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java (working copy) @@ -21,6 +21,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; +import org.apache.hadoop.hive.serde2.io.IpWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.lazy.LazyInteger; import org.apache.hadoop.io.BooleanWritable; @@ -35,14 +36,14 @@ * */ public class UDFToInteger extends UDF { - private IntWritable intWritable = new IntWritable(); + private final IntWritable intWritable = new IntWritable(); public UDFToInteger() { } /** * Convert from void to an integer. This is called for CAST(... AS INT) - * + * * @param i * The void value to convert * @return Integer @@ -53,7 +54,7 @@ /** * Convert from boolean to an integer. This is called for CAST(... AS INT) - * + * * @param i * The boolean value to convert * @return IntWritable @@ -69,7 +70,7 @@ /** * Convert from byte to an integer. This is called for CAST(... AS INT) - * + * * @param i * The byte value to convert * @return IntWritable @@ -85,7 +86,7 @@ /** * Convert from short to an integer. This is called for CAST(... AS INT) - * + * * @param i * The short value to convert * @return IntWritable @@ -101,7 +102,7 @@ /** * Convert from long to an integer. This is called for CAST(... AS INT) - * + * * @param i * The long value to convert * @return IntWritable @@ -117,7 +118,7 @@ /** * Convert from float to an integer. This is called for CAST(... AS INT) - * + * * @param i * The float value to convert * @return IntWritable @@ -133,7 +134,7 @@ /** * Convert from double to an integer. This is called for CAST(... AS INT) - * + * * @param i * The double value to convert * @return IntWritable @@ -149,7 +150,7 @@ /** * Convert from string to an integer. This is called for CAST(... AS INT) - * + * * @param i * The string value to convert * @return IntWritable @@ -171,4 +172,12 @@ } } + /** + * Convert from ip to an integer. This is called for CAST(... AS INT) + * + * @param i + * The ip value to convert + * @return IntWritable + */ + public IntWritable evaluate(IpWritable i) { + if (i == null) { + return null; + } + intWritable.set(i.getInt()); + return intWritable; + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java (revision 1143958) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java (working copy) @@ -21,6 +21,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; +import org.apache.hadoop.hive.serde2.io.IpWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.lazy.LazyLong; import org.apache.hadoop.io.BooleanWritable; @@ -35,14 +36,14 @@ * */ public class UDFToLong extends UDF { - private LongWritable longWritable = new LongWritable(); + private final LongWritable longWritable = new LongWritable(); public UDFToLong() { } /** * Convert from void to a long. This is called for CAST(... AS BIGINT) - * + * * @param i * The void value to convert * @return LongWritable @@ -53,7 +54,7 @@ /** * Convert from boolean to a long. This is called for CAST(... AS BIGINT) - * + * * @param i * The boolean value to convert * @return LongWritable @@ -69,7 +70,7 @@ /** * Convert from byte to a long. This is called for CAST(... AS BIGINT) - * + * * @param i * The byte value to convert * @return LongWritable @@ -85,7 +86,7 @@ /** * Convert from short to a long. This is called for CAST(... AS BIGINT) - * + * * @param i * The short value to convert * @return LongWritable @@ -101,7 +102,7 @@ /** * Convert from integer to a long. This is called for CAST(... AS BIGINT) - * + * * @param i * The integer value to convert * @return LongWritable @@ -117,7 +118,7 @@ /** * Convert from long to a long. This is called for CAST(... AS BIGINT) - * + * * @param i * The long value to convert * @return LongWritable @@ -128,7 +129,7 @@ /** * Convert from float to a long. This is called for CAST(... AS BIGINT) - * + * * @param i * The float value to convert * @return LongWritable @@ -144,7 +145,7 @@ /** * Convert from double to a long.
This is called for CAST(... AS BIGINT) - * + * * @param i * The double value to convert * @return LongWritable @@ -160,7 +161,7 @@ /** * Convert from string to a long. This is called for CAST(... AS BIGINT) - * + * * @param i * The string value to convert * @return LongWritable @@ -182,4 +183,12 @@ } } + /** + * Convert from ip to a long. This is called for CAST(... AS BIGINT) + * + * @param i + * The ip value to convert + * @return LongWritable + */ + public LongWritable evaluate(IpWritable i) { + if (i == null) { + return null; + } + longWritable.set(i.getInt()); + return longWritable; + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java (revision 1143958) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java (working copy) @@ -21,6 +21,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; +import org.apache.hadoop.hive.serde2.io.IpWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.lazy.LazyByte; import org.apache.hadoop.io.BooleanWritable; @@ -35,14 +36,14 @@ * */ public class UDFToByte extends UDF { - private ByteWritable byteWritable = new ByteWritable(); + private final ByteWritable byteWritable = new ByteWritable(); public UDFToByte() { } /** * Convert from void to a byte. This is called for CAST(... AS TINYINT) - * + * * @param i * The void value to convert * @return Byte @@ -53,7 +54,7 @@ /** * Convert from boolean to a byte. This is called for CAST(... AS TINYINT) - * + * * @param i * The boolean value to convert * @return Byte @@ -69,7 +70,7 @@ /** * Convert from short to a byte. This is called for CAST(... AS TINYINT) - * + * * @param i * The short value to convert * @return Byte @@ -85,7 +86,7 @@ /** * Convert from integer to a byte. This is called for CAST(... AS TINYINT) - * + * * @param i * The integer value to convert * @return Byte @@ -101,7 +102,7 @@ /** * Convert from long to a byte. This is called for CAST(... AS TINYINT) - * + * * @param i * The long value to convert * @return Byte @@ -117,7 +118,7 @@ /** * Convert from float to a byte. This is called for CAST(... AS TINYINT) - * + * * @param i * The float value to convert * @return Byte @@ -133,7 +134,7 @@ /** * Convert from double to a byte. This is called for CAST(... AS TINYINT) - * + * * @param i * The double value to convert * @return Byte @@ -149,7 +150,7 @@ /** * Convert from string to a byte. This is called for CAST(...
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java	(revision 1143958)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java	(working copy)
@@ -21,6 +21,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.IpWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyByte;
 import org.apache.hadoop.io.BooleanWritable;
@@ -35,14 +36,14 @@
  *
  */
 public class UDFToByte extends UDF {
-  private ByteWritable byteWritable = new ByteWritable();
+  private final ByteWritable byteWritable = new ByteWritable();
 
   public UDFToByte() {
   }
 
   /**
    * Convert from void to a byte. This is called for CAST(... AS TINYINT)
-   * 
+   *
    * @param i
    *          The void value to convert
    * @return Byte
@@ -53,7 +54,7 @@
 
   /**
    * Convert from boolean to a byte. This is called for CAST(... AS TINYINT)
-   * 
+   *
    * @param i
    *          The boolean value to convert
    * @return Byte
@@ -69,7 +70,7 @@
 
   /**
    * Convert from short to a byte. This is called for CAST(... AS TINYINT)
-   * 
+   *
    * @param i
    *          The short value to convert
    * @return Byte
@@ -85,7 +86,7 @@
 
   /**
    * Convert from integer to a byte. This is called for CAST(... AS TINYINT)
-   * 
+   *
    * @param i
    *          The integer value to convert
    * @return Byte
@@ -101,7 +102,7 @@
 
   /**
    * Convert from long to a byte. This is called for CAST(... AS TINYINT)
-   * 
+   *
    * @param i
    *          The long value to convert
    * @return Byte
@@ -117,7 +118,7 @@
 
   /**
    * Convert from float to a byte. This is called for CAST(... AS TINYINT)
-   * 
+   *
    * @param i
    *          The float value to convert
    * @return Byte
@@ -133,7 +134,7 @@
 
   /**
    * Convert from double to a byte. This is called for CAST(... AS TINYINT)
-   * 
+   *
    * @param i
    *          The double value to convert
    * @return Byte
@@ -149,7 +150,7 @@
 
   /**
    * Convert from string to a byte. This is called for CAST(... AS TINYINT)
-   * 
+   *
    * @param i
    *          The string value to convert
    * @return Byte
@@ -171,4 +172,12 @@
     }
   }
 
+  public ByteWritable evaluate(IpWritable i) {
+    if (i == null) {
+      return null;
+    }
+    byteWritable.set(i.getInt().byteValue());
+    return byteWritable;
+  }
+
 }
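
The TINYINT overload narrows through Integer.byteValue(), which keeps only the low 8 bits of the 32-bit value; under the byte-order assumption sketched earlier, that is the final octet. For example:

    // Narrowing keeps the low 8 bits, i.e. the last octet of the address.
    public class IpNarrowingSketch {
      public static void main(String[] args) {
        Integer ip = 0xC0A80001;            // 192.168.0.1
        System.out.println(ip.byteValue()); // 1, the ".1" octet
      }
    }
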
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java	(revision 1143958)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java	(working copy)
@@ -21,6 +21,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.IpWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -34,14 +35,14 @@
  *
  */
 public class UDFToDouble extends UDF {
-  private DoubleWritable doubleWritable = new DoubleWritable();
+  private final DoubleWritable doubleWritable = new DoubleWritable();
 
   public UDFToDouble() {
   }
 
   /**
    * Convert from void to a double. This is called for CAST(... AS DOUBLE)
-   * 
+   *
    * @param i
    *          The void value to convert
    * @return DoubleWritable
@@ -52,7 +53,7 @@
 
   /**
    * Convert from boolean to a double. This is called for CAST(... AS DOUBLE)
-   * 
+   *
    * @param i
    *          The boolean value to convert
    * @return DoubleWritable
@@ -68,7 +69,7 @@
 
   /**
    * Convert from boolean to a double. This is called for CAST(... AS DOUBLE)
-   * 
+   *
    * @param i
    *          The byte value to convert
    * @return DoubleWritable
@@ -84,7 +85,7 @@
 
   /**
    * Convert from short to a double. This is called for CAST(... AS DOUBLE)
-   * 
+   *
    * @param i
    *          The short value to convert
    * @return DoubleWritable
@@ -100,7 +101,7 @@
 
   /**
    * Convert from integer to a double. This is called for CAST(... AS DOUBLE)
-   * 
+   *
    * @param i
    *          The integer value to convert
    * @return DoubleWritable
@@ -116,7 +117,7 @@
 
   /**
    * Convert from long to a double. This is called for CAST(... AS DOUBLE)
-   * 
+   *
    * @param i
    *          The long value to convert
    * @return DoubleWritable
@@ -132,7 +133,7 @@
 
   /**
    * Convert from float to a double. This is called for CAST(... AS DOUBLE)
-   * 
+   *
    * @param i
    *          The float value to convert
    * @return DoubleWritable
@@ -148,7 +149,7 @@
 
   /**
    * Convert from string to a double. This is called for CAST(... AS DOUBLE)
-   * 
+   *
    * @param i
    *          The string value to convert
    * @return DoubleWritable
@@ -168,4 +169,12 @@
     }
   }
 
+  public DoubleWritable evaluate(IpWritable i) {
+    if (i == null) {
+      return null;
+    }
+    doubleWritable.set(i.getInt());
+    return doubleWritable;
+  }
+
 }
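
Unlike the FLOAT overload further down, the DOUBLE path is lossless: every 32-bit int fits exactly in a double's 53-bit mantissa, so CAST(ip AS DOUBLE) preserves the value.

    // Every int survives a round trip through double unchanged.
    public class IpToDoubleSketch {
      public static void main(String[] args) {
        int ip = 0xC0A80001;               // 192.168.0.1
        double d = ip;                     // exact, 53-bit mantissa
        System.out.println((int) d == ip); // true
      }
    }
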
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java	(revision 1143958)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java	(working copy)
@@ -21,6 +21,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.IpWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -34,14 +35,14 @@
  *
  */
 public class UDFToBoolean extends UDF {
-  private BooleanWritable booleanWritable = new BooleanWritable();
+  private final BooleanWritable booleanWritable = new BooleanWritable();
 
   public UDFToBoolean() {
   }
 
   /**
    * Convert a void to boolean. This is called for CAST(... AS BOOLEAN)
-   * 
+   *
    * @param i
    *          The value of a void type
    * @return BooleanWritable
@@ -52,7 +53,7 @@
 
   /**
    * Convert from a byte to boolean. This is called for CAST(... AS BOOLEAN)
-   * 
+   *
    * @param i
    *          The byte value to convert
    * @return BooleanWritable
@@ -68,7 +69,7 @@
 
   /**
    * Convert from a short to boolean. This is called for CAST(... AS BOOLEAN)
-   * 
+   *
    * @param i
    *          The short value to convert
    * @return BooleanWritable
@@ -84,7 +85,7 @@
 
   /**
    * Convert from a integer to boolean. This is called for CAST(... AS BOOLEAN)
-   * 
+   *
    * @param i
    *          The integer value to convert
    * @return BooleanWritable
@@ -100,7 +101,7 @@
 
   /**
    * Convert from a long to boolean. This is called for CAST(... AS BOOLEAN)
-   * 
+   *
    * @param i
    *          The long value to convert
    * @return BooleanWritable
@@ -116,7 +117,7 @@
 
   /**
    * Convert from a float to boolean. This is called for CAST(... AS BOOLEAN)
-   * 
+   *
    * @param i
    *          The float value to convert
    * @return BooleanWritable
@@ -132,7 +133,7 @@
 
   /**
    * Convert from a double to boolean. This is called for CAST(... AS BOOLEAN)
-   * 
+   *
    * @param i
    *          The double value to convert
    * @return BooleanWritable
@@ -148,7 +149,7 @@
 
   /**
    * Convert from a string to boolean. This is called for CAST(... AS BOOLEAN)
-   * 
+   *
    * @param i
    *          The string value to convert
    * @return BooleanWritable
@@ -162,4 +163,12 @@
     }
   }
 
+  public BooleanWritable evaluate(IpWritable i) {
+    if (i == null) {
+      return null;
+    }
+    booleanWritable.set(i.getInt() != 0);
+    return booleanWritable;
+  }
+
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java	(revision 1143958)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java	(working copy)
@@ -60,6 +60,7 @@
     case SHORT:
     case INT:
     case LONG:
+    case IP:
       return new GenericUDAFSumLong();
     case FLOAT:
     case DOUBLE:
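
Two semantic consequences worth flagging here. The BOOLEAN overload above makes 0.0.0.0 the only falsy address. And routing IP through GenericUDAFSumLong means SUM(ip_col) adds up raw, sign-extended 32-bit values; presumably the intent is to let the type participate wherever LONG does, since summing addresses is rarely meaningful in itself. What the aggregation computes, in isolation:

    // What SUM over an IP column reduces to once IP takes the LONG path.
    public class IpSumSketch {
      public static void main(String[] args) {
        int[] ips = {0x0A000001, 0x0A000002}; // 10.0.0.1, 10.0.0.2
        long sum = 0;
        for (int ip : ips) {
          sum += ip; // sign-extending widening, as in UDFToLong above
        }
        System.out.println(sum); // 335544323
      }
    }
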
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIp.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIp.java	(revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIp.java	(revision 0)
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.IpConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+public class GenericUDFIp extends GenericUDF {
+
+  private PrimitiveObjectInspector argumentOI;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+    if (arguments.length < 1) {
+      throw new UDFArgumentLengthException(
+          "The function IP requires at least one argument, got "
+          + arguments.length);
+    }
+    try {
+      argumentOI = (PrimitiveObjectInspector) arguments[0];
+    } catch (ClassCastException e) {
+      throw new UDFArgumentException(
+          "The function IP takes only primitive types");
+    }
+    return PrimitiveObjectInspectorFactory.writableIpObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    Object o = arguments[0].get();
+    if (o == null) {
+      return null;
+    }
+    IpConverter converter = new IpConverter(argumentOI,
+        PrimitiveObjectInspectorFactory.writableIpObjectInspector);
+    return converter.convert(o);
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    StringBuilder sb = new StringBuilder();
+    sb.append("CAST ");
+    sb.append(children[0]);
+    sb.append(" AS IP");
+    return sb.toString();
+  }
+}
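
One possible refinement to GenericUDFIp, not part of this patch: evaluate() allocates a fresh IpConverter for every row, even though the argument ObjectInspector is fixed once initialize() has run, so the converter could be built once and reused. A sketch using only the signatures visible above; the class name GenericUDFIpCached is hypothetical:

    // Hedged sketch: same behavior as GenericUDFIp, but the converter is
    // hoisted into initialize() instead of being rebuilt per row.
    package org.apache.hadoop.hive.ql.udf.generic;

    import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.IpConverter;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class GenericUDFIpCached extends GenericUDF {

      private IpConverter converter; // built once, reused for every row

      @Override
      public ObjectInspector initialize(ObjectInspector[] arguments)
          throws UDFArgumentException {
        if (arguments.length < 1) {
          throw new UDFArgumentException("The function IP requires at least one argument");
        }
        if (!(arguments[0] instanceof PrimitiveObjectInspector)) {
          throw new UDFArgumentException("The function IP takes only primitive types");
        }
        converter = new IpConverter((PrimitiveObjectInspector) arguments[0],
            PrimitiveObjectInspectorFactory.writableIpObjectInspector);
        return PrimitiveObjectInspectorFactory.writableIpObjectInspector;
      }

      @Override
      public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object o = arguments[0].get();
        return o == null ? null : converter.convert(o);
      }

      @Override
      public String getDisplayString(String[] children) {
        return "CAST " + children[0] + " AS IP";
      }
    }
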
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java	(revision 1143958)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java	(working copy)
@@ -21,6 +21,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.IpWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyShort;
 import org.apache.hadoop.io.BooleanWritable;
@@ -42,7 +43,7 @@
 
   /**
    * Convert from void to a short. This is called for CAST(... AS SMALLINT)
-   * 
+   *
    * @param i
    *          The void value to convert
    * @return ShortWritable
@@ -53,7 +54,7 @@
 
   /**
    * Convert from boolean to a short. This is called for CAST(... AS SMALLINT)
-   * 
+   *
    * @param i
    *          The boolean value to convert
    * @return ShortWritable
@@ -69,7 +70,7 @@
 
   /**
    * Convert from byte to a short. This is called for CAST(... AS SMALLINT)
-   * 
+   *
    * @param i
    *          The byte value to convert
    * @return ShortWritable
@@ -85,7 +86,7 @@
 
   /**
    * Convert from integer to a short. This is called for CAST(... AS SMALLINT)
-   * 
+   *
    * @param i
    *          The integer value to convert
    * @return ShortWritable
@@ -101,7 +102,7 @@
 
   /**
    * Convert from long to a short. This is called for CAST(... AS SMALLINT)
-   * 
+   *
    * @param i
    *          The long value to convert
    * @return ShortWritable
@@ -117,7 +118,7 @@
 
   /**
    * Convert from float to a short. This is called for CAST(... AS SMALLINT)
-   * 
+   *
    * @param i
    *          The float value to convert
    * @return ShortWritable
@@ -133,7 +134,7 @@
 
   /**
    * Convert from double to a short. This is called for CAST(... AS SMALLINT)
-   * 
+   *
    * @param i
    *          The double value to convert
    * @return ShortWritable
@@ -149,7 +150,7 @@
 
   /**
    * Convert from string to a short. This is called for CAST(... AS SMALLINT)
-   * 
+   *
    * @param i
    *          The string value to convert
    * @return ShortWritable
@@ -170,4 +171,12 @@
       }
     }
   }
+
+  public ShortWritable evaluate(IpWritable i) {
+    if (i == null) {
+      return null;
+    }
+    shortWritable.set(i.getInt().shortValue());
+    return shortWritable;
+  }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java	(revision 1143958)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java	(working copy)
@@ -21,6 +21,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.IpWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -34,14 +35,14 @@
  *
  */
 public class UDFToFloat extends UDF {
-  private FloatWritable floatWritable = new FloatWritable();
+  private final FloatWritable floatWritable = new FloatWritable();
 
   public UDFToFloat() {
   }
 
   /**
    * Convert from void to a float. This is called for CAST(... AS FLOAT)
-   * 
+   *
    * @param i
    *          The void value to convert
    * @return FloatWritable
@@ -52,7 +53,7 @@
 
   /**
    * Convert from boolean to a float. This is called for CAST(... AS FLOAT)
-   * 
+   *
    * @param i
    *          The boolean value to convert
    * @return FloatWritable
@@ -68,7 +69,7 @@
 
   /**
    * Convert from byte to a float. This is called for CAST(... AS FLOAT)
-   * 
+   *
    * @param i
    *          The byte value to convert
    * @return FloatWritable
@@ -84,7 +85,7 @@
 
   /**
    * Convert from short to a float. This is called for CAST(... AS FLOAT)
-   * 
+   *
    * @param i
    *          The short value to convert
    * @return FloatWritable
@@ -100,7 +101,7 @@
 
   /**
    * Convert from integer to a float. This is called for CAST(... AS FLOAT)
-   * 
+   *
    * @param i
    *          The integer value to convert
    * @return FloatWritable
@@ -116,7 +117,7 @@
 
   /**
    * Convert from long to a float. This is called for CAST(... AS FLOAT)
-   * 
+   *
    * @param i
    *          The long value to convert
    * @return FloatWritable
@@ -132,7 +133,7 @@
 
   /**
    * Convert from double to a float. This is called for CAST(... AS FLOAT)
-   * 
+   *
    * @param i
    *          The double value to convert
    * @return FloatWritable
@@ -148,7 +149,7 @@
 
   /**
    * Convert from string to a float. This is called for CAST(... AS FLOAT)
-   * 
+   *
    * @param i
    *          The string value to convert
    * @return FloatWritable
@@ -168,4 +169,12 @@
     }
   }
 
+  public FloatWritable evaluate(IpWritable i) {
+    if (i == null) {
+      return null;
+    }
+    floatWritable.set(i.getInt());
+    return floatWritable;
+  }
+
 }
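
A caveat on the FLOAT overload just above: a float has only a 24-bit mantissa, so floatWritable.set(i.getInt()) rounds any address whose integer value needs more than 24 significant bits, which is roughly everything above 0.255.255.255. For instance:

    // float cannot represent most 32-bit address values exactly.
    public class IpToFloatSketch {
      public static void main(String[] args) {
        int ip = 0xC0A80001;         // 192.168.0.1, i.e. -1062731775
        float f = ip;                // rounds to the nearest float
        System.out.println((int) f); // -1062731776: off by one, precision lost
      }
    }
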
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java	(revision 1143958)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java	(working copy)
@@ -22,6 +22,7 @@
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.IpWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
@@ -37,8 +38,8 @@
  *
  */
 public class UDFToString extends UDF {
-  private Text t = new Text();
-  private ByteStream.Output out = new ByteStream.Output();
+  private final Text t = new Text();
+  private final ByteStream.Output out = new ByteStream.Output();
 
   public UDFToString() {
   }
@@ -47,8 +48,8 @@
     return null;
   }
 
-  private byte[] trueBytes = {'T', 'R', 'U', 'E'};
-  private byte[] falseBytes = {'F', 'A', 'L', 'S', 'E'};
+  private final byte[] trueBytes = {'T', 'R', 'U', 'E'};
+  private final byte[] falseBytes = {'F', 'A', 'L', 'S', 'E'};
 
   public Text evaluate(BooleanWritable i) {
     if (i == null) {
@@ -122,4 +123,12 @@
     }
   }
 
+  public Text evaluate(IpWritable i) {
+    if (i == null) {
+      return null;
+    }
+    t.set(i.toString());
+    return t;
+  }
+
 }
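
Finally, the STRING overload above delegates to IpWritable.toString(), so CAST(CAST('192.168.0.1' AS IP) AS STRING) should round-trip the dotted-quad text. IpWritable's implementation is not in this patch; the following is only a hypothetical sketch of the inverse of the int mapping shown at the top of this section, i.e. how toString() would plausibly render the 32-bit value:

    // Hypothetical inverse mapping: 32-bit value back to a dotted quad.
    public class IntToIpSketch {
      static String toDottedQuad(int v) {
        return ((v >> 24) & 0xFF) + "." + ((v >> 16) & 0xFF) + "."
            + ((v >> 8) & 0xFF) + "." + (v & 0xFF);
      }

      public static void main(String[] args) {
        System.out.println(toDottedQuad(0xC0A80001)); // 192.168.0.1
      }
    }
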