diff --git contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java index e46dbef..a4d3587 100644 --- contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java +++ contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java @@ -60,12 +60,13 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; /** * TypedBytesSerDe uses typed bytes to serialize/deserialize. - * + * * More info on the typedbytes stuff that Dumbo uses. * http://issues.apache.org/jira/browse/HADOOP-1722 A fast python decoder for * this, which is apparently 25% faster than the python version is available at @@ -184,7 +185,12 @@ public class TypedBytesSerDe implements SerDe { Object reuse) throws IOException { // read the type - in.readType(); + Class writableType = in.readType(); + if (writableType != null && + writableType.isAssignableFrom(NullWritable.class)) { + // indicates that the recorded value is null + return null; + } switch (type.getCategory()) { case PRIMITIVE: { diff --git contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/Type.java contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/Type.java index 1c539e7..af34efc 100644 --- contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/Type.java +++ contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/Type.java @@ -25,7 +25,7 @@ public enum Type { // codes for supported types (< 50): BYTES(0), BYTE(1), BOOL(2), INT(3), LONG(4), FLOAT(5), DOUBLE(6), STRING(7), VECTOR( - 8), LIST(9), MAP(10), SHORT(11), + 8), LIST(9), MAP(10), SHORT(11), NULL(12), // application-specific codes (50-200): WRITABLE(50), diff --git contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesOutput.java contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesOutput.java index d4850cd..46e0064 100644 --- contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesOutput.java +++ contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesOutput.java @@ -52,7 +52,7 @@ public class TypedBytesOutput { /** * Get a thread-local typed bytes output for the supplied {@link DataOutput}. - * + * * @param out * data output object * @return typed bytes output corresponding to the supplied {@link DataOutput} @@ -71,7 +71,7 @@ public class TypedBytesOutput { /** * Writes a Java object as a typed bytes sequence. - * + * * @param obj * the object to be written * @throws IOException @@ -106,7 +106,7 @@ public class TypedBytesOutput { /** * Writes a raw sequence of typed bytes. - * + * * @param bytes * the bytes to be written * @throws IOException @@ -117,7 +117,7 @@ public class TypedBytesOutput { /** * Writes a raw sequence of typed bytes. - * + * * @param bytes * the bytes to be written * @param offset @@ -132,7 +132,7 @@ public class TypedBytesOutput { /** * Writes a bytes array as a typed bytes sequence, using a given typecode. - * + * * @param bytes * the bytes array to be written * @param code @@ -147,7 +147,7 @@ public class TypedBytesOutput { /** * Writes a bytes array as a typed bytes sequence. 
- * + * * @param bytes * the bytes array to be written * @throws IOException @@ -158,7 +158,7 @@ public class TypedBytesOutput { /** * Writes a byte as a typed bytes sequence. - * + * * @param b * the byte to be written * @throws IOException @@ -170,7 +170,7 @@ public class TypedBytesOutput { /** * Writes a boolean as a typed bytes sequence. - * + * * @param b * the boolean to be written * @throws IOException @@ -182,7 +182,7 @@ public class TypedBytesOutput { /** * Writes an integer as a typed bytes sequence. - * + * * @param i * the integer to be written * @throws IOException @@ -194,7 +194,7 @@ public class TypedBytesOutput { /** * Writes a long as a typed bytes sequence. - * + * * @param l * the long to be written * @throws IOException @@ -206,7 +206,7 @@ public class TypedBytesOutput { /** * Writes a float as a typed bytes sequence. - * + * * @param f * the float to be written * @throws IOException @@ -218,7 +218,7 @@ public class TypedBytesOutput { /** * Writes a double as a typed bytes sequence. - * + * * @param d * the double to be written * @throws IOException @@ -230,7 +230,7 @@ public class TypedBytesOutput { /** * Writes a short as a typed bytes sequence. - * + * * @param s * the short to be written * @throws IOException @@ -242,7 +242,7 @@ public class TypedBytesOutput { /** * Writes a string as a typed bytes sequence. - * + * * @param s * the string to be written * @throws IOException @@ -254,7 +254,7 @@ public class TypedBytesOutput { /** * Writes a vector as a typed bytes sequence. - * + * * @param vector * the vector to be written * @throws IOException @@ -268,7 +268,7 @@ public class TypedBytesOutput { /** * Writes a vector header. - * + * * @param length * the number of elements in the vector * @throws IOException @@ -280,7 +280,7 @@ public class TypedBytesOutput { /** * Writes a list as a typed bytes sequence. - * + * * @param list * the list to be written * @throws IOException @@ -295,7 +295,7 @@ public class TypedBytesOutput { /** * Writes a list header. - * + * * @throws IOException */ public void writeListHeader() throws IOException { @@ -304,7 +304,7 @@ public class TypedBytesOutput { /** * Writes a list footer. - * + * * @throws IOException */ public void writeListFooter() throws IOException { @@ -313,7 +313,7 @@ public class TypedBytesOutput { /** * Writes a map as a typed bytes sequence. - * + * * @param map * the map to be written * @throws IOException @@ -330,7 +330,7 @@ public class TypedBytesOutput { /** * Writes a map header. - * + * * @param length * the number of key-value pairs in the map * @throws IOException @@ -344,4 +344,12 @@ public class TypedBytesOutput { out.write(Type.ENDOFRECORD.code); } + /** + * Writes a NULL type marker to the output. 
+ * + * @throws IOException + */ + public void writeNull() throws IOException { + out.write(Type.NULL.code); + } } diff --git contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableInput.java contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableInput.java index 1f8264a..ace0838 100644 --- contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableInput.java +++ contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableInput.java @@ -35,6 +35,7 @@ import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.MapWritable; +import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.SortedMapWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.VIntWritable; @@ -46,7 +47,7 @@ import org.apache.hadoop.util.ReflectionUtils; /** * Provides functionality for reading typed bytes as Writable objects. - * + * * @see TypedBytesInput */ public class TypedBytesWritableInput implements Configurable { @@ -72,7 +73,7 @@ public class TypedBytesWritableInput implements Configurable { /** * Get a thread-local typed bytes writable input for the supplied * {@link TypedBytesInput}. - * + * * @param in * typed bytes input object * @return typed bytes writable input corresponding to the supplied @@ -87,7 +88,7 @@ public class TypedBytesWritableInput implements Configurable { /** * Get a thread-local typed bytes writable input for the supplied * {@link DataInput}. - * + * * @param in * data input object * @return typed bytes writable input corresponding to the supplied @@ -140,6 +141,8 @@ public class TypedBytesWritableInput implements Configurable { return readWritable(); case ENDOFRECORD: return null; + case NULL: + return NullWritable.get(); default: throw new RuntimeException("unknown type"); } @@ -181,6 +184,8 @@ public class TypedBytesWritableInput implements Configurable { return Writable.class; case ENDOFRECORD: return null; + case NULL: + return NullWritable.class; default: throw new RuntimeException("unknown type"); } diff --git contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java index 440c221..df1bc03 100644 --- contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java +++ contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java @@ -35,6 +35,7 @@ import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.MapWritable; +import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.SortedMapWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.VIntWritable; @@ -46,7 +47,7 @@ import org.apache.hadoop.record.Record; /** * Provides functionality for writing Writable objects as typed bytes. - * + * * @see TypedBytesOutput */ public class TypedBytesWritableOutput { @@ -70,7 +71,7 @@ public class TypedBytesWritableOutput { /** * Get a thread-local typed bytes writable input for the supplied * {@link TypedBytesOutput}. 
- * + * * @param out * typed bytes output object * @return typed bytes writable output corresponding to the supplied @@ -85,7 +86,7 @@ public class TypedBytesWritableOutput { /** * Get a thread-local typed bytes writable output for the supplied * {@link DataOutput}. - * + * * @param out * data output object * @return typed bytes writable output corresponding to the supplied @@ -139,6 +140,8 @@ public class TypedBytesWritableOutput { writeSortedMap((SortedMapWritable) w); } else if (w instanceof Record) { writeRecord((Record) w); + } else if (w instanceof NullWritable || w == null) { + writeNull(); } else { writeWritable(w); // last resort } @@ -221,6 +224,10 @@ public class TypedBytesWritableOutput { r.serialize(TypedBytesRecordOutput.get(out)); } + public void writeNull() throws IOException { + out.writeNull(); + } + public void writeWritable(Writable w) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); diff --git contrib/src/test/queries/clientpositive/serde_typedbytes_null.q contrib/src/test/queries/clientpositive/serde_typedbytes_null.q new file mode 100644 index 0000000..cbca108 --- /dev/null +++ contrib/src/test/queries/clientpositive/serde_typedbytes_null.q @@ -0,0 +1,16 @@ +add jar ../build/contrib/hive_contrib.jar; + +DROP TABLE table1; + +CREATE TABLE table1 (a STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' STORED AS SEQUENCEFILE; + +INSERT OVERWRITE TABLE table1 SELECT NULL FROM SRC; + +SELECT * FROM table1; + +SELECT a FROM table1 WHERE a IS NULL; + +SELECT a FROM table1 WHERE a IS NOT NULL; + +DROP TABLE table1; + diff --git contrib/src/test/results/clientpositive/serde_typedbytes_null.q.out contrib/src/test/results/clientpositive/serde_typedbytes_null.q.out new file mode 100644 index 0000000..b74aedc --- /dev/null +++ contrib/src/test/results/clientpositive/serde_typedbytes_null.q.out @@ -0,0 +1,1051 @@ +PREHOOK: query: DROP TABLE table1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE table1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE table1 (a STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' STORED AS SEQUENCEFILE +PREHOOK: type: CREATETABLE +POSTHOOK: query: CREATE TABLE table1 (a STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' STORED AS SEQUENCEFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@table1 +PREHOOK: query: INSERT OVERWRITE TABLE table1 SELECT NULL FROM SRC +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@table1 +POSTHOOK: query: INSERT OVERWRITE TABLE table1 SELECT NULL FROM SRC +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@table1 +POSTHOOK: Lineage: table1.a EXPRESSION [] +PREHOOK: query: SELECT * FROM table1 +PREHOOK: type: QUERY +PREHOOK: Input: default@table1 +PREHOOK: Output: file:/Users/arvind/work/src/hive/trunk/build/contrib/scratchdir/hive_2010-05-16_18-40-18_809_975339691035477279/10000 +POSTHOOK: query: SELECT * FROM table1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table1 +POSTHOOK: Output: file:/Users/arvind/work/src/hive/trunk/build/contrib/scratchdir/hive_2010-05-16_18-40-18_809_975339691035477279/10000 +POSTHOOK: Lineage: table1.a EXPRESSION [] +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL 
+NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +PREHOOK: query: SELECT a FROM table1 WHERE a IS NULL +PREHOOK: type: QUERY +PREHOOK: Input: default@table1 +PREHOOK: Output: file:/Users/arvind/work/src/hive/trunk/build/contrib/scratchdir/hive_2010-05-16_18-40-19_082_3679978940089257475/10000 +POSTHOOK: query: SELECT a FROM table1 WHERE a IS NULL +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table1 +POSTHOOK: Output: file:/Users/arvind/work/src/hive/trunk/build/contrib/scratchdir/hive_2010-05-16_18-40-19_082_3679978940089257475/10000 +POSTHOOK: Lineage: table1.a EXPRESSION [] +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL 
+NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +PREHOOK: query: SELECT a FROM table1 WHERE a IS NOT NULL +PREHOOK: type: QUERY +PREHOOK: Input: default@table1 +PREHOOK: Output: file:/Users/arvind/work/src/hive/trunk/build/contrib/scratchdir/hive_2010-05-16_18-40-23_132_6415243174289744741/10000 +POSTHOOK: query: SELECT a FROM table1 WHERE a IS NOT NULL +POSTHOOK: type: QUERY +POSTHOOK: Input: default@table1 +POSTHOOK: Output: file:/Users/arvind/work/src/hive/trunk/build/contrib/scratchdir/hive_2010-05-16_18-40-23_132_6415243174289744741/10000 +POSTHOOK: Lineage: table1.a EXPRESSION [] +PREHOOK: query: DROP TABLE table1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE table1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Output: default@table1 +POSTHOOK: Lineage: table1.a EXPRESSION []
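
Note (not part of the patch, added for illustration): the change above introduces a NULL(12) type code to the typed bytes protocol, teaches TypedBytesWritableOutput to emit it for a NullWritable (or a null Writable reference), and maps it back to NullWritable.get() on the read side, which is what allows TypedBytesSerDe.deserializeField to return a Java null. The sketch below shows that round trip in isolation; it assumes the patched contrib classes are on the classpath, and the class name TypedBytesNullRoundTrip is purely illustrative.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesWritableInput;
import org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesWritableOutput;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;

public class TypedBytesNullRoundTrip {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();

    // With this patch, write(NullWritable) emits the single NULL(12) type byte
    // instead of falling through to the generic WRITABLE encoding.
    TypedBytesWritableOutput.get(new DataOutputStream(baos))
        .write(NullWritable.get());

    TypedBytesWritableInput in = TypedBytesWritableInput.get(
        new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));

    // The read side maps the NULL code back to NullWritable.get().
    Writable w = in.read();
    System.out.println(w instanceof NullWritable);  // expected: true
  }
}

Encoding null as its own one-byte type code (rather than overloading an existing code) keeps the wire format self-describing, which is what the new serde_typedbytes_null.q test exercises end to end: every row written as NULL comes back NULL, satisfies IS NULL, and produces no rows for IS NOT NULL.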