Index: src/contrib/hive/serde/src/gen-py/org_apache_hadoop_hive_serde/constants.py
===================================================================
--- src/contrib/hive/serde/src/gen-py/org_apache_hadoop_hive_serde/constants.py (revision 712243)
+++ src/contrib/hive/serde/src/gen-py/org_apache_hadoop_hive_serde/constants.py (working copy)
@@ -15,6 +15,12 @@
 SERIALIZATION_DDL = 'serialization.ddl'
 
+SERIALIZATION_NULL_FORMAT = 'serialization.null.format'
+
+SERIALIZATION_LAST_COLUMN_TAKES_REST = 'serialization.last.column.takes.rest'
+
+SERIALIZATION_SORT_ORDER = 'serialization.sort.order'
+
 FIELD_DELIM = 'field.delim'
 
 COLLECTION_DELIM = 'colelction.delim'
@@ -23,8 +29,14 @@
 MAPKEY_DELIM = 'mapkey.delim'
 
+QUOTE_CHAR = 'quote.delim'
+
+BOOLEAN_TYPE_NAME = 'boolean'
+
 TINYINT_TYPE_NAME = 'tinyint'
 
+SMALLINT_TYPE_NAME = 'smallint'
+
 INT_TYPE_NAME = 'int'
 
 BIGINT_TYPE_NAME = 'bigint'
@@ -46,7 +58,9 @@
 MAP_TYPE_NAME = 'map'
 
 PrimitiveTypes = set([
+  'boolean',
   'tinyint',
+  'smallint',
   'int',
   'bigint',
   'float',
Index: src/contrib/hive/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java
===================================================================
--- src/contrib/hive/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java (revision 712243)
+++ src/contrib/hive/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java (working copy)
@@ -18,6 +18,9 @@
 package org.apache.hadoop.hive.serde2;
 
+
+import org.apache.hadoop.hive.serde.Constants;
+
 import junit.framework.TestCase;
 import java.io.*;
 import org.apache.hadoop.hive.serde2.*;
@@ -27,6 +30,7 @@
 import com.facebook.thrift.transport.*;
 import com.facebook.thrift.*;
 import com.facebook.thrift.protocol.*;
+import org.apache.hadoop.conf.Configuration;
 
 public class TestTCTLSeparatedProtocol extends TestCase {
 
@@ -71,7 +75,7 @@
     // use 3 as the row buffer size to force lots of re-buffering.
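+    // note: the buffer size in the line below is now 1024, not 3, so the comment
+    // above about forcing lots of re-buffering no longer describes this test.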
-    TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 3);
+    TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 1024);
 
     prot.readStructBegin();
 
@@ -116,7 +120,7 @@
   public void testWrites() throws Exception {
     try {
       TMemoryBuffer trans = new TMemoryBuffer(1024);
-      TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 3);
+      TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 1024);
 
       prot.writeStructBegin(new TStruct());
       prot.writeFieldBegin(new TField());
@@ -164,8 +168,6 @@
       String test = new String(b, 0, len);
       String testRef = "100348.55234.22hello world!key1val1key2val2key3val3elem1elem2bye!";
 
-      // System.err.println("test=" + test + ">");
-      // System.err.println(" ref=" + testRef + ">");
       assertTrue(test.equals(testRef));
 
       trans = new TMemoryBuffer(1023);
@@ -242,4 +244,263 @@
     }
   }
 
+  public void testQuotedWrites() throws Exception {
+    try {
+      TMemoryBuffer trans = new TMemoryBuffer(4096);
+      TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 4096);
+      Properties schema = new Properties();
+      schema.setProperty(Constants.QUOTE_CHAR, "\"");
+      schema.setProperty(Constants.FIELD_DELIM, ",");
+      prot.initialize(new Configuration(), schema);
+
+      String testStr = "\"hello, world!\"";
+
+      prot.writeStructBegin(new TStruct());
+
+      prot.writeFieldBegin(new TField());
+      prot.writeString(testStr);
+      prot.writeFieldEnd();
+
+      prot.writeFieldBegin(new TField());
+      prot.writeListBegin(new TList());
+      prot.writeString("elem1");
+      prot.writeString("elem2");
+      prot.writeListEnd();
+      prot.writeFieldEnd();
+
+      prot.writeStructEnd();
+      prot.writeString("\n");
+
+      trans.flush();
+
+      byte b[] = new byte[4096];
+      int len = trans.read(b,0,b.length);
+
+
+      trans = new TMemoryBuffer(4096);
+      trans.write(b,0,len);
+      prot = new TCTLSeparatedProtocol(trans, 1024);
+      prot.initialize(new Configuration(), schema);
+
+      prot.readStructBegin();
+      prot.readFieldBegin();
+      final String firstRead = prot.readString();
+      prot.readFieldEnd();
+
+      testStr = testStr.replace("\"","");
+
+      assertEquals(testStr, firstRead);
+
+
+      // the 2 element list
+      prot.readFieldBegin();
+      TList l = prot.readListBegin();
+      assertTrue(l.size == 2);
+      assertTrue(prot.readString().equals("elem1"));
+      assertTrue(prot.readString().equals("elem2"));
+      prot.readListEnd();
+      prot.readFieldEnd();
+
+      // should return nulls at end
+      prot.readFieldBegin();
+      assertTrue(prot.readString().equals(""));
+      prot.readFieldEnd();
+
+      // should return nulls at end
+      prot.readFieldBegin();
+      assertTrue(prot.readString().equals(""));
+      prot.readFieldEnd();
+
+      prot.readStructEnd();
+
+
+    } catch(Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+
+  /**
+   * Tests a sample apache log format. This is actually better done in general with a more TRegexLike protocol, but for this
+   * case, TCTLSeparatedProtocol can do it.
+   */
+  public void test1ApacheLogFormat() throws Exception {
+    try {
+      final String sample = "127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] \"GET /apache_pb.gif HTTP/1.0\" 200 2326";
+
+      TMemoryBuffer trans = new TMemoryBuffer(4096);
+      trans.write(sample.getBytes(), 0, sample.getBytes().length);
+      trans.flush();
+
+      TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 4096);
+      Properties schema = new Properties();
+
+      // this is a hacky way of doing the quotes since it will match any 2 of these, so
+      // "[ hello this is something to split [" would be considered to be quoted.
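+      // e.g. in the sample line above, [10/Oct/2000:13:55:36 -0700] and
+      // "GET /apache_pb.gif HTTP/1.0" each come back as one field with the
+      // surrounding [ ] or " " stripped; the cost of the alternation is that a
+      // mismatched pair such as "foo] is also treated as quoted.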
+      schema.setProperty(Constants.QUOTE_CHAR, "(\"|\\[|\\])");
+
+      schema.setProperty(Constants.FIELD_DELIM, " ");
+      schema.setProperty(Constants.SERIALIZATION_NULL_FORMAT, "-");
+      prot.initialize(new Configuration(), schema);
+
+      prot.readStructBegin();
+
+      // ip address
+      prot.readFieldBegin();
+      final String ip = prot.readString();
+      prot.readFieldEnd();
+
+      assertEquals("127.0.0.1", ip);
+
+      // identd
+      prot.readFieldBegin();
+      final String identd = prot.readString();
+      prot.readFieldEnd();
+
+      assertEquals("", identd);
+
+      // user
+      prot.readFieldBegin();
+      final String user = prot.readString();
+      prot.readFieldEnd();
+
+      assertEquals("frank",user);
+
+      // finishTime
+      prot.readFieldBegin();
+      final String finishTime = prot.readString();
+      prot.readFieldEnd();
+
+      assertEquals("10/Oct/2000:13:55:36 -0700",finishTime);
+
+      // requestLine
+      prot.readFieldBegin();
+      final String requestLine = prot.readString();
+      prot.readFieldEnd();
+
+      assertEquals("GET /apache_pb.gif HTTP/1.0",requestLine);
+
+      // returncode
+      prot.readFieldBegin();
+      final int returnCode = prot.readI32();
+      prot.readFieldEnd();
+
+      assertEquals(200, returnCode);
+
+      // return size
+      prot.readFieldBegin();
+      final int returnSize = prot.readI32();
+      prot.readFieldEnd();
+
+      assertEquals(2326, returnSize);
+
+      prot.readStructEnd();
+
+    } catch(Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+
+
+  public void testNulls() throws Exception {
+    try {
+      TMemoryBuffer trans = new TMemoryBuffer(1024);
+      TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 10);
+
+      prot.writeStructBegin(new TStruct());
+
+      prot.writeFieldBegin(new TField());
+      prot.writeString(null);
+      prot.writeFieldEnd();
+
+      prot.writeFieldBegin(new TField());
+      prot.writeString(null);
+      prot.writeFieldEnd();
+
+      prot.writeFieldBegin(new TField());
+      prot.writeI32(100);
+      prot.writeFieldEnd();
+
+      prot.writeFieldBegin(new TField());
+      prot.writeString(null);
+      prot.writeFieldEnd();
+
+      prot.writeFieldBegin(new TField());
+      prot.writeMapBegin(new TMap());
+      prot.writeString(null);
+      prot.writeString(null);
+      prot.writeString("key2");
+      prot.writeString(null);
+      prot.writeString(null);
+      prot.writeString("val3");
+      prot.writeMapEnd();
+      prot.writeFieldEnd();
+
+      prot.writeStructEnd();
+
+      byte b[] = new byte[3*1024];
+      int len = trans.read(b,0,b.length);
+      String written = new String(b,0,len);
+
+      String testRef = "\\N\\N100\\N\\N\\Nkey2\\N\\Nval3";
+
+      assertTrue(testRef.equals(written));
+
+      trans = new TMemoryBuffer(1023);
+      trans.write(b, 0, len);
+
+      prot = new TCTLSeparatedProtocol(trans, 3);
+
+      prot.readStructBegin();
+
+      prot.readFieldBegin();
+      String ret = prot.readString();
+      prot.readFieldEnd();
+
+      assertTrue(ret.equals(""));
+
+      prot.readFieldBegin();
+      ret = prot.readString();
+      prot.readFieldEnd();
+
+      assertTrue(ret.equals(""));
+
+      prot.readFieldBegin();
+      int ret1 = prot.readI32();
+      prot.readFieldEnd();
+
+      assertTrue(ret1 == 100);
+
+
+      prot.readFieldBegin();
+      ret1 = prot.readI32();
+      prot.readFieldEnd();
+
+      prot.readFieldBegin();
+      TMap map = prot.readMapBegin();
+
+      assertTrue(map.size == 3);
+
+      assertTrue(prot.readString().isEmpty());
+      assertTrue(prot.readString().isEmpty());
+
+      assertTrue(prot.readString().equals("key2"));
+      assertTrue(prot.readString().isEmpty());
+
+      assertTrue(prot.readString().isEmpty());
+      assertTrue(prot.readString().equals("val3"));
+
+      prot.readMapEnd();
+      prot.readFieldEnd();
+
+      assertTrue(ret1 == 0);
+
+    } catch(Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+
 }
Index:
src/contrib/hive/serde/src/test/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java =================================================================== --- src/contrib/hive/serde/src/test/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java (revision 712243) +++ src/contrib/hive/serde/src/test/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java (working copy) @@ -20,8 +20,11 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; +import java.util.Map; import java.util.List; import java.util.Properties; +import java.util.Random; +import java.util.Map.Entry; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.Constants; @@ -35,6 +38,13 @@ public class TestDynamicSerDe extends TestCase { + public static HashMap makeHashMap(String... params) { + HashMap r = new HashMap(); + for(int i=0; i protocols = new ArrayList(); ArrayList isBinaries = new ArrayList(); - + ArrayList> additionalParams = new ArrayList>(); + + protocols.add(org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol.class.getName()); + isBinaries.add(true); + additionalParams.add(makeHashMap("serialization.sort.order", "++++++")); + protocols.add(org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol.class.getName()); + isBinaries.add(true); + additionalParams.add(makeHashMap("serialization.sort.order", "------")); + + protocols.add(com.facebook.thrift.protocol.TBinaryProtocol.class.getName()); isBinaries.add(true); + additionalParams.add(null); protocols.add(com.facebook.thrift.protocol.TJSONProtocol.class.getName()); isBinaries.add(false); + additionalParams.add(null); // TSimpleJSONProtocol does not support deserialization. // protocols.add(com.facebook.thrift.protocol.TSimpleJSONProtocol.class.getName()); // isBinaries.add(false); + // additionalParams.add(null); // TCTLSeparatedProtocol is not done yet. protocols.add(org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName()); isBinaries.add(false); + additionalParams.add(null); System.out.println("input struct = " + struct); @@ -80,8 +107,14 @@ schema.setProperty(Constants.SERIALIZATION_FORMAT, protocol); schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "test"); schema.setProperty(Constants.SERIALIZATION_DDL, - "struct test { i32 hello, list bye, map another}"); + "struct test { i32 hello, list bye, map another, i32 nhello, double d, double nd}"); schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString()); + HashMap p = additionalParams.get(pp); + if (p != null) { + for(Entry e: p.entrySet()) { + schema.setProperty(e.getKey(), e.getValue()); + } + } DynamicSerDe serde = new DynamicSerDe(); serde.initialize(new Configuration(), schema); @@ -93,15 +126,8 @@ // Try to serialize BytesWritable bytes = (BytesWritable) serde.serialize(struct, oi); - - StringBuilder sb = new StringBuilder(); - for (int i=0; i0) { + int compareResult = bytes[i-1].compareTo(bytes[i]); + if ( (compareResult<0 && !ascending) || (compareResult>0 && ascending) ) { + System.out.println("Test failed in " + (ascending ? 
"ascending" : "descending") + " order."); + System.out.println("serialized data of " + structs[i-1] + " = " + hexString(bytes[i-1])); + System.out.println("serialized data of " + structs[i] + " = " + hexString(bytes[i])); + fail("Sort order of serialized " + structs[i-1] + " and " + structs[i] + " are reversed!"); + } + } + } + + // Try to deserialize + Object[] deserialized = new Object[structs.length]; + for (int i=0; i struct = new ArrayList(); + if (i==0) { + struct.add(null); + } else { + struct.add(Double.valueOf((r.nextDouble()-0.5)*10)); + } + structs[i] = struct; + } + sort(structs); + ddl = "struct test { double hello}"; + System.out.println("Testing " + ddl); + testTBinarySortableProtocol(structs, ddl, true); + testTBinarySortableProtocol(structs, ddl, false); + + // Test integer + for (int i=0; i struct = new ArrayList(); + if (i==0) { + struct.add(null); + } else { + struct.add((int)((r.nextDouble()-0.5)*1.5*Integer.MAX_VALUE)); + } + structs[i] = struct; + } + sort(structs); + // Null should be smaller than any other value, so put a null at the front end + // to test whether that is held. + ((List)structs[0]).set(0, null); + ddl = "struct test { i32 hello}"; + System.out.println("Testing " + ddl); + testTBinarySortableProtocol(structs, ddl, true); + testTBinarySortableProtocol(structs, ddl, false); + + // Test long + for (int i=0; i struct = new ArrayList(); + if (i==0) { + struct.add(null); + } else { + struct.add((long)((r.nextDouble()-0.5)*1.5*Long.MAX_VALUE)); + } + structs[i] = struct; + } + sort(structs); + // Null should be smaller than any other value, so put a null at the front end + // to test whether that is held. + ((List)structs[0]).set(0, null); + ddl = "struct test { i64 hello}"; + System.out.println("Testing " + ddl); + testTBinarySortableProtocol(structs, ddl, true); + testTBinarySortableProtocol(structs, ddl, false); + + // Test string + for (int i=0; i struct = new ArrayList(); + if (i==0) { + struct.add(null); + } else { + struct.add(String.valueOf((r.nextDouble()-0.5)*1000)); + } + structs[i] = struct; + } + sort(structs); + // Null should be smaller than any other value, so put a null at the front end + // to test whether that is held. + ((List)structs[0]).set(0, null); + ddl = "struct test { string hello}"; + System.out.println("Testing " + ddl); + testTBinarySortableProtocol(structs, ddl, true); + testTBinarySortableProtocol(structs, ddl, false); + + // Test string + double + for (int i=0; i struct = new ArrayList(); + if (i%9==0) { + struct.add(null); + } else { + struct.add("str" + (i/5)); + } + if (i%7==0) { + struct.add(null); + } else { + struct.add(Double.valueOf((r.nextDouble()-0.5)*10)); + } + structs[i] = struct; + } + sort(structs); + // Null should be smaller than any other value, so put a null at the front end + // to test whether that is held. 
+ ((List)structs[0]).set(0, null); + ddl = "struct test { string hello, double another}"; + System.out.println("Testing " + ddl); + testTBinarySortableProtocol(structs, ddl, true); + testTBinarySortableProtocol(structs, ddl, false); + + System.out.println("Test testTBinarySortableProtocol passed!"); + } catch (Throwable e) { + e.printStackTrace(); + throw e; + } + } + + public void testConfigurableTCTLSeparated() throws Throwable { try { @@ -161,20 +394,14 @@ serde.initialize(new Configuration(), schema); TCTLSeparatedProtocol prot = (TCTLSeparatedProtocol)serde.oprot_; - assertTrue(prot.getPrimarySeparator() == 9); + assertTrue(prot.getPrimarySeparator().equals("\u0009")); ObjectInspector oi = serde.getObjectInspector(); // Try to serialize BytesWritable bytes = (BytesWritable) serde.serialize(struct, oi); - StringBuilder sb = new StringBuilder(); - for (int i=0; i bye = null; + HashMap another = new HashMap(); + another.put("firstKey", 1); + another.put("secondKey", 2); + ArrayList struct = new ArrayList(); + struct.add(Integer.valueOf(234)); + struct.add(bye); + struct.add(another); + + Properties schema = new Properties(); + schema.setProperty(Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName()); + schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "test"); + schema.setProperty(Constants.SERIALIZATION_DDL, + "struct test { i32 hello, list bye, map another}"); + schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString()); + schema.setProperty(TCTLSeparatedProtocol.ReturnNullsKey, "true"); + + DynamicSerDe serde = new DynamicSerDe(); + serde.initialize(new Configuration(), schema); + + ObjectInspector oi = serde.getObjectInspector(); + + // Try to serialize + BytesWritable bytes = (BytesWritable) serde.serialize(struct, oi); + + hexString(bytes); + + // Try to deserialize + Object o = serde.deserialize(bytes); + assertEquals(struct, o); + + } catch (Throwable e) { + e.printStackTrace(); + throw e; + } + + } + + /** + * Tests all elements of a struct being null with return nulls on + */ + + public void testNulls2() throws Throwable { + try { + + + // Try to construct an object + ArrayList bye = null; + HashMap another = null; + ArrayList struct = new ArrayList(); + struct.add(null); + struct.add(bye); + struct.add(another); + + Properties schema = new Properties(); + schema.setProperty(Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName()); + schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "test"); + schema.setProperty(Constants.SERIALIZATION_DDL, + "struct test { i32 hello, list bye, map another}"); + schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString()); + schema.setProperty(TCTLSeparatedProtocol.ReturnNullsKey, "true"); + + DynamicSerDe serde = new DynamicSerDe(); + serde.initialize(new Configuration(), schema); + + ObjectInspector oi = serde.getObjectInspector(); + + // Try to serialize + BytesWritable bytes = (BytesWritable) serde.serialize(struct, oi); + + hexString(bytes); + + // Try to deserialize + Object o = serde.deserialize(bytes); + List olist = (List)o; + + assertTrue(olist.size() == 3); + assertEquals(null, olist.get(0)); + assertEquals(null, olist.get(1)); + assertEquals(null, olist.get(2)); + + // assertEquals(o, struct); Cannot do this because types of null lists are wrong. 
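+      // (contrast with testNulls4 below, where ReturnNullsKey is "false" and the
+      // same shape of input comes back as type defaults instead of nulls.)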
+ + } catch (Throwable e) { + e.printStackTrace(); + throw e; + } + + } + + /** + * Tests map and list being empty with return nulls on + */ + + public void testNulls3() throws Throwable { + try { + + + // Try to construct an object + ArrayList bye = new ArrayList (); + HashMap another = null; + ArrayList struct = new ArrayList(); + struct.add(null); + struct.add(bye); + struct.add(another); + + Properties schema = new Properties(); + schema.setProperty(Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName()); + schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "test"); + schema.setProperty(Constants.SERIALIZATION_DDL, + "struct test { i32 hello, list bye, map another}"); + schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString()); + + schema.setProperty(TCTLSeparatedProtocol.ReturnNullsKey, "true"); + DynamicSerDe serde = new DynamicSerDe(); + serde.initialize(new Configuration(), schema); + + ObjectInspector oi = serde.getObjectInspector(); + + // Try to serialize + BytesWritable bytes = (BytesWritable) serde.serialize(struct, oi); + + hexString(bytes); + + // Try to deserialize + Object o = serde.deserialize(bytes); + List olist = (List)o; + + assertTrue(olist.size() == 3); + assertEquals(null, olist.get(0)); + assertEquals(0, ((List)olist.get(1)).size()); + assertEquals(null, olist.get(2)); + + // assertEquals(o, struct); Cannot do this because types of null lists are wrong. + + } catch (Throwable e) { + e.printStackTrace(); + throw e; + } + + } + + + /** + * Tests map and list null/empty with return nulls *off* + */ + + public void testNulls4() throws Throwable { + try { + + + // Try to construct an object + ArrayList bye = new ArrayList (); + HashMap another = null; + ArrayList struct = new ArrayList(); + struct.add(null); + struct.add(bye); + struct.add(another); + + Properties schema = new Properties(); + schema.setProperty(Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName()); + schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "test"); + schema.setProperty(Constants.SERIALIZATION_DDL, + "struct test { i32 hello, list bye, map another}"); + schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString()); + + schema.setProperty(TCTLSeparatedProtocol.ReturnNullsKey, "false"); + DynamicSerDe serde = new DynamicSerDe(); + serde.initialize(new Configuration(), schema); + + ObjectInspector oi = serde.getObjectInspector(); + + // Try to serialize + BytesWritable bytes = (BytesWritable) serde.serialize(struct, oi); + + hexString(bytes); + + // Try to deserialize + Object o = serde.deserialize(bytes); + List olist = (List)o; + + assertTrue(olist.size() == 3); + assertEquals(new Integer(0), (Integer)olist.get(0)); + List num1 = (List)olist.get(1); + assertTrue(num1.size() == 0); + Map num2 = (Map)olist.get(2); + assertTrue(num2.size() == 0); + + // assertEquals(o, struct); Cannot do this because types of null lists are wrong. 
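+      // (with ReturnNullsKey "false", the missing fields come back as the type
+      // defaults asserted above: Integer 0, an empty list, and an empty map,
+      // rather than the nulls seen in testNulls2 and testNulls3.)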
+ + } catch (Throwable e) { + e.printStackTrace(); + throw e; + } + + } + + + /** + * Tests map and list null/empty with return nulls *off* + */ + + public void testStructsinStructs() throws Throwable { + try { + + + Properties schema = new Properties(); + // schema.setProperty(Constants.SERIALIZATION_FORMAT, com.facebook.thrift.protocol.TJSONProtocol.class.getName()); + schema.setProperty(Constants.SERIALIZATION_FORMAT, com.facebook.thrift.protocol.TBinaryProtocol.class.getName()); + schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "test"); + schema.setProperty(Constants.SERIALIZATION_DDL, + "struct inner { i32 field1, string field2 },struct test {inner foo, i32 hello, list bye, map another}"); + schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString()); + + + // + // construct object of above type + // + + // construct the inner struct + ArrayList innerStruct = new ArrayList(); + innerStruct.add(new Integer(22)); + innerStruct.add(new String("hello world")); + + // construct outer struct + ArrayList bye = new ArrayList (); + bye.add("firstString"); + bye.add("secondString"); + HashMap another = new HashMap(); + another.put("firstKey", 1); + another.put("secondKey", 2); + + ArrayList struct = new ArrayList(); + + struct.add(innerStruct); + struct.add(Integer.valueOf(234)); + struct.add(bye); + struct.add(another); + + DynamicSerDe serde = new DynamicSerDe(); + serde.initialize(new Configuration(), schema); + + ObjectInspector oi = serde.getObjectInspector(); + + // Try to serialize + BytesWritable bytes = (BytesWritable) serde.serialize(struct, oi); + + // Try to deserialize + Object o = serde.deserialize(bytes); + List olist = (List)o; + + + assertEquals(4, olist.size()); + assertEquals(innerStruct, olist.get(0)); + assertEquals(new Integer(234), olist.get(1)); + assertEquals(bye, olist.get(2)); + assertEquals(another, olist.get(3)); + + } catch (Throwable e) { + e.printStackTrace(); + throw e; + } + + } + + + + + public void testSkip() throws Throwable { + try { + + // Try to construct an object + ArrayList bye = new ArrayList(); + bye.add("firstString"); + bye.add("secondString"); + HashMap another = new HashMap(); + another.put("firstKey", 1); + another.put("secondKey", 2); + ArrayList struct = new ArrayList(); + struct.add(Integer.valueOf(234)); + struct.add(bye); + struct.add(another); + + Properties schema = new Properties(); + schema.setProperty(Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName()); + schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "test"); + schema.setProperty(Constants.SERIALIZATION_DDL, + "struct test { i32 hello, list bye, map another}"); + schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString()); + + schema.setProperty(Constants.FIELD_DELIM, "9"); + schema.setProperty(Constants.COLLECTION_DELIM, "1"); + schema.setProperty(Constants.LINE_DELIM, "2"); + schema.setProperty(Constants.MAPKEY_DELIM, "4"); + + DynamicSerDe serde = new DynamicSerDe(); + serde.initialize(new Configuration(), schema); + + TCTLSeparatedProtocol prot = (TCTLSeparatedProtocol)serde.oprot_; + assertTrue(prot.getPrimarySeparator().equals("\u0009")); + + ObjectInspector oi = serde.getObjectInspector(); + + // Try to serialize + BytesWritable bytes = (BytesWritable) serde.serialize(struct, oi); + + hexString(bytes); + + String compare = "234" + "\u0009" + "firstString" + "\u0001" + "secondString" + "\u0009" 
+ "firstKey" + "\u0004" + "1" + "\u0001" + "secondKey" + "\u0004" + "2"; + + System.out.println("bytes in text =" + new String(bytes.get(), 0, bytes.getSize()) + ">"); + System.out.println("compare to =" + compare + ">"); + + assertTrue(compare.equals( new String(bytes.get(), 0, bytes.getSize()))); + + schema.setProperty(Constants.SERIALIZATION_DDL, + "struct test { i32 hello, skip list bye, map another}"); + + serde.initialize(new Configuration(), schema); + + // Try to deserialize + Object o = serde.deserialize(bytes); + System.out.println("o class = " + o.getClass()); + List olist = (List)o; + System.out.println("o size = " + olist.size()); + System.out.println("o = " + o); + + assertEquals(null, olist.get(1)); + + // set the skipped field to null + struct.set(1,null); + + assertEquals(o, struct); + + } catch (Throwable e) { + e.printStackTrace(); + throw e; + } + + } + } Index: src/contrib/hive/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java =================================================================== --- src/contrib/hive/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java (revision 712243) +++ src/contrib/hive/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java (working copy) @@ -34,7 +34,7 @@ assertEquals(oi1, oi2); assertEquals(Category.PRIMITIVE, oi1.getCategory()); assertEquals(c, oi1.getPrimitiveClass()); - assertEquals(ObjectInspectorUtils.getClassShortName(c.getName()), + assertEquals(ObjectInspectorUtils.getClassShortName(c), oi1.getTypeName()); } catch (Throwable e) { e.printStackTrace(); Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeString.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeString.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeString.java (working copy) @@ -1,60 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType.*; - -public class DynamicSerDeTypeString extends DynamicSerDeTypeBase { - - // production is: string - - public DynamicSerDeTypeString(int i) { - super(i); - } - public DynamicSerDeTypeString(thrift_grammar p, int i) { - super(p,i); - } - public Class getRealType() { return java.lang.String.class; } - - public String toString() { return "string"; } - - public String deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return String.valueOf(iprot.readString()); - } - - public void serialize(Object s, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - oprot.writeString((String)s); - } - public byte getType() { - return TType.STRING; - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeDouble.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeDouble.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeDouble.java (working copy) @@ -1,59 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType; - -public class DynamicSerDeTypeDouble extends DynamicSerDeTypeBase { - - // production is: double - - public DynamicSerDeTypeDouble(int i) { - super(i); - } - public DynamicSerDeTypeDouble(thrift_grammar p, int i) { - super(p,i); - } - - public String toString() { return "double"; } - - public Double deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return new Double(iprot.readDouble()); - } - - public void serialize(Object s, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - oprot.writeDouble((Double)s); - } - public byte getType() { - return TType.DOUBLE; - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeBase.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeBase.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeBase.java (working copy) @@ -1,65 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import org.apache.hadoop.hive.serde.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.TException; -import java.io.Serializable; - -public class DynamicSerDeTypeBase extends DynamicSerDeSimpleNode implements Serializable { - private static final long serialVersionUID = 1L; - - public DynamicSerDeTypeBase(int i) { - super(i); - } - - public DynamicSerDeTypeBase(thrift_grammar p, int i) { - super(p,i); - } - - public Class getRealType() throws SerDeException { - throw new SerDeException("Not implemented in base"); - } - - public Object get(Object obj) { - throw new RuntimeException("Not implemented in base"); - } - - public void serialize(Object o, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - throw new SerDeException("Not implemented in base"); - } - - public Object deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - throw new SerDeException("Not implemented in base"); - } - - public String toString() { - return "BAD"; - } - - public byte getType() { - return -1; - } - - public boolean isPrimitive() { return true; } - public boolean isList() { return false; } - public boolean isMap() { return false; } - -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeList.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeList.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeList.java (working copy) @@ -1,93 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType; -import java.lang.Integer; - - -public class DynamicSerDeTypeList extends DynamicSerDeTypeBase { - - public boolean isPrimitive() { return false; } - public boolean isList() { return true; } - - // production is: list - - static final private int FD_TYPE = 0; - - public Class getRealType() { - return java.util.ArrayList.class; - } - - public DynamicSerDeTypeList(int i) { - super(i); - } - public DynamicSerDeTypeList(thrift_grammar p, int i) { - super(p,i); - } - - public DynamicSerDeTypeBase getElementType() { - return (DynamicSerDeTypeBase)((DynamicSerDeFieldType)this.jjtGetChild(FD_TYPE)).getMyType(); - } - - public String toString() { - return "list<" + this.getElementType().toString() + ">"; - } - - public ArrayList deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - TList thelist = iprot.readListBegin(); - ArrayList result = new ArrayList () ; - for(int i = 0; i < thelist.size; i++) { - Object elem = this.getElementType().deserialize(iprot); - result.add(elem); - } - // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy - iprot.readListEnd(); - return result; - } - - public void serialize(Object obj, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - List list = (List)obj; - DynamicSerDeTypeBase mt = this.getElementType(); - oprot.writeListBegin(new TList(mt.getType(),list.size())); - for(Object o: list) { - mt.serialize(o,oprot); - } - // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy - oprot.writeListEnd(); - } - - public byte getType() { - return TType.LIST; - } - -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeMap.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeMap.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeMap.java (working copy) @@ -1,115 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType; - -public class DynamicSerDeTypeMap extends DynamicSerDeTypeBase { - - public boolean isPrimitive() { return false; } - public boolean isMap() { return true;} - - // production is: Map - - private final byte FD_KEYTYPE = 0; - private final byte FD_VALUETYPE = 1; - - // returns Map - public Class getRealType() { - try { - Class c = this.getKeyType().getRealType(); - Class c2 = this.getValueType().getRealType(); - Object o = c.newInstance(); - Object o2 = c2.newInstance(); - Map l = Collections.singletonMap(o,o2); - return l.getClass(); - } catch (Exception e) { - e.printStackTrace(); - throw new RuntimeException(e); - } - } - - public DynamicSerDeTypeMap(int i) { - super(i); - } - - public DynamicSerDeTypeMap(thrift_grammar p, int i) { - super(p,i); - } - - public DynamicSerDeTypeBase getKeyType() { - return (DynamicSerDeTypeBase)((DynamicSerDeFieldType)this.jjtGetChild(FD_KEYTYPE)).getMyType(); - } - - public DynamicSerDeTypeBase getValueType() { - return (DynamicSerDeTypeBase)((DynamicSerDeFieldType)this.jjtGetChild(FD_VALUETYPE)).getMyType(); - } - - public String toString() { - return "map<" + this.getKeyType().toString() + "," + this.getValueType().toString() + ">"; - } - - public Map deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - TMap themap = iprot.readMapBegin(); - HashMap result = new HashMap (); - for(int i = 0; i < themap.size; i++) { - Object key = this.getKeyType().deserialize(iprot); - Object value = this.getValueType().deserialize(iprot); - result.put(key,value); - } - - // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy - iprot.readMapEnd(); - return result; - } - - public void serialize(Object o, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - Map map = (Map)o; - DynamicSerDeTypeBase keyType = this.getKeyType(); - DynamicSerDeTypeBase valueType = this.getValueType(); - oprot.writeMapBegin(new TMap(keyType.getType(),valueType.getType(),map.size())); - for(Iterator i = map.entrySet().iterator(); i.hasNext(); ) { - Map.Entry it = (Map.Entry)i.next(); - Object key = it.getKey(); - Object value = it.getValue(); - keyType.serialize(key, oprot); - valueType.serialize(value, oprot); - } - // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy - oprot.writeMapEnd(); - } - - public byte getType() { - return TType.MAP; - } -}; - Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeStructBase.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeStructBase.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeStructBase.java (working copy) @@ -1,95 +0,0 @@ -/** - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType.*; - -abstract public class DynamicSerDeStructBase extends DynamicSerDeTypeBase implements Serializable { - - DynamicSerDeFieldList fieldList; - - public DynamicSerDeStructBase(int i) { - super(i); - } - public DynamicSerDeStructBase(thrift_grammar p, int i) { - super(p,i); - } - - abstract public DynamicSerDeFieldList getFieldList(); - - public void initialize() { - fieldList = getFieldList(); - fieldList.initialize(); - } - - - public Class getRealType() { - return DynamicSerDeTypeContainer.class; - } - - public DynamicSerDeTypeContainer deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - if(thrift_mode) { - iprot.readStructBegin(); - } - - DynamicSerDeTypeContainer container = fieldList.deserialize(iprot); - - if(thrift_mode) { - iprot.readStructEnd(); - } - return container; - } - - /** - * serialize - * - * The way to serialize a Thrift "table" which in thrift land is really a function and thus this class's name. - * - * @param o - this list should be in the order of the function's params for now. If we wanted to remove this requirement, - * we'd need to make it a List> with the String being the field name. - * - */ - public void serialize(DynamicSerDeTypeContainer fields, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - final int seqid_ = 0; // bugbug set it right - todo pw - - if(thrift_mode) { - oprot.writeStructBegin(new TStruct(this.name)); - } - - fieldList.serialize(fields.fields, oprot); - - if(thrift_mode) { - oprot.writeFieldStop(); - oprot.writeStructEnd(); - } - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFieldList.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFieldList.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFieldList.java (working copy) @@ -1,179 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType.*; - -public class DynamicSerDeFieldList extends DynamicSerDeSimpleNode implements Serializable { - - // private void writeObject(ObjectOutputStream out) throws IOException { - // out.writeObject(types_by_column_name); - // out.writeObject(ordered_types); - // } - - // production: Field()* - - // mapping of the fieldid to the field - private Map types_by_id = null; - private Map types_by_column_name = null; - private DynamicSerDeTypeBase ordered_types[] = null; - - public DynamicSerDeFieldList(int i) { - super(i); - } - - public DynamicSerDeFieldList(thrift_grammar p, int i) { - super(p,i); - } - - private DynamicSerDeField getField(int i) { - return (DynamicSerDeField)this.jjtGetChild(i); - } - - final protected DynamicSerDeField [] getChildren() { - int size = this.jjtGetNumChildren(); - DynamicSerDeField result [] = new DynamicSerDeField[size]; - for(int i = 0; i < size; i++) { - result[i] = (DynamicSerDeField)this.jjtGetChild(i); - } - return result; - } - - private int getNumFields() { - return this.jjtGetNumChildren(); - } - - public void initialize() { - if(types_by_id == null) { - // multiple means of lookup - types_by_id = new HashMap (); - types_by_column_name = new HashMap (); - ordered_types = new DynamicSerDeTypeBase[this.jjtGetNumChildren()]; - - // put them in and also roll them up while we're at it - // a Field contains a FieldType which in turn contains a type - for(int i = 0 ; i < this.jjtGetNumChildren(); i++) { - DynamicSerDeField mt = this.getField(i); - DynamicSerDeTypeBase type = mt.getFieldType().getMyType(); - type.fieldid = mt.fieldid; - type.name = mt.name; - - types_by_id.put(Integer.valueOf(mt.fieldid) , type); - types_by_column_name.put(mt.name, type); - ordered_types[i] = type; - } - } - } - - private DynamicSerDeTypeBase getFieldByFieldId(int i) { - return types_by_id.get(i); - } - - protected DynamicSerDeTypeBase getFieldByName(String fieldname) { - return types_by_column_name.get(fieldname); - } - - public DynamicSerDeTypeContainer deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - - DynamicSerDeTypeContainer result = new DynamicSerDeTypeContainer(); - for(int i = 0; i < this.getNumFields(); i++) { - 
DynamicSerDeTypeBase mt = null; - TField field = null; - - if(thrift_mode) { - field = iprot.readFieldBegin(); - - if(field.type >= 0) { - if(field.type == TType.STOP) { - break; - } - mt = this.getFieldByFieldId(field.id); - if(mt == null) { - System.err.println("ERROR for fieldid: " + field.id + " system has no knowledge of this field which is of type : " + field.type); - TProtocolUtil.skip(iprot,field.type); - continue; - } - } - } - - if(!thrift_mode || field.type < 0) { - mt = this.ordered_types[i]; - } - - result.fields.put(mt.name, mt.deserialize(iprot)); - - if(thrift_mode) { - iprot.readFieldEnd(); - } - } - return result; - } - - public void serialize(Map obj_fields, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - - int i = 0; - TField field = new TField(); - - for(Iterator it = obj_fields.entrySet().iterator(); it.hasNext(); ) { - Map.Entry entry = (Map.Entry)it.next(); - String key = (String)entry.getKey(); - Object f = entry.getValue(); - DynamicSerDeTypeBase mt = this.types_by_column_name.get(key); - - if(thrift_mode) { - field.name = mt.name; - field.type = mt.getType(); - field.id = (short)mt.fieldid; - oprot.writeFieldBegin(field); - } - - mt.serialize(f, oprot); - if(thrift_mode) { - oprot.writeFieldEnd(); - } - i++; - } - if(thrift_mode) { - oprot.writeFieldStop(); - } - } - - public String toString() { - StringBuffer result = new StringBuffer(); - String prefix = ""; - for(DynamicSerDeField t: this.getChildren()) { - result.append(prefix + t.fieldid + ":" + t.getFieldType().getMyType().toString() + " " + t.name); - prefix = ","; - } - return result.toString(); - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDe.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDe.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDe.java (working copy) @@ -1,241 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import org.apache.hadoop.hive.serde.*; -import java.util.*; -import java.io.*; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.*; -import org.apache.hadoop.util.StringUtils; - -import com.facebook.thrift.*; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.transport.*; - -public class DynamicSerDe implements SerDe, Serializable { - - private String type_name; - private DynamicSerDeStructBase bt; - - transient private thrift_grammar parse_tree; - transient private boolean inStreaming; - transient protected ByteStream.Input bis_; - transient protected ByteStream.Output bos_; - transient private TProtocol oprot_; - transient private TProtocol iprot_; - - public static final String META_TABLE_NAME = "name"; - - static public void main(String args[]) { - try { - - - { - String schema_file = args[0]; - String kv_file = args[1]; - - Properties schema = new Properties(); - schema.load(new - FileInputStream(schema_file)); - System.out.println(schema.toString()); - - DynamicSerDe serde = new DynamicSerDe(); - serde.initialize(new Configuration(), schema); - - BufferedReader r = new BufferedReader(new FileReader(kv_file)); - String row; - SerDeField keyF = serde.getFieldFromExpression(null, "key"); - while((row = r.readLine()) != null) { - Text t = new Text(row); - System.out.println("row = " + row); - System.out.flush(); - Object o = serde.deserialize(t); - System.out.println(o.toString() + " of type " + - o.getClass().getName()); - Object fo = keyF.get(o); - } - if(true) return; - - } - - - - Properties schema = new Properties(); - String ddl = "struct test { i32 hello, list bye, set more, map another}" ; - - schema.setProperty(Constants.SERIALIZATION_DDL,ddl); - schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString()); - schema.setProperty(Constants.SERIALIZATION_FORMAT, "com.facebook.thrift.protocol.TJSONProtocol"); - schema.setProperty(META_TABLE_NAME,"test"); - - DynamicSerDe serde = new DynamicSerDe(); - serde.initialize(new Configuration(), schema); - - DynamicSerDeTypeContainer data = new DynamicSerDeTypeContainer(); - ArrayList hellos = new ArrayList(); - hellos.add("goodbye and this is more stuff - what is going oin here this is really really weird"); - - Set set = new HashSet(); - set.add(22); - - Map map = new HashMap(); - map.put("me",true); - - data.fields.put("bye",hellos); - data.fields.put("hello",Integer.valueOf(10032)); - data.fields.put("more",set); - data.fields.put("another",map); - - BytesWritable foo = (BytesWritable)serde.serialize(data); - System.err.println(new String(foo.get())); - - Object obj = serde.deserialize(foo); - System.err.println("obj=" + obj); - - } catch(Exception e) { - System.err.println("got exception: " + e.getMessage()); - e.printStackTrace(); - } - } - TIOStreamTransport tios; - - public void initialize(Configuration job, Properties tbl) throws SerDeException { - try { - - String ddl = tbl.getProperty(Constants.SERIALIZATION_DDL); - type_name = tbl.getProperty(META_TABLE_NAME); - String protoName = tbl.getProperty(Constants.SERIALIZATION_FORMAT); - - if(protoName == null) { - protoName = "com.facebook.thrift.protocol.TBinaryProtocol"; - } - TProtocolFactory protFactory = TReflectionUtils.getProtocolFactoryByName(protoName); - bos_ = new ByteStream.Output(); - bis_ = new ByteStream.Input(); - tios = new TIOStreamTransport(bis_,bos_); - - oprot_ = protFactory.getProtocol(tios); - iprot_ = 
protFactory.getProtocol(tios); - - // in theory the include path should come from the configuration - List include_path = new ArrayList(); - include_path.add("."); - this.parse_tree = new thrift_grammar(new ByteArrayInputStream(ddl.getBytes()), include_path,false); - this.parse_tree.Start(); - - this.bt = (DynamicSerDeStructBase)this.parse_tree.types.get(type_name); - - if(this.bt == null) { - this.bt = (DynamicSerDeStructBase)this.parse_tree.tables.get(type_name); - } - - if(this.bt == null) { - throw new SerDeException("Could not lookup table type " + type_name + " in this ddl: " + ddl); - } - - this.bt.initialize(); - - this.inStreaming = job.get("hive.streaming.select") != null; - } catch (Exception e) { - System.out.println(StringUtils.stringifyException(e)); - throw new SerDeException(e); - } - } - - public DynamicSerDeTypeContainer deserialize(Writable field) throws SerDeException { - try { - Text b = (Text)field; - bis_.reset(b.getBytes(), b.getLength()); - return this.bt.deserialize(iprot_); - } catch(Exception e) { - e.printStackTrace(); - throw new SerDeException(e); - } - - } - - public Writable serialize(Object o) throws SerDeException { - DynamicSerDeTypeContainer obj = (DynamicSerDeTypeContainer)o; - - try { - this.bt.serialize(obj, oprot_); - oprot_.getTransport().flush(); - } catch(Exception e) { - e.printStackTrace(); - throw new SerDeException(e); - } - return new BytesWritable(bos_.getData()); - } - - - public SerDeField getFieldFromExpression(SerDeField parentField, String fieldExpression) - throws SerDeException { - - DynamicSerDeStructBase type = bt; - DynamicSerDeHiveField parentFieldCast = (DynamicSerDeHiveField)parentField; - - if(parentFieldCast != null) { - if(parentFieldCast.isList()) { - DynamicSerDeTypeBase elemType = parentFieldCast.getListElementMetaType(); - if(elemType instanceof DynamicSerDeStructBase) { - type = (DynamicSerDeStructBase)elemType; - } else { - throw new SerDeException("Trying to get fields from a non struct/table type: " + elemType); - } - } else { - DynamicSerDeTypeBase metaType = parentFieldCast.getMetaType(); - if(metaType instanceof DynamicSerDeStructBase) { - type = (DynamicSerDeStructBase)metaType; - } else { - throw new SerDeException("Trying to get fields from a non struct/table type: " + metaType); - } - } - } - SerDeField field = new DynamicSerDeHiveField(type, fieldExpression); - return field; - } - - public List getFields(SerDeField parentField) throws SerDeException { - DynamicSerDeStructBase type = bt; - DynamicSerDeHiveField parentFieldCast = (DynamicSerDeHiveField)parentField; - - if(parentFieldCast != null) { - DynamicSerDeTypeBase t = parentFieldCast.getMetaType(); - if(t instanceof DynamicSerDeStructBase) { - type = (DynamicSerDeStructBase)t; - } else { - throw new SerDeException("trying to getFields on a non struct type: " + t); - } - } - - List fields = new ArrayList(); - - for(DynamicSerDeField elem: type.getFieldList().getChildren()) { - fields.add(this.getFieldFromExpression(parentField, elem.name)); - } - return fields; - } - - public String toJSONString(Object obj, SerDeField hf) throws SerDeException { - // return(tsd.toJSONString(obj, hf)); - return null; - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeField.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeField.java (revision 712243) +++ 
src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeField.java (working copy) @@ -1,47 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import org.apache.hadoop.hive.serde.*; -import com.facebook.thrift.TException; -import com.facebook.thrift.protocol.TProtocol; - -public class DynamicSerDeField extends DynamicSerDeSimpleNode { - - - // production is: - // [this.fieldid :] Requiredness() FieldType() this.name FieldValue() [CommaOrSemicolon()] - - private final int FD_REQUIREDNESS = 0; - private final int FD_FIELD_TYPE = 1; - private final int FD_FIELD_VALUE =2; - - public DynamicSerDeFieldType getFieldType() { - return (DynamicSerDeFieldType)this.jjtGetChild(FD_FIELD_TYPE); - } - - public DynamicSerDeField(int i) { - super(i); - } - public DynamicSerDeField(thrift_grammar p, int i) { - super(p,i); - } - -} - Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeContainer.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeContainer.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeContainer.java (working copy) @@ -1,52 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-import java.util.*;
-
-public class DynamicSerDeTypeContainer {
-
-  public Map fields;
-
-  public DynamicSerDeTypeContainer() {
-    fields = new HashMap();
-  }
-
-  public Map getFields() {
-    return fields;
-  }
-
-  public Iterator keySet() {
-    return fields.keySet().iterator();
-  }
-
-  public Iterator entrySet() {
-    return fields.entrySet().iterator();
-  }
-
-  public String toString() {
-    StringBuffer ret = new StringBuffer();
-    String comma = "";
-    for(Iterator it = fields.keySet().iterator(); it.hasNext(); ) {
-      ret.append(comma + fields.get(it.next()));
-      comma = ",";
-    }
-    return ret.toString();
-  }
-}
Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeStruct.java
===================================================================
--- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeStruct.java	(revision 712243)
+++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeStruct.java	(working copy)
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-import com.facebook.thrift.TException;
-import com.facebook.thrift.TApplicationException;
-import com.facebook.thrift.protocol.TBinaryProtocol;
-import com.facebook.thrift.protocol.*;
-import com.facebook.thrift.protocol.TProtocol;
-import com.facebook.thrift.server.TServer;
-import com.facebook.thrift.server.*;
-import com.facebook.thrift.transport.*;
-import com.facebook.thrift.transport.TServerTransport;
-import java.util.*;
-import java.io.*;
-import org.apache.hadoop.hive.serde.*;
-import java.lang.reflect.*;
-import com.facebook.thrift.protocol.TType.*;
-
-public class DynamicSerDeStruct extends DynamicSerDeStructBase {
-
-  // production is: struct this.name { FieldList() }
-
-  final private static int FD_FIELD_LIST = 0;
-
-  public DynamicSerDeStruct(int i) {
-    super(i);
-  }
-  public DynamicSerDeStruct(thrift_grammar p, int i) {
-    super(p,i);
-  }
-
-  public String toString() {
-    String result = "struct " + this.name + "(";
-    result += this.getFieldList().toString();
-    result += ")";
-    return result;
-  }
-
-  public DynamicSerDeFieldList getFieldList() {
-    return (DynamicSerDeFieldList)this.jjtGetChild(FD_FIELD_LIST);
-  }
-
-  public byte getType() {
-    return TType.STRUCT;
-  }
-
-}
Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammar.jjt
===================================================================
--- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammar.jjt	(revision 712243)
+++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammar.jjt	(working copy)
@@ -1,867 +0,0 @@
-options {
-  MULTI=true;
-  STATIC = false;
-  NODE_PREFIX = "DynamicSerDe";
-}
-
-
-PARSER_BEGIN(thrift_grammar)
-
-package com.facebook.serde.dynamic_type;
-
-import java.util.*;
-import java.io.*;
-import java.net.*;
-import com.facebook.thrift.protocol.*;
-import com.facebook.thrift.transport.*;
-import com.facebook.serde.thrift.*;
-import com.facebook.serde.*;
-
-public class thrift_grammar {
-
-  private List include_path = null;
-
-  // for computing the autogenerated field ids in thrift
-  private int field_val;
-
-  // store types and tables separately because one cannot use a table
-  // (i.e., service.method) as a struct-like type.
-  protected Map types;
-  protected Map tables;
-
-  // system include path
-  final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" };
-
-  // This constructor needs a third parameter to distinguish it from the
-  // two-parameter constructor that JavaCC auto-generates: some calls in the
-  // generated code pass null as the second argument, which would otherwise
-  // be ambiguous between the two overloads.
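The comment above (describing the constructor that follows below) flags a classic Java overload pitfall: once a hand-written (InputStream, List) constructor coexists with the JavaCC-generated (InputStream, String) one, any call that passes null for the second argument no longer resolves. A minimal sketch of the problem and the three-parameter workaround, using invented names rather than the real generated signatures:

```java
import java.io.InputStream;
import java.util.List;

class ParserSketch {
    ParserSketch(InputStream is, String encoding) {}          // stands in for the generated ctor
    ParserSketch(InputStream is, List<String> includePath) {} // stands in for the hand-written ctor

    // The extra boolean exists only to make call sites unambiguous.
    ParserSketch(InputStream is, List<String> includePath, boolean unused) {
        this(is, includePath);
    }

    static void demo(InputStream is, List<String> path) {
        // new ParserSketch(is, null);     // does not compile: ambiguous between the two 2-arg ctors
        new ParserSketch(is, path, false); // compiles: the third argument selects this overload
    }
}
```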
- protected thrift_grammar(InputStream is, List include_path, boolean junk) { - this(is,null); - this.types = new HashMap () ; - this.tables = new HashMap () ; - this.include_path = include_path; - this.field_val = -1; - } - - // find the file on the include path - private static File findFile(String fname, List include_path) { - for(String path: include_path) { - final String full = path + "/" + fname; - File f = new File(full); - if(f.exists()) { - return f; - } - } - return null; - } - - public static void main(String args[]) { - String filename = null; - List include_path = new ArrayList(); - - for(String path: default_include_path) { - include_path.add(path); - } - for(int i = 0; i < args.length; i++) { - String arg = args[i]; - if(arg.equals("--include") && i + 1 < args.length) { - include_path.add(args[++i]); - } - if(arg.equals("--file") && i + 1 < args.length) { - filename = args[++i]; - } - } - - InputStream is = System.in; - if(filename != null) { - try { - is = new FileInputStream(findFile(filename, include_path)); - } catch(IOException e) { - } - } - thrift_grammar t = new thrift_grammar(is,include_path,false); - - try { - t.Start(); - } catch (Exception e) { - System.out.println("Parse error."); - System.out.println(e.getMessage()); - e.printStackTrace(); - } - } -} - -PARSER_END(thrift_grammar) - - - -SKIP : -{ - " " -| "\t" -| "\n" -| "\r" -| <"#"(~["\n"])* ("\n"|"\r"|"\r\n")> -| <"//" (~["\n","\r"])* ("\n"|"\r"|"\r\n")> -| <"/*" (~["*"])* "*" (~["/"] (~["*"])* "*")* "/"> -} - - -/** - * HELPER DEFINITIONS, COMMENTS, CONSTANTS, AND WHATNOT - */ - -TOKEN: -{ -| - | -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| - | - | - | - | - | - | - | - | - | - | - | - | - -} - -TOKEN: { - - -| -)*"."()+(["e","E"](["+","-"])?()+)?> -| -(||"."|"_")*> -| -<#LETTER: (["a"-"z", "A"-"Z" ]) > -| -<#DIGIT: ["0"-"9"] > -| - -| - -} - - -SimpleNode Start() : {} -{ - HeaderList() (Definition())+ - { - return jjtThis; - } -} - -SimpleNode HeaderList() : {} -{ - (Header())* - { - return jjtThis; - } - -} - -SimpleNode Header() : {} -{ - Include() - { - return jjtThis; - } -| Namespace() - { - return jjtThis; - } -} - -SimpleNode Namespace() : {} -{ - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -| - -{ - return jjtThis; -} -} - - -SimpleNode Include() : { - String fname; - boolean found = false; -} -{ - - fname=.image -{ - // bugbug somewhat fragile below substring expression - fname = fname.substring(1,fname.length() - 1); - - // try to find the file on the include path - File f = thrift_grammar.findFile(fname, this.include_path); - if(f != null) { - found = true; - try { - FileInputStream fis = new FileInputStream(f); - thrift_grammar t = new thrift_grammar(fis,this.include_path, false); - t.Start(); - fis.close(); - found = true; - // add in what we found to our type and table tables. 
- this.tables.putAll(t.tables); - this.types.putAll(t.types); - } catch (Exception e) { - System.out.println("File: " + fname + " - Oops."); - System.out.println(e.getMessage()); - e.printStackTrace(); - } - } - if(!found) { - throw new RuntimeException("include file not found: " + fname); - } - return jjtThis; -} -} - - -SimpleNode Definition() : {} -{ - Const() - { - return jjtThis; - } -| Service() - { - return jjtThis; - } -| TypeDefinition() - { - return jjtThis; - } -} - -SimpleNode TypeDefinition() : {} -{ - Typedef() - { - return jjtThis; - } -| Enum() - { - return jjtThis; - } -| Senum() - { - return jjtThis; - } -| Struct() - { - return jjtThis; - } -| Xception() - { - return jjtThis; - } - -} - -DynamicSerDeTypedef Typedef() : {} -{ - - DefinitionType() - jjtThis.name = .image - { - // store the type for later retrieval - this.types.put(jjtThis.name, jjtThis); - return jjtThis; - } -} - - -// returning void because we ignore this production. -void CommaOrSemicolon() : {} -{ - "," -| - ";" -{ -} -} - -SimpleNode Enum() : {} -{ - "{" EnumDefList() "}" - { - return jjtThis; - } -} - -SimpleNode EnumDefList() : {} -{ - (EnumDef())+ - { - return jjtThis; - } -} - -SimpleNode EnumDef() : {} -{ - ["=" ] [CommaOrSemicolon()] - { - return jjtThis; - } -} - -SimpleNode Senum() : {} -{ - "{" SenumDefList() "}" - { - return jjtThis; - } -} - -SimpleNode SenumDefList() : {} -{ - (SenumDef())+ - { - return jjtThis; - } -} - -SimpleNode SenumDef() : {} -{ - [CommaOrSemicolon()] - { - return jjtThis; - } -} - - -SimpleNode Const() : {} -{ - FieldType() "=" ConstValue() [CommaOrSemicolon()] - { - return jjtThis; - } -} - -SimpleNode ConstValue() : {} -{ - - { - } -| - { - } -| - { - } -| - { - } -| ConstList() - { - } -| ConstMap() - { - return jjtThis; - } -} - -SimpleNode ConstList() : {} -{ - "[" ConstListContents() "]" - { - return jjtThis; - } -} - -SimpleNode ConstListContents() : {} -{ - (ConstValue() [CommaOrSemicolon()])+ - { - return jjtThis; - } -} - -SimpleNode ConstMap() : {} -{ - "{" ConstMapContents() "}" - { - return jjtThis; - } -} - -SimpleNode ConstMapContents() : {} -{ - (ConstValue() ":" ConstValue() [CommaOrSemicolon()])+ - { - } -| - { - return jjtThis; - } -} - -DynamicSerDeStruct Struct() : { - -} -{ - - jjtThis.name = .image - "{" - FieldList() - "}" - { - this.types.put(jjtThis.name,jjtThis); - return jjtThis; - } -} - - -SimpleNode Xception() : {} -{ - "{" FieldList() "}" - { - return jjtThis; - } -} - - -SimpleNode Service() : {} -{ - - - Extends() - "{" - FlagArgs() - (Function())+ - UnflagArgs() - "}" - { - // at some point, these should be inserted as a "db" - return jjtThis; - } -} - -SimpleNode FlagArgs() : {} -{ - { - return jjtThis; - } -} - -SimpleNode UnflagArgs() : {} -{ - { - return jjtThis; - } -} - -SimpleNode Extends() : {} -{ - - { - return jjtThis; - } -| - { - return jjtThis; - } -} - - -DynamicSerDeFunction Function() : {} -{ - // metastore ignores async and type - Async() - FunctionType() - - // the name of the function/table - jjtThis.name = .image - "(" - FieldList() - ")" - Throws() - [CommaOrSemicolon()] - - { - this.tables.put(jjtThis.name, jjtThis); - return jjtThis; - } -} - -void Async() : {} -{ - -| -{} -} - -void Throws() : {} -{ - "(" FieldList() ")" -| -{} -} - - -// nothing special - just use the DynamicSerDeFieldList's children methods to access the fields -DynamicSerDeFieldList FieldList() : { - this.field_val = -1; -} -{ - (Field())* { - return jjtThis; - } -} - - -DynamicSerDeField Field() : { - - String fidnum = ""; - String fid; 
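Note how the Field() production that continues after this declaration block numbers fields: an explicit "N:" prefix is honored, while untagged fields receive descending negative ids from the field_val counter that FieldList() resets to -1. A small illustration of the resulting numbering (not part of the grammar itself):

```java
public class FieldIdDemo {
    public static void main(String[] args) {
        int field_val = -1;        // FieldList() resets this before each field list
        int tagged = 4;            // "4: i32 tagged" keeps its explicit id
        int anon1 = field_val--;   // "i32 anon1"    gets -1
        int anon2 = field_val--;   // "string anon2" gets -2
        System.out.println(tagged + ", " + anon1 + ", " + anon2);  // prints: 4, -1, -2
    }
}
```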
-} -{ - - // parse the field id which is optional - [fidnum=.image ":"] - - // is this field required or optional? default is optional - FieldRequiredness() - - // field type - obviously not optional - FieldType() - - // the name of the field - not optional - jjtThis.name = .image - - // does it have = some value? - FieldValue() - - // take it or leave it - [CommaOrSemicolon()] - - { - if(fidnum.length() > 0) { - int fidInt = Integer.valueOf(fidnum); - jjtThis.fieldid = fidInt; - } else { - jjtThis.fieldid = this.field_val--; - } - return jjtThis; - } -} - - - -SimpleNode FieldRequiredness() : {} -{ - - { - return jjtThis; - } -| - { - return jjtThis; - } -| - { - return jjtThis; - } -} - -SimpleNode FieldValue() : {} -{ - "=" - ConstValue() - { - return jjtThis; - } -| -{ - return jjtThis; -} -} - -SimpleNode DefinitionType() : {} -{ -// BaseType() xxx - TypeString() - { - return jjtThis; - } -| TypeBool() - { - return jjtThis; - } -| Typei16() - { - return jjtThis; - } -| Typei32() - { - return jjtThis; - } -| Typei64() - { - return jjtThis; - } -| TypeDouble() - { - return jjtThis; - } -| TypeMap() - { - return jjtThis; - } -| TypeSet() - { - return jjtThis; - } -| TypeList() - { - return jjtThis; - } -} - -void FunctionType() : {} -{ - FieldType() -| -{} -} - -DynamicSerDeFieldType FieldType() : { -} - -{ - TypeString() - { - return jjtThis; - } -| TypeBool() - { - return jjtThis; - } -| Typei16() - { - return jjtThis; - } -| Typei32() - { - return jjtThis; - } -| Typei64() - { - return jjtThis; - } -| TypeDouble() - { - return jjtThis; - } -| - TypeMap() - { - return jjtThis; - } -| - TypeSet() - { - return jjtThis; - } -| - TypeList() - { - return jjtThis; - } -| - jjtThis.name = .image - { - return jjtThis; - } -} - -DynamicSerDeTypeString TypeString() : {} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypeByte TypeByte() : { -} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypei16 Typei16() : { -} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypei32 Typei32() : {} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypei64 Typei64() : {} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypeDouble TypeDouble() : {} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypeBool TypeBool() : {} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypeMap TypeMap() : {} -{ - - "<" - FieldType() - "," - FieldType() - ">" - { - return jjtThis; - } -} - -DynamicSerDeTypeSet TypeSet() : {} -{ - - "<" - - FieldType() - - ">" - { - return jjtThis; - } -} - -DynamicSerDeTypeList TypeList() : {} -{ - - "<" - - FieldType() - - ">" - { - return jjtThis; - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFunction.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFunction.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFunction.java (working copy) @@ -1,67 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType.*; - -public class DynamicSerDeFunction extends DynamicSerDeStructBase { - - // production is: Async() FunctionType() NAME FieldList() Throws() [CommaOrSemicolon] - - private final int FD_ASYNC = 0; - private final int FD_FUNCTION_TYPE = 1; - private final int FD_FIELD_LIST = 2; - private final int FD_THROWS = 3; - - public DynamicSerDeFunction(int i) { - super(i); - } - public DynamicSerDeFunction(thrift_grammar p, int i) { - super(p,i); - } - - public DynamicSerDeFieldList getFieldList() { - return (DynamicSerDeFieldList)this.jjtGetChild(FD_FIELD_LIST); - } - - public String toString() { - String result = "function " + this.name + " ("; - result += this.getFieldList().toString(); - result += ")"; - return result; - } - - public byte getType() { - return TMessageType.CALL; - } - -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFieldType.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFieldType.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFieldType.java (working copy) @@ -1,46 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import org.apache.hadoop.hive.serde.*; -import com.facebook.thrift.TException; -import com.facebook.thrift.protocol.TProtocol; - -// basically just a container for the real type so more like a proxy -public class DynamicSerDeFieldType extends DynamicSerDeSimpleNode { - - // production: this.name | BaseType() | MapType() | SetType() | ListType() - - private final int FD_FIELD_TYPE = 0; - public DynamicSerDeFieldType(int i) { - super(i); - } - public DynamicSerDeFieldType(thrift_grammar p, int i) { - super(p,i); - } - - protected DynamicSerDeTypeBase getMyType() { - // bugbug, need to deal with a named type here - i.e., look it up and proxy to it - // should raise an exception if this is a typedef since won't be any children - // and thus we can quickly find this comment and limitation. - return (DynamicSerDeTypeBase)this.jjtGetChild(FD_FIELD_TYPE); - } - - -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeBool.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeBool.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeBool.java (working copy) @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.serde.dynamic_type; - - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType; - -public class DynamicSerDeTypeBool extends DynamicSerDeTypeBase { - - // production is: bool - - - public DynamicSerDeTypeBool(int i) { - super(i); - } - public DynamicSerDeTypeBool(thrift_grammar p, int i) { - super(p,i); - } - - public String toString() { return "bool"; } - - public Boolean deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Boolean.valueOf(iprot.readBool()); - } - - public void serialize(Object s, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - oprot.writeBool((Boolean)s); - } - public byte getType() { - return TType.BOOL; - } -} - Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeSet.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeSet.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeSet.java (working copy) @@ -1,95 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType; - -public class DynamicSerDeTypeSet extends DynamicSerDeTypeBase { - - // production is: set - - static final private int FD_TYPE = 0; - - public DynamicSerDeTypeSet(int i) { - super(i); - } - public DynamicSerDeTypeSet(thrift_grammar p, int i) { - super(p,i); - } - - // returns Set - public Class getRealType() { - try { - Class c = this.getElementType().getRealType(); - Object o = c.newInstance(); - Set l = Collections.singleton(o); - return l.getClass(); - } catch (Exception e) { - e.printStackTrace(); - throw new RuntimeException(e); - } - } - - public DynamicSerDeTypeBase getElementType() { - return (DynamicSerDeTypeBase)((DynamicSerDeFieldType)this.jjtGetChild(FD_TYPE)).getMyType(); - } - - public String toString() { - return "set<" + this.getElementType().toString() + ">"; - } - - public Set deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - TSet theset = iprot.readSetBegin(); - Set result = new HashSet () ; - for(int i = 0; i < theset.size; i++) { - Object elem = this.getElementType().deserialize(iprot); - result.add(elem); - } - // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy - iprot.readSetEnd(); - return result; - } - - public void serialize(Object obj, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - Set set = (Set)obj; - DynamicSerDeTypeBase mt = this.getElementType(); - oprot.writeSetBegin(new TSet(mt.getType(),set.size())); - for(Object o: set) { - mt.serialize(o,oprot); - } - // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy - oprot.writeSetEnd(); - } - public byte getType() { - return TType.SET; - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSimpleNode.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSimpleNode.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSimpleNode.java (working copy) @@ -1,31 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-public class DynamicSerDeSimpleNode extends SimpleNode {
-  protected static final boolean thrift_mode = true;
-  public DynamicSerDeSimpleNode(int i) {
-    super(i);
-  }
-  public DynamicSerDeSimpleNode(thrift_grammar p, int i) {
-    super(p,i);
-  }
-  protected int fieldid;
-  protected String name;
-}
Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeHiveField.java
===================================================================
--- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeHiveField.java	(revision 712243)
+++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeHiveField.java	(working copy)
@@ -1,181 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-import org.apache.hadoop.hive.serde.*;
-import java.lang.reflect.*;
-
-/**
- * The default implementation of Hive Field based on Java Reflection.
- */ - -public class DynamicSerDeHiveField implements SerDeField { - - protected DynamicSerDeTypeBase _parentMetaType; - protected DynamicSerDeTypeBase _metaType; - protected DynamicSerDeTypeBase _valueMetaType; - protected DynamicSerDeTypeBase _keyMetaType; - - protected Class _parentClass; - protected Class _class; - protected String _fieldName; - protected boolean _isList; - protected boolean _isMap; - protected boolean _isClassPrimitive; - protected Class _valueClass; - protected Class _keyClass; - - - public static boolean isClassPrimitive(Class c) { - return ((c == String.class) || (c == Boolean.class) || - (c == Character.class) || - java.lang.Number.class.isAssignableFrom(c) || - c.isPrimitive()); - } - - public DynamicSerDeHiveField(DynamicSerDeStructBase parent, String fieldName) throws SerDeException { - try { - _parentClass = parent.getRealType(); - _parentMetaType = parent; - - _fieldName = fieldName; - _metaType = parent.getFieldList().getFieldByName(fieldName); - - _isList = _metaType.isList(); - _isMap = _metaType.isMap(); - _isClassPrimitive = _metaType.isPrimitive(); - - if(_isList) { - DynamicSerDeTypeList type = (DynamicSerDeTypeList)_metaType; - _valueClass = type.getElementType().getRealType(); - _valueMetaType = type.getElementType(); - } - if(_isMap) { - DynamicSerDeTypeMap type = (DynamicSerDeTypeMap)_metaType; - _keyClass = type.getKeyType().getRealType(); - _valueClass = type.getValueType().getRealType(); - _keyMetaType = type.getKeyType(); - _valueMetaType = type.getValueType(); - } - - _class = _metaType.getRealType(); - - if(_class == null) { - System.err.println("_metaType.getClass().getName()=" + _metaType.getClass().getName()); - throw new SerDeException("could not get the real type for " + _metaType.name + ":" + _metaType); - } - } catch (Exception e) { - e.printStackTrace(); - throw new SerDeException("Illegal class or member:" + e.getMessage()); - } - } - - public Object get(Object obj) throws SerDeException { - try { - DynamicSerDeTypeContainer container = (DynamicSerDeTypeContainer)obj; - return container.fields.get(_fieldName); - } catch (Exception e) { - throw new SerDeException("Illegal object or access error", e); - } - } - - public boolean isList() { - return _isList; - } - - public boolean isMap() { - return _isMap; - } - - public boolean isPrimitive() { - if(_isList || _isMap) - return false; - - return _isClassPrimitive; - } - - public Class getType() { - return _class; - } - - public DynamicSerDeTypeBase getMetaType() { - return _metaType; - } - - public DynamicSerDeTypeBase getListElementMetaType() { - if(_isList) { - return _valueMetaType; - } else { - throw new RuntimeException("Not a list field "); - } - } - - public DynamicSerDeTypeBase getMapKeyMetaType() { - if(_isMap) { - return _keyMetaType; - } else { - throw new RuntimeException("Not a list field "); - } - } - - public DynamicSerDeTypeBase getValueMetaType() { - if(_isMap) { - return _valueMetaType; - } else { - throw new RuntimeException("Not a list field "); - } - } - - public Class getListElementType() { - if(_isList) { - return _valueClass; - } else { - throw new RuntimeException("Not a list field "); - } - } - - public Class getMapKeyType() { - if(_isMap) { - return _keyClass; - } else { - throw new RuntimeException("Not a map field "); - } - } - - public Class getMapValueType() { - if(_isMap) { - return _valueClass; - } else { - throw new RuntimeException("Not a map field "); - } - } - - public String getName() { - return _fieldName; - } - - public static String 
fieldToString(SerDeField hf) { - return("Field= "+hf.getName() + - ", isPrimitive="+hf.isPrimitive()+ - ", isList="+hf.isList()+(hf.isList()?" of "+hf.getListElementType().getName():"")+ - ", isMap="+hf.isMap()+(hf.isMap()?" of <"+hf.getMapKeyType().getName()+"," - +hf.getMapValueType().getName()+">":"")+ - ", type="+hf.getType().getName()); - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypei32.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypei32.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypei32.java (working copy) @@ -1,63 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType; - -public class DynamicSerDeTypei32 extends DynamicSerDeTypeBase { - - // production is: i32 - - public DynamicSerDeTypei32(int i) { - super(i); - } - public DynamicSerDeTypei32(thrift_grammar p, int i) { - super(p,i); - } - - public String toString() { return "i32"; } - - public Object deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Integer.valueOf(iprot.readI32()); - } - - public void serialize(Object o, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - oprot.writeI32((Integer)o); - } - public Class getRealType() { return java.lang.Integer.class; } - public Integer getRealTypeInstance() { return Integer.valueOf(0); } - - - public byte getType() { - return TType.I32; - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypedef.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypedef.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypedef.java (working copy) @@ -1,95 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType.*; - -public class DynamicSerDeTypedef extends DynamicSerDeTypeBase { - - // production is: typedef DefinitionType() this.name - - private final static int FD_DEFINITION_TYPE = 0; - - public DynamicSerDeTypedef(int i) { - super(i); - } - public DynamicSerDeTypedef(thrift_grammar p, int i) { - super(p,i); - } - - private DynamicSerDeSimpleNode getDefinitionType() { - return (DynamicSerDeSimpleNode)this.jjtGetChild(FD_DEFINITION_TYPE); - } - - - public DynamicSerDeTypeBase getMyType() { - DynamicSerDeSimpleNode child = this.getDefinitionType(); - DynamicSerDeTypeBase ret = (DynamicSerDeTypeBase)child.jjtGetChild(0); - return ret; - } - - public String toString() { - String result = "typedef " + this.name + "("; - result += this.getDefinitionType().toString(); - result += ")"; - return result; - } - - /** - * - * - */ - - public List deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - throw new RuntimeException("not implemented"); - // return (List)this.getTypedef().deserialize(iprot); - } - - /** - * serialize - * - * The way to serialize a Thrift "table" which in thrift land is really a function and thus this class's name. - * - * @param o - this list should be in the order of the function's params for now. If we wanted to remove this requirement, - * we'd need to make it a List> with the String being the field name. 
- * - */ - public void serialize(List fields, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - throw new RuntimeException("not implemented"); - // this.getTypedef().serialize(fields, oprot); - } - - public byte getType() { - throw new RuntimeException("not implemented"); - } - -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypei16.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypei16.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypei16.java (working copy) @@ -1,61 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType; - -public class DynamicSerDeTypei16 extends DynamicSerDeTypeBase { - - public Class getRealType() { return Integer.valueOf(2).getClass(); } - - // production is: i16 - - public DynamicSerDeTypei16(int i) { - super(i); - } - public DynamicSerDeTypei16(thrift_grammar p, int i) { - super(p,i); - } - - public String toString() { return "i16"; } - - public Integer deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Integer.valueOf(iprot.readI16()); - } - - public void serialize(final Object s, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - oprot.writeI16((Short)s); - } - public byte getType() { - return TType.I16; - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypei64.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypei64.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypei64.java (working copy) @@ -1,61 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType; - -public class DynamicSerDeTypei64 extends DynamicSerDeTypeBase { - - public Class getRealType() { return Long.valueOf(0).getClass(); } - - // production is: i64 - - public DynamicSerDeTypei64(int i) { - super(i); - } - public DynamicSerDeTypei64(thrift_grammar p, int i) { - super(p,i); - } - - public String toString() { return "i64"; } - - public Long deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Long.valueOf(iprot.readI64()); - } - - public void serialize(Object s, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - oprot.writeI64((Long)s); - } - public byte getType() { - return TType.I64; - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeByte.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeByte.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeByte.java (working copy) @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.serde.dynamic_type; - - -import com.facebook.thrift.TException; -import com.facebook.thrift.TApplicationException; -import com.facebook.thrift.protocol.TBinaryProtocol; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.protocol.TProtocol; -import com.facebook.thrift.server.TServer; -import com.facebook.thrift.server.*; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.transport.TServerTransport; -import java.util.*; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import java.lang.reflect.*; -import com.facebook.thrift.protocol.TType; - -public class DynamicSerDeTypeByte extends DynamicSerDeTypeBase { - - // production is: byte - - - public DynamicSerDeTypeByte(int i) { - super(i); - } - public DynamicSerDeTypeByte(thrift_grammar p, int i) { - super(p,i); - } - - public String toString() { return "byte"; } - - public Byte deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Byte.valueOf(iprot.readByte()); - } - - public void serialize(Object s, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - // bugbug need to use object of byte type!!! - oprot.writeByte((Byte)s); - } - public byte getType() { - return TType.BYTE; - } -} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/simple_meta/MetadataTypedColumnsetSerDe.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/simple_meta/MetadataTypedColumnsetSerDe.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/simple_meta/MetadataTypedColumnsetSerDe.java (working copy) @@ -18,17 +18,36 @@ package org.apache.hadoop.hive.serde.simple_meta; -import org.apache.hadoop.hive.serde.*; -import org.apache.hadoop.hive.serde.thrift.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Properties; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.serde.ByteStreamTypedSerDe; +import org.apache.hadoop.hive.serde.ColumnSet; +import org.apache.hadoop.hive.serde.ComplexSerDeField; +import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.ExpressionUtils; +import org.apache.hadoop.hive.serde.ReflectionSerDeField; +import org.apache.hadoop.hive.serde.SerDe; +import org.apache.hadoop.hive.serde.SerDeException; +import org.apache.hadoop.hive.serde.SerDeField; +import org.apache.hadoop.hive.serde.SerDeUtils; +import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.util.StringUtils; + +import com.facebook.thrift.TBase; import com.facebook.thrift.TException; -import com.facebook.thrift.TBase; import com.facebook.thrift.TSerializer; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.transport.*; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.*; -import java.util.*; -import java.io.*; +import com.facebook.thrift.protocol.TBinaryProtocol; +import com.facebook.thrift.protocol.TProtocol; +import com.facebook.thrift.protocol.TProtocolFactory; +import com.facebook.thrift.protocol.TSimpleJSONProtocol; +import com.facebook.thrift.transport.TIOStreamTransport; @@ -51,6 +70,18 @@ // stores the columns in order private String _columns_list[]; + static { + StackTraceElement[] 
sTrace = new Exception().getStackTrace(); + String className = sTrace[0].getClassName(); + try { + // For backward compatibility: this class replaces the columnsetSerDe class. + SerDeUtils.registerSerDe(DynamicSerDe.class.getName(), + Class.forName(className)); + } catch(Exception e) { + throw new RuntimeException(e); + } + } + public String toString() { return "MetaDataTypedColumnsetSerDe[" + separator + "," + _columns + "]"; } @@ -79,7 +110,12 @@ public void initialize(Configuration job, Properties tbl) throws SerDeException { inStreaming = job.get("hive.streaming.select") != null; separator = DefaultSeparator; - String alt_sep = tbl.getProperty(Constants.SERIALIZATION_FORMAT); + String alt_sep = null; + if(DynamicSerDe.class.getName().equals(tbl.getProperty(Constants.SERIALIZATION_LIB))) { + alt_sep = tbl.getProperty(Constants.FIELD_DELIM); + } else { + alt_sep = tbl.getProperty(Constants.SERIALIZATION_FORMAT); + } if(alt_sep != null && alt_sep.length() > 0) { try { byte b [] = new byte[1]; @@ -125,8 +161,13 @@ ColumnSet c = cachedObj; try { try { - Text tw = (Text)field; - String row = tw.toString(); + String row = null; + if(field instanceof BytesWritable) { + row = new String(((BytesWritable) field).get(), 0, ((BytesWritable) field).getSize(), "UTF-8"); + } else { + Text tw = (Text)field; + row = tw.toString(); + } return(deserialize(c, row, separator, nullString)); } catch (ClassCastException e) { throw new SerDeException("columnsetSerDe expects Text", e); Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/SerDeUtils.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/SerDeUtils.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde/SerDeUtils.java (working copy) @@ -64,7 +64,7 @@ // Eagerly load SerDes so they will register their symbolic names even on Lazy Loading JVMs try { // loading these classes will automatically register the short names - Class.forName(org.apache.hadoop.hive.serde.dynamic_type.DynamicSerDe.class.getName()); + Class.forName(org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe.class.getName()); Class.forName(org.apache.hadoop.hive.serde.jute.JuteSerDe.class.getName()); Class.forName(org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe.class.getName()); Class.forName(org.apache.hadoop.hive.serde.thrift.columnsetSerDe.class.getName()); Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java (working copy) @@ -64,6 +64,9 @@ private List columnNames; private ObjectInspector cachedObjectInspector; + private boolean lastColumnTakesRest = false; + private int splitLimit = -1; + public String toString() { return "MetaDataTypedColumnsetSerDe[" + separator + "," + columnNames + "]"; } @@ -103,23 +106,36 @@ if (columnProperty == null || columnProperty.length() == 0 || columnsetSerDe) { // Hack for tables with no columns - // Treat it as a table with a single column called "col" + // Treat it as a table with a single column called "col" cachedObjectInspector = ObjectInspectorFactory.getReflectionObjectInspector( ColumnSet.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); } else { 
columnNames = Arrays.asList(columnProperty.split(",")); cachedObjectInspector = MetadataListStructObjectInspector.getInstance(columnNames); } - LOG.debug(getClass().getName() + ": initialized with columnNames: " + columnNames + " and separator code=" + (int)separator.charAt(0) ); + + String lastColumnTakesRestString = tbl.getProperty(Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST); + lastColumnTakesRest = (lastColumnTakesRestString != null && lastColumnTakesRestString.equalsIgnoreCase("true")); + splitLimit = (lastColumnTakesRest && columnNames != null) ? columnNames.size() : -1; + + LOG.debug(getClass().getName() + ": initialized with columnNames: " + columnNames + " and separator code=" + (int)separator.charAt(0) + + " lastColumnTakesRest=" + lastColumnTakesRest + " splitLimit=" + splitLimit); } - public static Object deserialize(ColumnSet c, String row, String sep, String nullString) throws Exception { + /** + * Split the row into columns. + * @param limit up to limit columns will be produced (the last column takes all the rest), -1 for unlimited. + * @return + * @throws Exception + */ + public static Object deserialize(ColumnSet c, String row, String sep, + String nullString, int limit) throws Exception { if (c.col == null) { c.col = new ArrayList(); } else { c.col.clear(); } - String [] l1 = row.split(sep, -1); + String [] l1 = row.split(sep, limit); for(String s: l1) { if (s.equals(nullString)) { @@ -145,7 +161,7 @@ row = field.toString(); } try { - deserialize(deserializeCache, row, separator, nullString); + deserialize(deserializeCache, row, separator, nullString, splitLimit); if (columnNames != null) { assert(columnNames.size() == deserializeCache.col.size()); } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarConstants.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarConstants.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarConstants.java (revision 0) @@ -0,0 +1,135 @@ +/* Generated By:JJTree&JavaCC: Do not edit this line. 
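The five-argument deserialize above delegates the column split to String.split(sep, limit), so when serialization.last.column.takes.rest is set, splitLimit equals the declared column count and the final column absorbs any remaining separators. A quick illustration with Hive's default ^A (\u0001) separator; the row content here is invented:

```java
import java.util.Arrays;

public class SplitLimitDemo {
    public static void main(String[] args) {
        String row = "a\u0001b\u0001c\u0001d";
        // limit -1 (the default splitLimit): every separator splits, trailing empties kept
        System.out.println(Arrays.toString(row.split("\u0001", -1))); // [a, b, c, d]
        // limit 3 (three declared columns, last column takes the rest):
        // the third entry keeps the embedded separator, i.e. "c" + '\001' + "d"
        System.out.println(Arrays.toString(row.split("\u0001", 3)));
    }
}
```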
thrift_grammarConstants.java */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +public interface thrift_grammarConstants { + + int EOF = 0; + int tok_const = 8; + int tok_namespace = 9; + int tok_cpp_namespace = 10; + int tok_cpp_include = 11; + int tok_cpp_type = 12; + int tok_java_package = 13; + int tok_cocoa_prefix = 14; + int tok_csharp_namespace = 15; + int tok_php_namespace = 16; + int tok_py_module = 17; + int tok_perl_package = 18; + int tok_ruby_namespace = 19; + int tok_smalltalk_category = 20; + int tok_smalltalk_prefix = 21; + int tok_xsd_all = 22; + int tok_xsd_optional = 23; + int tok_xsd_nillable = 24; + int tok_xsd_namespace = 25; + int tok_xsd_attrs = 26; + int tok_include = 27; + int tok_void = 28; + int tok_bool = 29; + int tok_byte = 30; + int tok_i16 = 31; + int tok_i32 = 32; + int tok_i64 = 33; + int tok_double = 34; + int tok_string = 35; + int tok_slist = 36; + int tok_senum = 37; + int tok_map = 38; + int tok_list = 39; + int tok_set = 40; + int tok_async = 41; + int tok_typedef = 42; + int tok_struct = 43; + int tok_exception = 44; + int tok_extends = 45; + int tok_throws = 46; + int tok_service = 47; + int tok_enum = 48; + int tok_required = 49; + int tok_optional = 50; + int tok_skip = 51; + int tok_int_constant = 52; + int tok_double_constant = 53; + int IDENTIFIER = 54; + int LETTER = 55; + int DIGIT = 56; + int tok_literal = 57; + int tok_st_identifier = 58; + + int DEFAULT = 0; + + String[] tokenImage = { + "", + "\" \"", + "\"\\t\"", + "\"\\n\"", + "\"\\r\"", + "", + "", + "", + "\"const\"", + "\"namespace\"", + "\"cpp_namespace\"", + "\"cpp_include\"", + "\"cpp_type\"", + "\"java_package\"", + "\"cocoa_prefix\"", + "\"csharp_namespace\"", + "\"php_namespace\"", + "\"py_module\"", + "\"perl_package\"", + "\"ruby_namespace\"", + "\"smalltalk_category\"", + "\"smalltalk_prefix\"", + "\"xsd_all\"", + "\"xsd_optional\"", + "\"xsd_nillable\"", + "\"xsd_namespace\"", + "\"xsd_attrs\"", + "\"include\"", + "\"void\"", + "\"bool\"", + "\"byte\"", + "\"i16\"", + "\"i32\"", + "\"i64\"", + "\"double\"", + "\"string\"", + "\"slist\"", + "\"senum\"", + "\"map\"", + "\"list\"", + "\"set\"", + "\"async\"", + "\"typedef\"", + "\"struct\"", + "\"exception\"", + "\"extends\"", + "\"throws\"", + "\"service\"", + "\"enum\"", + "\"required\"", + "\"optional\"", + "\"skip\"", + "", + "", + "", + "", + "", + "", + "", + "\",\"", + "\";\"", + "\"{\"", + "\"}\"", + "\"=\"", + "\"[\"", + "\"]\"", + "\":\"", + "\"(\"", + "\")\"", + "\"<\"", + "\">\"", + }; + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java (working copy) @@ -108,7 +108,7 @@ this.bt.initialize(); } catch (Exception e) { - System.out.println(StringUtils.stringifyException(e)); + System.err.println(StringUtils.stringifyException(e)); throw new SerDeException(e); } } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java (working copy) @@ -32,6 +32,10 
@@ private final int FD_FIELD_TYPE = 1; private final int FD_FIELD_VALUE =2; + public boolean isSkippable() { + return ((DynamicSerDeFieldRequiredness)this.jjtGetChild(FD_REQUIREDNESS)).getRequiredness() == DynamicSerDeFieldRequiredness.RequirednessTypes.Skippable; + } + public DynamicSerDeFieldType getFieldType() { return (DynamicSerDeFieldType)this.jjtGetChild(FD_FIELD_TYPE); } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeInclude.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeInclude.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeInclude.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeInclude.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeInclude extends SimpleNode { + public DynamicSerDeInclude(int id) { + super(id); + } + + public DynamicSerDeInclude(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFlagArgs.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFlagArgs.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFlagArgs.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeFlagArgs.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeFlagArgs extends SimpleNode { + public DynamicSerDeFlagArgs(int id) { + super(id); + } + + public DynamicSerDeFlagArgs(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenum.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenum.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenum.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeSenum.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeSenum extends SimpleNode { + public DynamicSerDeSenum(int id) { + super(id); + } + + public DynamicSerDeSenum(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunctionType.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunctionType.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunctionType.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeFunctionType.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeFunctionType extends SimpleNode { + public DynamicSerDeFunctionType(int id) { + super(id); + } + + public DynamicSerDeFunctionType(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinition.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinition.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinition.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeDefinition.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeDefinition extends SimpleNode { + public DynamicSerDeDefinition(int id) { + super(id); + } + + public DynamicSerDeDefinition(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeXception.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeXception.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeXception.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeXception.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeXception extends SimpleNode { + public DynamicSerDeXception(int id) { + super(id); + } + + public DynamicSerDeXception(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeNamespace.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeNamespace.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeNamespace.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeNamespace.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeNamespace extends SimpleNode { + public DynamicSerDeNamespace(int id) { + super(id); + } + + public DynamicSerDeNamespace(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBool.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBool.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBool.java (working copy) @@ -45,7 +45,12 @@ @Override public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Boolean.valueOf(iprot.readBool()); + boolean val = iprot.readBool(); + if (val == false && iprot instanceof org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol && + ((org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol)iprot).lastPrimitiveWasNull()) { + return null; + } + return Boolean.valueOf(val); } @Override @@ -61,4 +66,8 @@ public byte getType() { return TType.BOOL; } + + public Class getRealType() { return java.lang.Boolean.class; } + public Boolean getRealTypeInstance() { return Boolean.FALSE; } + } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei32.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei32.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei32.java (working copy) @@ -47,7 +47,12 @@ @Override public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Integer.valueOf(iprot.readI32()); + int val = iprot.readI32(); + if (val == 0 && iprot instanceof org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol && + ((org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol)iprot).lastPrimitiveWasNull()) { + return null; + } + return Integer.valueOf(val); } @Override Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei16.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei16.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei16.java (working copy) @@ -49,7 +49,12 @@ @Override public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Integer.valueOf(iprot.readI16()); + int val = iprot.readI16(); + if (val == 0 && iprot instanceof org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol && + ((org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol)iprot).lastPrimitiveWasNull()) { + return null; + } + return Integer.valueOf(val); } @Override Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java (revision 0) @@ -0,0 
+1,192 @@ +/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +/** + * This exception is thrown when parse errors are encountered. + * You can explicitly create objects of this exception type by + * calling the method generateParseException in the generated + * parser. + * + * You can modify this class to customize your error reporting + * mechanisms so long as you retain the public fields. + */ +public class ParseException extends Exception { + + /** + * This constructor is used by the method "generateParseException" + * in the generated parser. Calling this constructor generates + * a new object of this type with the fields "currentToken", + * "expectedTokenSequences", and "tokenImage" set. The boolean + * flag "specialConstructor" is also set to true to indicate that + * this constructor was used to create this object. + * This constructor calls its super class with the empty string + * to force the "toString" method of parent class "Throwable" to + * print the error message in the form: + * ParseException: + */ + public ParseException(Token currentTokenVal, + int[][] expectedTokenSequencesVal, + String[] tokenImageVal + ) + { + super(""); + specialConstructor = true; + currentToken = currentTokenVal; + expectedTokenSequences = expectedTokenSequencesVal; + tokenImage = tokenImageVal; + } + + /** + * The following constructors are for use by you for whatever + * purpose you can think of. Constructing the exception in this + * manner makes the exception behave in the normal way - i.e., as + * documented in the class "Throwable". The fields "errorToken", + * "expectedTokenSequences", and "tokenImage" do not contain + * relevant information. The JavaCC generated code does not use + * these constructors. + */ + + public ParseException() { + super(); + specialConstructor = false; + } + + public ParseException(String message) { + super(message); + specialConstructor = false; + } + + /** + * This variable determines which constructor was used to create + * this object and thereby affects the semantics of the + * "getMessage" method (see below). + */ + protected boolean specialConstructor; + + /** + * This is the last token that has been consumed successfully. If + * this object has been created due to a parse error, the token + * following this token will (therefore) be the first error token. + */ + public Token currentToken; + + /** + * Each entry in this array is an array of integers. Each array + * of integers represents a sequence of tokens (by their ordinal + * values) that is expected at this point of the parse. + */ + public int[][] expectedTokenSequences; + + /** + * This is a reference to the "tokenImage" array of the generated + * parser within which the parse error occurred. This array is + * defined in the generated ...Constants interface. + */ + public String[] tokenImage; + + /** + * This method has the standard behavior when this object has been + * created using the standard constructors. Otherwise, it uses + * "currentToken" and "expectedTokenSequences" to generate a parse + * error message and returns it. If this object has been created + * due to a parse error, and you do not catch it (it gets thrown + * from the parser), then this method is called during the printing + * of the final stack trace, and hence the correct error message + * gets displayed. 
+ */ + public String getMessage() { + if (!specialConstructor) { + return super.getMessage(); + } + StringBuffer expected = new StringBuffer(); + int maxSize = 0; + for (int i = 0; i < expectedTokenSequences.length; i++) { + if (maxSize < expectedTokenSequences[i].length) { + maxSize = expectedTokenSequences[i].length; + } + for (int j = 0; j < expectedTokenSequences[i].length; j++) { + expected.append(tokenImage[expectedTokenSequences[i][j]]).append(" "); + } + if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) { + expected.append("..."); + } + expected.append(eol).append(" "); + } + String retval = "Encountered \""; + Token tok = currentToken.next; + for (int i = 0; i < maxSize; i++) { + if (i != 0) retval += " "; + if (tok.kind == 0) { + retval += tokenImage[0]; + break; + } + retval += add_escapes(tok.image); + tok = tok.next; + } + retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn; + retval += "." + eol; + if (expectedTokenSequences.length == 1) { + retval += "Was expecting:" + eol + " "; + } else { + retval += "Was expecting one of:" + eol + " "; + } + retval += expected.toString(); + return retval; + } + + /** + * The end of line string for this machine. + */ + protected String eol = System.getProperty("line.separator", "\n"); + + /** + * Used to convert raw characters to their escaped versions + * when these raw versions cannot be used as part of an ASCII + * string literal. + */ + protected String add_escapes(String str) { + StringBuffer retval = new StringBuffer(); + char ch; + for (int i = 0; i < str.length(); i++) { + switch (str.charAt(i)) + { + case 0 : + continue; + case '\b': + retval.append("\\b"); + continue; + case '\t': + retval.append("\\t"); + continue; + case '\n': + retval.append("\\n"); + continue; + case '\f': + retval.append("\\f"); + continue; + case '\r': + retval.append("\\r"); + continue; + case '\"': + retval.append("\\\""); + continue; + case '\'': + retval.append("\\\'"); + continue; + case '\\': + retval.append("\\\\"); + continue; + default: + if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { + String s = "0000" + Integer.toString(ch, 16); + retval.append("\\u" + s.substring(s.length() - 4, s.length())); + } else { + retval.append(ch); + } + continue; + } + } + return retval.toString(); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei64.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei64.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei64.java (working copy) @@ -49,7 +49,12 @@ @Override public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Long.valueOf(iprot.readI64()); + long val = iprot.readI64(); + if (val == 0 && iprot instanceof org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol && + ((org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol)iprot).lastPrimitiveWasNull()) { + return null; + } + return Long.valueOf(val); } @Override Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDef.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDef.java (revision 0) +++ 
src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDef.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeSenumDef.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeSenumDef extends SimpleNode { + public DynamicSerDeSenumDef(int id) { + super(id); + } + + public DynamicSerDeSenumDef(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeString.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeString.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeString.java (working copy) @@ -47,14 +47,14 @@ public String toString() { return "string"; } public String deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return String.valueOf(iprot.readString()); + return iprot.readString(); } @Override public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return String.valueOf(iprot.readString()); + return iprot.readString(); } - + public void serialize(Object s, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { oprot.writeString((String)s); } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTokenManager.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTokenManager.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTokenManager.java (revision 0) @@ -0,0 +1,1461 @@ +/* Generated By:JJTree&JavaCC: Do not edit this line. 
thrift_grammarTokenManager.java */ +package org.apache.hadoop.hive.serde2.dynamic_type; +import java.util.*; +import java.io.*; +import java.net.*; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.transport.*; +import org.apache.hadoop.hive.serde2.dynamic_type.*; + +public class thrift_grammarTokenManager implements thrift_grammarConstants +{ + public java.io.PrintStream debugStream = System.out; + public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; } +private final int jjStopStringLiteralDfa_0(int pos, long active0, long active1) +{ + switch (pos) + { + case 0: + if ((active0 & 0xfffffffffff00L) != 0L) + { + jjmatchedKind = 54; + return 35; + } + return -1; + case 1: + if ((active0 & 0xfffffffffff00L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 1; + return 35; + } + return -1; + case 2: + if ((active0 & 0x14380000000L) != 0L) + return 35; + if ((active0 & 0xffebc7fffff00L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 2; + return 35; + } + return -1; + case 3: + if ((active0 & 0x9008070000000L) != 0L) + return 35; + if ((active0 & 0x6fe3c0fffff00L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 3; + return 35; + } + return -1; + case 4: + if ((active0 & 0x23000000100L) != 0L) + return 35; + if ((active0 & 0x6fc0c0ffffe00L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 4; + return 35; + } + return -1; + case 5: + if ((active0 & 0x480c00000000L) != 0L) + return 35; + if ((active0 & 0x6b4000ffffe00L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 5; + return 35; + } + return -1; + case 6: + if ((active0 & 0x6100007bffe00L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 6; + return 35; + } + if ((active0 & 0xa40008400000L) != 0L) + return 35; + return -1; + case 7: + if ((active0 & 0x100007bfee00L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 7; + return 35; + } + if ((active0 & 0x6000000001000L) != 0L) + return 35; + return -1; + case 8: + if ((active0 & 0x3bdec00L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 8; + return 35; + } + if ((active0 & 0x100004020200L) != 0L) + return 35; + return -1; + case 9: + if ((active0 & 0x3bdec00L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 9; + return 35; + } + return -1; + case 10: + if ((active0 & 0x800L) != 0L) + return 35; + if ((active0 & 0x3bde400L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 10; + return 35; + } + return -1; + case 11: + if ((active0 & 0x1846000L) != 0L) + return 35; + if ((active0 & 0x2398400L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 11; + return 35; + } + return -1; + case 12: + if ((active0 & 0x2010400L) != 0L) + return 35; + if ((active0 & 0x388000L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 12; + return 35; + } + return -1; + case 13: + if ((active0 & 0x80000L) != 0L) + return 35; + if ((active0 & 0x308000L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 13; + return 35; + } + return -1; + case 14: + if ((active0 & 0x308000L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 14; + return 35; + } + return -1; + case 15: + if ((active0 & 0x208000L) != 0L) + return 35; + if ((active0 & 0x100000L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 15; + return 35; + } + return -1; + case 16: + if ((active0 & 0x100000L) != 0L) + { + jjmatchedKind = 54; + jjmatchedPos = 16; + return 35; + } + return -1; + default : + return -1; + } +} +private final int jjStartNfa_0(int pos, long active0, long active1) +{ + return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0, active1), pos + 1); +} +private final int jjStopAtPos(int pos, int 
kind) +{ + jjmatchedKind = kind; + jjmatchedPos = pos; + return pos + 1; +} +private final int jjStartNfaWithStates_0(int pos, int kind, int state) +{ + jjmatchedKind = kind; + jjmatchedPos = pos; + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { return pos + 1; } + return jjMoveNfa_0(state, pos + 1); +} +private final int jjMoveStringLiteralDfa0_0() +{ + switch(curChar) + { + case 40: + return jjStopAtPos(0, 67); + case 41: + return jjStopAtPos(0, 68); + case 44: + return jjStopAtPos(0, 59); + case 58: + return jjStopAtPos(0, 66); + case 59: + return jjStopAtPos(0, 60); + case 60: + return jjStopAtPos(0, 69); + case 61: + return jjStopAtPos(0, 63); + case 62: + return jjStopAtPos(0, 70); + case 91: + return jjStopAtPos(0, 64); + case 93: + return jjStopAtPos(0, 65); + case 97: + return jjMoveStringLiteralDfa1_0(0x20000000000L); + case 98: + return jjMoveStringLiteralDfa1_0(0x60000000L); + case 99: + return jjMoveStringLiteralDfa1_0(0xdd00L); + case 100: + return jjMoveStringLiteralDfa1_0(0x400000000L); + case 101: + return jjMoveStringLiteralDfa1_0(0x1300000000000L); + case 105: + return jjMoveStringLiteralDfa1_0(0x388000000L); + case 106: + return jjMoveStringLiteralDfa1_0(0x2000L); + case 108: + return jjMoveStringLiteralDfa1_0(0x8000000000L); + case 109: + return jjMoveStringLiteralDfa1_0(0x4000000000L); + case 110: + return jjMoveStringLiteralDfa1_0(0x200L); + case 111: + return jjMoveStringLiteralDfa1_0(0x4000000000000L); + case 112: + return jjMoveStringLiteralDfa1_0(0x70000L); + case 114: + return jjMoveStringLiteralDfa1_0(0x2000000080000L); + case 115: + return jjMoveStringLiteralDfa1_0(0x8893800300000L); + case 116: + return jjMoveStringLiteralDfa1_0(0x440000000000L); + case 118: + return jjMoveStringLiteralDfa1_0(0x10000000L); + case 120: + return jjMoveStringLiteralDfa1_0(0x7c00000L); + case 123: + return jjStopAtPos(0, 61); + case 125: + return jjStopAtPos(0, 62); + default : + return jjMoveNfa_0(0, 0); + } +} +private final int jjMoveStringLiteralDfa1_0(long active0) +{ + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(0, active0, 0L); + return 1; + } + switch(curChar) + { + case 49: + return jjMoveStringLiteralDfa2_0(active0, 0x80000000L); + case 51: + return jjMoveStringLiteralDfa2_0(active0, 0x100000000L); + case 54: + return jjMoveStringLiteralDfa2_0(active0, 0x200000000L); + case 97: + return jjMoveStringLiteralDfa2_0(active0, 0x4000002200L); + case 101: + return jjMoveStringLiteralDfa2_0(active0, 0x2812000040000L); + case 104: + return jjMoveStringLiteralDfa2_0(active0, 0x400000010000L); + case 105: + return jjMoveStringLiteralDfa2_0(active0, 0x8000000000L); + case 107: + return jjMoveStringLiteralDfa2_0(active0, 0x8000000000000L); + case 108: + return jjMoveStringLiteralDfa2_0(active0, 0x1000000000L); + case 109: + return jjMoveStringLiteralDfa2_0(active0, 0x300000L); + case 110: + return jjMoveStringLiteralDfa2_0(active0, 0x1000008000000L); + case 111: + return jjMoveStringLiteralDfa2_0(active0, 0x430004100L); + case 112: + return jjMoveStringLiteralDfa2_0(active0, 0x4000000001c00L); + case 115: + return jjMoveStringLiteralDfa2_0(active0, 0x20007c08000L); + case 116: + return jjMoveStringLiteralDfa2_0(active0, 0x80800000000L); + case 117: + return jjMoveStringLiteralDfa2_0(active0, 0x80000L); + case 120: + return jjMoveStringLiteralDfa2_0(active0, 0x300000000000L); + case 121: + return jjMoveStringLiteralDfa2_0(active0, 0x40040020000L); + default : + break; + } + return jjStartNfa_0(0, 
active0, 0L); +} +private final int jjMoveStringLiteralDfa2_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(0, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(1, active0, 0L); + return 2; + } + switch(curChar) + { + case 50: + if ((active0 & 0x100000000L) != 0L) + return jjStartNfaWithStates_0(2, 32, 35); + break; + case 52: + if ((active0 & 0x200000000L) != 0L) + return jjStartNfaWithStates_0(2, 33, 35); + break; + case 54: + if ((active0 & 0x80000000L) != 0L) + return jjStartNfaWithStates_0(2, 31, 35); + break; + case 95: + return jjMoveStringLiteralDfa3_0(active0, 0x20000L); + case 97: + return jjMoveStringLiteralDfa3_0(active0, 0x300000L); + case 98: + return jjMoveStringLiteralDfa3_0(active0, 0x80000L); + case 99: + return jjMoveStringLiteralDfa3_0(active0, 0x100008004000L); + case 100: + return jjMoveStringLiteralDfa3_0(active0, 0x7c00000L); + case 104: + return jjMoveStringLiteralDfa3_0(active0, 0x8000L); + case 105: + return jjMoveStringLiteralDfa3_0(active0, 0x8001010000000L); + case 109: + return jjMoveStringLiteralDfa3_0(active0, 0x200L); + case 110: + return jjMoveStringLiteralDfa3_0(active0, 0x2000000100L); + case 111: + return jjMoveStringLiteralDfa3_0(active0, 0x20000000L); + case 112: + if ((active0 & 0x4000000000L) != 0L) + return jjStartNfaWithStates_0(2, 38, 35); + return jjMoveStringLiteralDfa3_0(active0, 0x40000011c00L); + case 113: + return jjMoveStringLiteralDfa3_0(active0, 0x2000000000000L); + case 114: + return jjMoveStringLiteralDfa3_0(active0, 0xc80800040000L); + case 115: + return jjMoveStringLiteralDfa3_0(active0, 0x8000000000L); + case 116: + if ((active0 & 0x10000000000L) != 0L) + return jjStartNfaWithStates_0(2, 40, 35); + return jjMoveStringLiteralDfa3_0(active0, 0x4200040000000L); + case 117: + return jjMoveStringLiteralDfa3_0(active0, 0x1000400000000L); + case 118: + return jjMoveStringLiteralDfa3_0(active0, 0x2000L); + case 121: + return jjMoveStringLiteralDfa3_0(active0, 0x20000000000L); + default : + break; + } + return jjStartNfa_0(1, active0, 0L); +} +private final int jjMoveStringLiteralDfa3_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(1, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(2, active0, 0L); + return 3; + } + switch(curChar) + { + case 95: + return jjMoveStringLiteralDfa4_0(active0, 0x7c11c00L); + case 97: + return jjMoveStringLiteralDfa4_0(active0, 0xa000L); + case 98: + return jjMoveStringLiteralDfa4_0(active0, 0x400000000L); + case 100: + if ((active0 & 0x10000000L) != 0L) + return jjStartNfaWithStates_0(3, 28, 35); + break; + case 101: + if ((active0 & 0x40000000L) != 0L) + return jjStartNfaWithStates_0(3, 30, 35); + return jjMoveStringLiteralDfa4_0(active0, 0x340000000200L); + case 105: + return jjMoveStringLiteralDfa4_0(active0, 0x4000800000000L); + case 108: + if ((active0 & 0x20000000L) != 0L) + return jjStartNfaWithStates_0(3, 29, 35); + return jjMoveStringLiteralDfa4_0(active0, 0x8340000L); + case 109: + if ((active0 & 0x1000000000000L) != 0L) + return jjStartNfaWithStates_0(3, 48, 35); + return jjMoveStringLiteralDfa4_0(active0, 0x20000L); + case 110: + return jjMoveStringLiteralDfa4_0(active0, 0x20000000000L); + case 111: + return jjMoveStringLiteralDfa4_0(active0, 0x400000004000L); + case 112: + if ((active0 & 0x8000000000000L) != 0L) + return jjStartNfaWithStates_0(3, 51, 35); + break; + case 115: + return 
jjMoveStringLiteralDfa4_0(active0, 0x1000000100L); + case 116: + if ((active0 & 0x8000000000L) != 0L) + return jjStartNfaWithStates_0(3, 39, 35); + break; + case 117: + return jjMoveStringLiteralDfa4_0(active0, 0x2082000000000L); + case 118: + return jjMoveStringLiteralDfa4_0(active0, 0x800000000000L); + case 121: + return jjMoveStringLiteralDfa4_0(active0, 0x80000L); + default : + break; + } + return jjStartNfa_0(2, active0, 0L); +} +private final int jjMoveStringLiteralDfa4_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(2, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(3, active0, 0L); + return 4; + } + switch(curChar) + { + case 95: + return jjMoveStringLiteralDfa5_0(active0, 0xc2000L); + case 97: + return jjMoveStringLiteralDfa5_0(active0, 0x4404000L); + case 99: + if ((active0 & 0x20000000000L) != 0L) + return jjStartNfaWithStates_0(4, 41, 35); + return jjMoveStringLiteralDfa5_0(active0, 0x80000000000L); + case 100: + return jjMoveStringLiteralDfa5_0(active0, 0x40000000000L); + case 105: + return jjMoveStringLiteralDfa5_0(active0, 0x2800000000800L); + case 108: + return jjMoveStringLiteralDfa5_0(active0, 0x400300000L); + case 109: + if ((active0 & 0x2000000000L) != 0L) + return jjStartNfaWithStates_0(4, 37, 35); + break; + case 110: + return jjMoveStringLiteralDfa5_0(active0, 0x200803010400L); + case 111: + return jjMoveStringLiteralDfa5_0(active0, 0x4000000820000L); + case 112: + return jjMoveStringLiteralDfa5_0(active0, 0x100000000000L); + case 114: + return jjMoveStringLiteralDfa5_0(active0, 0x8000L); + case 115: + return jjMoveStringLiteralDfa5_0(active0, 0x200L); + case 116: + if ((active0 & 0x100L) != 0L) + return jjStartNfaWithStates_0(4, 8, 35); + else if ((active0 & 0x1000000000L) != 0L) + return jjStartNfaWithStates_0(4, 36, 35); + return jjMoveStringLiteralDfa5_0(active0, 0x1000L); + case 117: + return jjMoveStringLiteralDfa5_0(active0, 0x8000000L); + case 119: + return jjMoveStringLiteralDfa5_0(active0, 0x400000000000L); + default : + break; + } + return jjStartNfa_0(3, active0, 0L); +} +private final int jjMoveStringLiteralDfa5_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(3, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(4, active0, 0L); + return 5; + } + switch(curChar) + { + case 95: + return jjMoveStringLiteralDfa6_0(active0, 0x4000L); + case 97: + return jjMoveStringLiteralDfa6_0(active0, 0x2010400L); + case 99: + return jjMoveStringLiteralDfa6_0(active0, 0x800000000000L); + case 100: + return jjMoveStringLiteralDfa6_0(active0, 0x200008020000L); + case 101: + if ((active0 & 0x400000000L) != 0L) + return jjStartNfaWithStates_0(5, 34, 35); + return jjMoveStringLiteralDfa6_0(active0, 0x40000000000L); + case 103: + if ((active0 & 0x800000000L) != 0L) + return jjStartNfaWithStates_0(5, 35, 35); + break; + case 105: + return jjMoveStringLiteralDfa6_0(active0, 0x1000000L); + case 108: + return jjMoveStringLiteralDfa6_0(active0, 0x400000L); + case 110: + return jjMoveStringLiteralDfa6_0(active0, 0x4000000080800L); + case 112: + return jjMoveStringLiteralDfa6_0(active0, 0x84a200L); + case 114: + return jjMoveStringLiteralDfa6_0(active0, 0x2000000000000L); + case 115: + if ((active0 & 0x400000000000L) != 0L) + return jjStartNfaWithStates_0(5, 46, 35); + break; + case 116: + if ((active0 & 0x80000000000L) != 0L) + return jjStartNfaWithStates_0(5, 43, 35); + return 
jjMoveStringLiteralDfa6_0(active0, 0x100004300000L); + case 121: + return jjMoveStringLiteralDfa6_0(active0, 0x1000L); + default : + break; + } + return jjStartNfa_0(4, active0, 0L); +} +private final int jjMoveStringLiteralDfa6_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(4, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(5, active0, 0L); + return 6; + } + switch(curChar) + { + case 95: + return jjMoveStringLiteralDfa7_0(active0, 0x8000L); + case 97: + return jjMoveStringLiteralDfa7_0(active0, 0x40000003c2200L); + case 99: + return jjMoveStringLiteralDfa7_0(active0, 0x800L); + case 101: + if ((active0 & 0x8000000L) != 0L) + return jjStartNfaWithStates_0(6, 27, 35); + else if ((active0 & 0x800000000000L) != 0L) + return jjStartNfaWithStates_0(6, 47, 35); + return jjMoveStringLiteralDfa7_0(active0, 0x2000000000000L); + case 102: + if ((active0 & 0x40000000000L) != 0L) + return jjStartNfaWithStates_0(6, 42, 35); + break; + case 105: + return jjMoveStringLiteralDfa7_0(active0, 0x100000000000L); + case 108: + if ((active0 & 0x400000L) != 0L) + return jjStartNfaWithStates_0(6, 22, 35); + return jjMoveStringLiteralDfa7_0(active0, 0x1000000L); + case 109: + return jjMoveStringLiteralDfa7_0(active0, 0x2010400L); + case 112: + return jjMoveStringLiteralDfa7_0(active0, 0x5000L); + case 115: + if ((active0 & 0x200000000000L) != 0L) + return jjStartNfaWithStates_0(6, 45, 35); + break; + case 116: + return jjMoveStringLiteralDfa7_0(active0, 0x4800000L); + case 117: + return jjMoveStringLiteralDfa7_0(active0, 0x20000L); + default : + break; + } + return jjStartNfa_0(5, active0, 0L); +} +private final int jjMoveStringLiteralDfa7_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(5, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(6, active0, 0L); + return 7; + } + switch(curChar) + { + case 99: + return jjMoveStringLiteralDfa8_0(active0, 0x42200L); + case 100: + if ((active0 & 0x2000000000000L) != 0L) + return jjStartNfaWithStates_0(7, 49, 35); + break; + case 101: + if ((active0 & 0x1000L) != 0L) + return jjStartNfaWithStates_0(7, 12, 35); + return jjMoveStringLiteralDfa8_0(active0, 0x2010400L); + case 105: + return jjMoveStringLiteralDfa8_0(active0, 0x800000L); + case 108: + if ((active0 & 0x4000000000000L) != 0L) + return jjStartNfaWithStates_0(7, 50, 35); + return jjMoveStringLiteralDfa8_0(active0, 0x1320800L); + case 109: + return jjMoveStringLiteralDfa8_0(active0, 0x80000L); + case 110: + return jjMoveStringLiteralDfa8_0(active0, 0x8000L); + case 111: + return jjMoveStringLiteralDfa8_0(active0, 0x100000000000L); + case 114: + return jjMoveStringLiteralDfa8_0(active0, 0x4004000L); + default : + break; + } + return jjStartNfa_0(6, active0, 0L); +} +private final int jjMoveStringLiteralDfa8_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(6, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(7, active0, 0L); + return 8; + } + switch(curChar) + { + case 97: + return jjMoveStringLiteralDfa9_0(active0, 0x1008000L); + case 101: + if ((active0 & 0x200L) != 0L) + return jjStartNfaWithStates_0(8, 9, 35); + else if ((active0 & 0x20000L) != 0L) + return jjStartNfaWithStates_0(8, 17, 35); + return jjMoveStringLiteralDfa9_0(active0, 0x84000L); + case 107: + return jjMoveStringLiteralDfa9_0(active0, 
0x342000L); + case 110: + if ((active0 & 0x100000000000L) != 0L) + return jjStartNfaWithStates_0(8, 44, 35); + break; + case 111: + return jjMoveStringLiteralDfa9_0(active0, 0x800000L); + case 115: + if ((active0 & 0x4000000L) != 0L) + return jjStartNfaWithStates_0(8, 26, 35); + return jjMoveStringLiteralDfa9_0(active0, 0x2010400L); + case 117: + return jjMoveStringLiteralDfa9_0(active0, 0x800L); + default : + break; + } + return jjStartNfa_0(7, active0, 0L); +} +private final int jjMoveStringLiteralDfa9_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(7, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(8, active0, 0L); + return 9; + } + switch(curChar) + { + case 95: + return jjMoveStringLiteralDfa10_0(active0, 0x300000L); + case 97: + return jjMoveStringLiteralDfa10_0(active0, 0x42000L); + case 98: + return jjMoveStringLiteralDfa10_0(active0, 0x1000000L); + case 100: + return jjMoveStringLiteralDfa10_0(active0, 0x800L); + case 102: + return jjMoveStringLiteralDfa10_0(active0, 0x4000L); + case 109: + return jjMoveStringLiteralDfa10_0(active0, 0x8000L); + case 110: + return jjMoveStringLiteralDfa10_0(active0, 0x800000L); + case 112: + return jjMoveStringLiteralDfa10_0(active0, 0x2010400L); + case 115: + return jjMoveStringLiteralDfa10_0(active0, 0x80000L); + default : + break; + } + return jjStartNfa_0(8, active0, 0L); +} +private final int jjMoveStringLiteralDfa10_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(8, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(9, active0, 0L); + return 10; + } + switch(curChar) + { + case 97: + return jjMoveStringLiteralDfa11_0(active0, 0x2810400L); + case 99: + return jjMoveStringLiteralDfa11_0(active0, 0x100000L); + case 101: + if ((active0 & 0x800L) != 0L) + return jjStartNfaWithStates_0(10, 11, 35); + return jjMoveStringLiteralDfa11_0(active0, 0x8000L); + case 103: + return jjMoveStringLiteralDfa11_0(active0, 0x42000L); + case 105: + return jjMoveStringLiteralDfa11_0(active0, 0x4000L); + case 108: + return jjMoveStringLiteralDfa11_0(active0, 0x1000000L); + case 112: + return jjMoveStringLiteralDfa11_0(active0, 0x280000L); + default : + break; + } + return jjStartNfa_0(9, active0, 0L); +} +private final int jjMoveStringLiteralDfa11_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(9, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(10, active0, 0L); + return 11; + } + switch(curChar) + { + case 97: + return jjMoveStringLiteralDfa12_0(active0, 0x180000L); + case 99: + return jjMoveStringLiteralDfa12_0(active0, 0x2010400L); + case 101: + if ((active0 & 0x2000L) != 0L) + return jjStartNfaWithStates_0(11, 13, 35); + else if ((active0 & 0x40000L) != 0L) + return jjStartNfaWithStates_0(11, 18, 35); + else if ((active0 & 0x1000000L) != 0L) + return jjStartNfaWithStates_0(11, 24, 35); + break; + case 108: + if ((active0 & 0x800000L) != 0L) + return jjStartNfaWithStates_0(11, 23, 35); + break; + case 114: + return jjMoveStringLiteralDfa12_0(active0, 0x200000L); + case 115: + return jjMoveStringLiteralDfa12_0(active0, 0x8000L); + case 120: + if ((active0 & 0x4000L) != 0L) + return jjStartNfaWithStates_0(11, 14, 35); + break; + default : + break; + } + return jjStartNfa_0(10, active0, 0L); +} +private final int jjMoveStringLiteralDfa12_0(long old0, long 
active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(10, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(11, active0, 0L); + return 12; + } + switch(curChar) + { + case 99: + return jjMoveStringLiteralDfa13_0(active0, 0x80000L); + case 101: + if ((active0 & 0x400L) != 0L) + return jjStartNfaWithStates_0(12, 10, 35); + else if ((active0 & 0x10000L) != 0L) + return jjStartNfaWithStates_0(12, 16, 35); + else if ((active0 & 0x2000000L) != 0L) + return jjStartNfaWithStates_0(12, 25, 35); + return jjMoveStringLiteralDfa13_0(active0, 0x200000L); + case 112: + return jjMoveStringLiteralDfa13_0(active0, 0x8000L); + case 116: + return jjMoveStringLiteralDfa13_0(active0, 0x100000L); + default : + break; + } + return jjStartNfa_0(11, active0, 0L); +} +private final int jjMoveStringLiteralDfa13_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(11, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(12, active0, 0L); + return 13; + } + switch(curChar) + { + case 97: + return jjMoveStringLiteralDfa14_0(active0, 0x8000L); + case 101: + if ((active0 & 0x80000L) != 0L) + return jjStartNfaWithStates_0(13, 19, 35); + return jjMoveStringLiteralDfa14_0(active0, 0x100000L); + case 102: + return jjMoveStringLiteralDfa14_0(active0, 0x200000L); + default : + break; + } + return jjStartNfa_0(12, active0, 0L); +} +private final int jjMoveStringLiteralDfa14_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(12, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(13, active0, 0L); + return 14; + } + switch(curChar) + { + case 99: + return jjMoveStringLiteralDfa15_0(active0, 0x8000L); + case 103: + return jjMoveStringLiteralDfa15_0(active0, 0x100000L); + case 105: + return jjMoveStringLiteralDfa15_0(active0, 0x200000L); + default : + break; + } + return jjStartNfa_0(13, active0, 0L); +} +private final int jjMoveStringLiteralDfa15_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(13, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(14, active0, 0L); + return 15; + } + switch(curChar) + { + case 101: + if ((active0 & 0x8000L) != 0L) + return jjStartNfaWithStates_0(15, 15, 35); + break; + case 111: + return jjMoveStringLiteralDfa16_0(active0, 0x100000L); + case 120: + if ((active0 & 0x200000L) != 0L) + return jjStartNfaWithStates_0(15, 21, 35); + break; + default : + break; + } + return jjStartNfa_0(14, active0, 0L); +} +private final int jjMoveStringLiteralDfa16_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(14, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(15, active0, 0L); + return 16; + } + switch(curChar) + { + case 114: + return jjMoveStringLiteralDfa17_0(active0, 0x100000L); + default : + break; + } + return jjStartNfa_0(15, active0, 0L); +} +private final int jjMoveStringLiteralDfa17_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(15, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(16, active0, 0L); + return 17; + } + switch(curChar) + { + case 121: + if ((active0 & 0x100000L) != 0L) + return jjStartNfaWithStates_0(17, 20, 35); + 
break; + default : + break; + } + return jjStartNfa_0(16, active0, 0L); +} +private final void jjCheckNAdd(int state) +{ + if (jjrounds[state] != jjround) + { + jjstateSet[jjnewStateCnt++] = state; + jjrounds[state] = jjround; + } +} +private final void jjAddStates(int start, int end) +{ + do { + jjstateSet[jjnewStateCnt++] = jjnextStates[start]; + } while (start++ != end); +} +private final void jjCheckNAddTwoStates(int state1, int state2) +{ + jjCheckNAdd(state1); + jjCheckNAdd(state2); +} +private final void jjCheckNAddStates(int start, int end) +{ + do { + jjCheckNAdd(jjnextStates[start]); + } while (start++ != end); +} +private final void jjCheckNAddStates(int start) +{ + jjCheckNAdd(jjnextStates[start]); + jjCheckNAdd(jjnextStates[start + 1]); +} +static final long[] jjbitVec0 = { + 0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL +}; +private final int jjMoveNfa_0(int startState, int curPos) +{ + int[] nextStates; + int startsAt = 0; + jjnewStateCnt = 35; + int i = 1; + jjstateSet[0] = startState; + int j, kind = 0x7fffffff; + for (;;) + { + if (++jjround == 0x7fffffff) + ReInitRounds(); + if (curChar < 64) + { + long l = 1L << curChar; + MatchLoop: do + { + switch(jjstateSet[--i]) + { + case 35: + if ((0x3ff600000000000L & l) != 0L) + { + if (kind > 58) + kind = 58; + jjCheckNAdd(15); + } + if ((0x3ff400000000000L & l) != 0L) + { + if (kind > 54) + kind = 54; + jjCheckNAdd(7); + } + break; + case 0: + if ((0x3ff000000000000L & l) != 0L) + { + if (kind > 52) + kind = 52; + jjCheckNAdd(5); + } + else if ((0x280000000000L & l) != 0L) + jjCheckNAddStates(0, 2); + else if (curChar == 47) + jjAddStates(3, 4); + else if (curChar == 39) + jjCheckNAddTwoStates(12, 13); + else if (curChar == 34) + jjCheckNAddTwoStates(9, 10); + else if (curChar == 35) + jjCheckNAddStates(5, 7); + if (curChar == 45) + { + if (kind > 58) + kind = 58; + jjCheckNAdd(15); + } + break; + case 1: + if ((0xfffffffffffffbffL & l) != 0L) + jjCheckNAddStates(5, 7); + break; + case 2: + if ((0x2400L & l) != 0L && kind > 5) + kind = 5; + break; + case 3: + if (curChar == 10 && kind > 5) + kind = 5; + break; + case 4: + if (curChar == 13) + jjstateSet[jjnewStateCnt++] = 3; + break; + case 5: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 52) + kind = 52; + jjCheckNAdd(5); + break; + case 7: + if ((0x3ff400000000000L & l) == 0L) + break; + if (kind > 54) + kind = 54; + jjCheckNAdd(7); + break; + case 8: + if (curChar == 34) + jjCheckNAddTwoStates(9, 10); + break; + case 9: + if ((0xfffffffbffffffffL & l) != 0L) + jjCheckNAddTwoStates(9, 10); + break; + case 10: + if (curChar == 34 && kind > 57) + kind = 57; + break; + case 11: + if (curChar == 39) + jjCheckNAddTwoStates(12, 13); + break; + case 12: + if ((0xffffff7fffffffffL & l) != 0L) + jjCheckNAddTwoStates(12, 13); + break; + case 13: + if (curChar == 39 && kind > 57) + kind = 57; + break; + case 14: + if (curChar != 45) + break; + if (kind > 58) + kind = 58; + jjCheckNAdd(15); + break; + case 15: + if ((0x3ff600000000000L & l) == 0L) + break; + if (kind > 58) + kind = 58; + jjCheckNAdd(15); + break; + case 16: + if (curChar == 47) + jjAddStates(3, 4); + break; + case 17: + if (curChar == 47) + jjCheckNAddStates(8, 10); + break; + case 18: + if ((0xffffffffffffdbffL & l) != 0L) + jjCheckNAddStates(8, 10); + break; + case 19: + if ((0x2400L & l) != 0L && kind > 6) + kind = 6; + break; + case 20: + if (curChar == 10 && kind > 6) + kind = 6; + break; + case 21: + if (curChar == 13) + jjstateSet[jjnewStateCnt++] = 20; + break; + case 22: + if (curChar == 
42) + jjCheckNAddTwoStates(23, 24); + break; + case 23: + if ((0xfffffbffffffffffL & l) != 0L) + jjCheckNAddTwoStates(23, 24); + break; + case 24: + if (curChar == 42) + jjAddStates(11, 12); + break; + case 25: + if ((0xffff7fffffffffffL & l) != 0L) + jjCheckNAddTwoStates(26, 24); + break; + case 26: + if ((0xfffffbffffffffffL & l) != 0L) + jjCheckNAddTwoStates(26, 24); + break; + case 27: + if (curChar == 47 && kind > 7) + kind = 7; + break; + case 28: + if ((0x280000000000L & l) != 0L) + jjCheckNAddStates(0, 2); + break; + case 29: + if ((0x3ff000000000000L & l) != 0L) + jjCheckNAddTwoStates(29, 30); + break; + case 30: + if (curChar == 46) + jjCheckNAdd(31); + break; + case 31: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 53) + kind = 53; + jjCheckNAddTwoStates(31, 32); + break; + case 33: + if ((0x280000000000L & l) != 0L) + jjCheckNAdd(34); + break; + case 34: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 53) + kind = 53; + jjCheckNAdd(34); + break; + default : break; + } + } while(i != startsAt); + } + else if (curChar < 128) + { + long l = 1L << (curChar & 077); + MatchLoop: do + { + switch(jjstateSet[--i]) + { + case 35: + if ((0x7fffffe87fffffeL & l) != 0L) + { + if (kind > 58) + kind = 58; + jjCheckNAdd(15); + } + if ((0x7fffffe87fffffeL & l) != 0L) + { + if (kind > 54) + kind = 54; + jjCheckNAdd(7); + } + break; + case 0: + if ((0x7fffffe07fffffeL & l) != 0L) + { + if (kind > 58) + kind = 58; + jjCheckNAdd(15); + } + if ((0x7fffffe07fffffeL & l) != 0L) + { + if (kind > 54) + kind = 54; + jjCheckNAdd(7); + } + break; + case 1: + jjAddStates(5, 7); + break; + case 6: + if ((0x7fffffe07fffffeL & l) == 0L) + break; + if (kind > 54) + kind = 54; + jjCheckNAdd(7); + break; + case 7: + if ((0x7fffffe87fffffeL & l) == 0L) + break; + if (kind > 54) + kind = 54; + jjCheckNAdd(7); + break; + case 9: + jjAddStates(13, 14); + break; + case 12: + jjAddStates(15, 16); + break; + case 14: + if ((0x7fffffe07fffffeL & l) == 0L) + break; + if (kind > 58) + kind = 58; + jjCheckNAdd(15); + break; + case 15: + if ((0x7fffffe87fffffeL & l) == 0L) + break; + if (kind > 58) + kind = 58; + jjCheckNAdd(15); + break; + case 18: + jjAddStates(8, 10); + break; + case 23: + jjCheckNAddTwoStates(23, 24); + break; + case 25: + case 26: + jjCheckNAddTwoStates(26, 24); + break; + case 32: + if ((0x2000000020L & l) != 0L) + jjAddStates(17, 18); + break; + default : break; + } + } while(i != startsAt); + } + else + { + int i2 = (curChar & 0xff) >> 6; + long l2 = 1L << (curChar & 077); + MatchLoop: do + { + switch(jjstateSet[--i]) + { + case 1: + if ((jjbitVec0[i2] & l2) != 0L) + jjAddStates(5, 7); + break; + case 9: + if ((jjbitVec0[i2] & l2) != 0L) + jjAddStates(13, 14); + break; + case 12: + if ((jjbitVec0[i2] & l2) != 0L) + jjAddStates(15, 16); + break; + case 18: + if ((jjbitVec0[i2] & l2) != 0L) + jjAddStates(8, 10); + break; + case 23: + if ((jjbitVec0[i2] & l2) != 0L) + jjCheckNAddTwoStates(23, 24); + break; + case 25: + case 26: + if ((jjbitVec0[i2] & l2) != 0L) + jjCheckNAddTwoStates(26, 24); + break; + default : break; + } + } while(i != startsAt); + } + if (kind != 0x7fffffff) + { + jjmatchedKind = kind; + jjmatchedPos = curPos; + kind = 0x7fffffff; + } + ++curPos; + if ((i = jjnewStateCnt) == (startsAt = 35 - (jjnewStateCnt = startsAt))) + return curPos; + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { return curPos; } + } +} +static final int[] jjnextStates = { + 5, 29, 30, 17, 22, 1, 2, 4, 18, 19, 21, 25, 27, 9, 10, 12, + 13, 33, 34, +}; 
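For readers tracing the generated DFA above: each jjMoveStringLiteralDfa*_0 step threads a 64-bit mask in which bit k stands for token kind k from thrift_grammarConstants, so a keyword remains a candidate only while its bit survives the successive masks, and literal tests such as (active0 & 0x100000000L) != 0L ask whether kind 32 (tok_i32, the keyword "i32") is still live. A minimal illustrative sketch of that idiom — the values here are chosen for the example, not copied from the generated tables:

public class KeywordMaskSketch {
    public static void main(String[] args) {
        // Bit k of the mask marks token kind k as a live candidate;
        // kinds 32 and 33 are tok_i32 ("i32") and tok_i64 ("i64").
        long active0 = (1L << 32) | (1L << 33);
        // Reading '3' after 'i' rules out "i64"; the generated code encodes
        // the surviving set as a literal mask such as 0x100000000L.
        active0 &= 0x100000000L;
        System.out.println((active0 & (1L << 32)) != 0L); // true: "i32" still matches
    }
}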
+public static final String[] jjstrLiteralImages = { +"", null, null, null, null, null, null, null, "\143\157\156\163\164", +"\156\141\155\145\163\160\141\143\145", "\143\160\160\137\156\141\155\145\163\160\141\143\145", +"\143\160\160\137\151\156\143\154\165\144\145", "\143\160\160\137\164\171\160\145", +"\152\141\166\141\137\160\141\143\153\141\147\145", "\143\157\143\157\141\137\160\162\145\146\151\170", +"\143\163\150\141\162\160\137\156\141\155\145\163\160\141\143\145", "\160\150\160\137\156\141\155\145\163\160\141\143\145", +"\160\171\137\155\157\144\165\154\145", "\160\145\162\154\137\160\141\143\153\141\147\145", +"\162\165\142\171\137\156\141\155\145\163\160\141\143\145", "\163\155\141\154\154\164\141\154\153\137\143\141\164\145\147\157\162\171", +"\163\155\141\154\154\164\141\154\153\137\160\162\145\146\151\170", "\170\163\144\137\141\154\154", +"\170\163\144\137\157\160\164\151\157\156\141\154", "\170\163\144\137\156\151\154\154\141\142\154\145", +"\170\163\144\137\156\141\155\145\163\160\141\143\145", "\170\163\144\137\141\164\164\162\163", "\151\156\143\154\165\144\145", +"\166\157\151\144", "\142\157\157\154", "\142\171\164\145", "\151\61\66", "\151\63\62", +"\151\66\64", "\144\157\165\142\154\145", "\163\164\162\151\156\147", +"\163\154\151\163\164", "\163\145\156\165\155", "\155\141\160", "\154\151\163\164", "\163\145\164", +"\141\163\171\156\143", "\164\171\160\145\144\145\146", "\163\164\162\165\143\164", +"\145\170\143\145\160\164\151\157\156", "\145\170\164\145\156\144\163", "\164\150\162\157\167\163", +"\163\145\162\166\151\143\145", "\145\156\165\155", "\162\145\161\165\151\162\145\144", +"\157\160\164\151\157\156\141\154", "\163\153\151\160", null, null, null, null, null, null, null, "\54", "\73", +"\173", "\175", "\75", "\133", "\135", "\72", "\50", "\51", "\74", "\76", }; +public static final String[] lexStateNames = { + "DEFAULT", +}; +static final long[] jjtoToken = { + 0xfe7fffffffffff01L, 0x7fL, +}; +static final long[] jjtoSkip = { + 0xfeL, 0x0L, +}; +protected SimpleCharStream input_stream; +private final int[] jjrounds = new int[35]; +private final int[] jjstateSet = new int[70]; +protected char curChar; +public thrift_grammarTokenManager(SimpleCharStream stream){ + if (SimpleCharStream.staticFlag) + throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer."); + input_stream = stream; +} +public thrift_grammarTokenManager(SimpleCharStream stream, int lexState){ + this(stream); + SwitchTo(lexState); +} +public void ReInit(SimpleCharStream stream) +{ + jjmatchedPos = jjnewStateCnt = 0; + curLexState = defaultLexState; + input_stream = stream; + ReInitRounds(); +} +private final void ReInitRounds() +{ + int i; + jjround = 0x80000001; + for (i = 35; i-- > 0;) + jjrounds[i] = 0x80000000; +} +public void ReInit(SimpleCharStream stream, int lexState) +{ + ReInit(stream); + SwitchTo(lexState); +} +public void SwitchTo(int lexState) +{ + if (lexState >= 1 || lexState < 0) + throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE); + else + curLexState = lexState; +} + +protected Token jjFillToken() +{ + Token t = Token.newToken(jjmatchedKind); + t.kind = jjmatchedKind; + String im = jjstrLiteralImages[jjmatchedKind]; + t.image = (im == null) ? 
input_stream.GetImage() : im; + t.beginLine = input_stream.getBeginLine(); + t.beginColumn = input_stream.getBeginColumn(); + t.endLine = input_stream.getEndLine(); + t.endColumn = input_stream.getEndColumn(); + return t; +} + +int curLexState = 0; +int defaultLexState = 0; +int jjnewStateCnt; +int jjround; +int jjmatchedPos; +int jjmatchedKind; + +public Token getNextToken() +{ + int kind; + Token specialToken = null; + Token matchedToken; + int curPos = 0; + + EOFLoop : + for (;;) + { + try + { + curChar = input_stream.BeginToken(); + } + catch(java.io.IOException e) + { + jjmatchedKind = 0; + matchedToken = jjFillToken(); + return matchedToken; + } + + try { input_stream.backup(0); + while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L) + curChar = input_stream.BeginToken(); + } + catch (java.io.IOException e1) { continue EOFLoop; } + jjmatchedKind = 0x7fffffff; + jjmatchedPos = 0; + curPos = jjMoveStringLiteralDfa0_0(); + if (jjmatchedKind != 0x7fffffff) + { + if (jjmatchedPos + 1 < curPos) + input_stream.backup(curPos - jjmatchedPos - 1); + if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) + { + matchedToken = jjFillToken(); + return matchedToken; + } + else + { + continue EOFLoop; + } + } + int error_line = input_stream.getEndLine(); + int error_column = input_stream.getEndColumn(); + String error_after = null; + boolean EOFSeen = false; + try { input_stream.readChar(); input_stream.backup(1); } + catch (java.io.IOException e1) { + EOFSeen = true; + error_after = curPos <= 1 ? "" : input_stream.GetImage(); + if (curChar == '\n' || curChar == '\r') { + error_line++; + error_column = 0; + } + else + error_column++; + } + if (!EOFSeen) { + input_stream.backup(1); + error_after = curPos <= 1 ? "" : input_stream.GetImage(); + } + throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR); + } +} + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStructBase.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStructBase.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStructBase.java (working copy) @@ -81,7 +81,6 @@ fieldList.serialize(o, oi, oprot); if(thrift_mode) { - oprot.writeFieldStop(); oprot.writeStructEnd(); } } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java (working copy) @@ -86,7 +86,12 @@ deserializeReuse = new HashMap(); } TMap themap = iprot.readMapBegin(); - for(int i = 0; i < themap.size; i++) { + if (themap == null) { + return null; + } + // themap might be reused by the Protocol. 
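To make the hunk above concrete: themap.size is copied into a local before any entries are read because, as the new comment notes, the protocol may hand back one reused TMap instance from every readMapBegin(), so a nested readMapBegin() triggered while deserializing a key or value could overwrite the header. A minimal sketch of the same defensive pattern, assuming the com.facebook.thrift types used throughout this patch (the class and method names here are illustrative only):

import java.util.HashMap;
import java.util.Map;
import com.facebook.thrift.protocol.TMap;
import com.facebook.thrift.protocol.TProtocol;

class MapReadSketch {
    static Map readMapEntries(TProtocol iprot,
                              DynamicSerDeTypeBase keyType,
                              DynamicSerDeTypeBase valueType) throws Exception {
        TMap themap = iprot.readMapBegin();
        if (themap == null) {
            return null;                  // null map, as with a WriteNullsProtocol
        }
        int mapSize = themap.size;        // copy first: a nested readMapBegin()
                                          // may clobber the reused TMap instance
        Map result = new HashMap();
        for (int i = 0; i < mapSize; i++) {
            Object key = keyType.deserialize(null, iprot);
            Object value = valueType.deserialize(null, iprot);
            result.put(key, value);
        }
        iprot.readMapEnd();
        return result;
    }
}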
+ int mapSize = themap.size; + for(int i = 0; i < mapSize; i++) { Object key = this.getKeyType().deserialize(null, iprot); Object value = this.getValueType().deserialize(null, iprot); deserializeReuse.put(key,value); @@ -97,6 +102,7 @@ return deserializeReuse; } + TMap serializeMap = new TMap(); @Override public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException, @@ -104,19 +110,33 @@ DynamicSerDeTypeBase keyType = this.getKeyType(); DynamicSerDeTypeBase valueType = this.getValueType(); + org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol nullProtocol = + (oprot instanceof org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol) + ? (org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol)oprot + : null; + assert(oi.getCategory() == ObjectInspector.Category.MAP); MapObjectInspector moi = (MapObjectInspector)oi; ObjectInspector koi = moi.getMapKeyObjectInspector(); ObjectInspector voi = moi.getMapValueObjectInspector(); Map map = moi.getMap(o); - oprot.writeMapBegin(new TMap(keyType.getType(),valueType.getType(),map.size())); + serializeMap.size = map.size(); + serializeMap.keyType = keyType.getType(); + serializeMap.valueType = valueType.getType(); + oprot.writeMapBegin(serializeMap); + for(Iterator i = map.entrySet().iterator(); i.hasNext(); ) { Map.Entry it = (Map.Entry)i.next(); Object key = it.getKey(); Object value = it.getValue(); keyType.serialize(key, koi, oprot); - valueType.serialize(value, voi, oprot); + if (value == null) { + assert(nullProtocol != null); + nullProtocol.writeNull(); + } else { + valueType.serialize(value, voi, oprot); + } } // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy oprot.writeMapEnd(); Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldValue.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldValue.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldValue.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeFieldValue.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeFieldValue extends SimpleNode { + public DynamicSerDeFieldValue(int id) { + super(id); + } + + public DynamicSerDeFieldValue(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstList.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstList.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstList.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeConstList.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConstList extends SimpleNode { + public DynamicSerDeConstList(int id) { + super(id); + } + + public DynamicSerDeConstList(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDef.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDef.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDef.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeEnumDef.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeEnumDef extends SimpleNode { + public DynamicSerDeEnumDef(int id) { + super(id); + } + + public DynamicSerDeEnumDef(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeaderList.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeaderList.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeaderList.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeHeaderList.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeHeaderList extends SimpleNode { + public DynamicSerDeHeaderList(int id) { + super(id); + } + + public DynamicSerDeHeaderList(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldType.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldType.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldType.java (working copy) @@ -37,6 +37,4 @@ // and thus we can quickly find this comment and limitation. return (DynamicSerDeTypeBase)this.jjtGetChild(FD_FIELD_TYPE); } - - } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/Token.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/Token.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/Token.java (revision 0) @@ -0,0 +1,81 @@ +/* Generated By:JavaCC: Do not edit this line. Token.java Version 3.0 */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +/** + * Describes the input token stream. + */ + +public class Token { + + /** + * An integer that describes the kind of this token. This numbering + * system is determined by JavaCCParser, and a table of these numbers is + * stored in the file ...Constants.java. + */ + public int kind; + + /** + * beginLine and beginColumn describe the position of the first character + * of this token; endLine and endColumn describe the position of the + * last character of this token. + */ + public int beginLine, beginColumn, endLine, endColumn; + + /** + * The string image of the token. 
+ */ + public String image; + + /** + * A reference to the next regular (non-special) token from the input + * stream. If this is the last token from the input stream, or if the + * token manager has not read tokens beyond this one, this field is + * set to null. This is true only if this token is also a regular + * token. Otherwise, see below for a description of the contents of + * this field. + */ + public Token next; + + /** + * This field is used to access special tokens that occur prior to this + * token, but after the immediately preceding regular (non-special) token. + * If there are no such special tokens, this field is set to null. + * When there is more than one such special token, this field refers + * to the last of these special tokens, which in turn refers to the next + * previous special token through its specialToken field, and so on + * until the first special token (whose specialToken field is null). + * The next fields of special tokens refer to other special tokens that + * immediately follow it (without an intervening regular token). If there + * is no such token, this field is null. + */ + public Token specialToken; + + /** + * Returns the image. + */ + public String toString() + { + return image; + } + + /** + * Returns a new Token object, by default. However, if you want, you + * can create and return subclass objects based on the value of ofKind. + * Simply add the cases to the switch for all those special cases. + * For example, if you have a subclass of Token called IDToken that + * you want to create if ofKind is ID, simply add something like: + * + * case MyParserConstants.ID : return new IDToken(); + * + * to the following switch statement. Then you can cast the matchedToken + * variable to the appropriate type and use it in your lexical actions. + */ + public static final Token newToken(int ofKind) + { + switch(ofKind) + { + default : return new Token(); + } + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinitionType.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinitionType.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinitionType.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeDefinitionType.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeDefinitionType extends SimpleNode { + public DynamicSerDeDefinitionType(int id) { + super(id); + } + + public DynamicSerDeDefinitionType(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDefinition.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDefinition.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDefinition.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line.
DynamicSerDeTypeDefinition.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeTypeDefinition extends SimpleNode { + public DynamicSerDeTypeDefinition(int id) { + super(id); + } + + public DynamicSerDeTypeDefinition(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTreeConstants.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTreeConstants.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTreeConstants.java (revision 0) @@ -0,0 +1,105 @@ +/* Generated By:JJTree: Do not edit this line. /home/pwyckoff/projects/hadoop/trunk/VENDOR/hadoop-0.17/src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTreeConstants.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public interface thrift_grammarTreeConstants +{ + public int JJTSTART = 0; + public int JJTHEADERLIST = 1; + public int JJTHEADER = 2; + public int JJTNAMESPACE = 3; + public int JJTINCLUDE = 4; + public int JJTDEFINITION = 5; + public int JJTTYPEDEFINITION = 6; + public int JJTTYPEDEF = 7; + public int JJTCOMMAORSEMICOLON = 8; + public int JJTENUM = 9; + public int JJTENUMDEFLIST = 10; + public int JJTENUMDEF = 11; + public int JJTSENUM = 12; + public int JJTSENUMDEFLIST = 13; + public int JJTSENUMDEF = 14; + public int JJTCONST = 15; + public int JJTCONSTVALUE = 16; + public int JJTCONSTLIST = 17; + public int JJTCONSTLISTCONTENTS = 18; + public int JJTCONSTMAP = 19; + public int JJTCONSTMAPCONTENTS = 20; + public int JJTSTRUCT = 21; + public int JJTXCEPTION = 22; + public int JJTSERVICE = 23; + public int JJTFLAGARGS = 24; + public int JJTUNFLAGARGS = 25; + public int JJTEXTENDS = 26; + public int JJTFUNCTION = 27; + public int JJTASYNC = 28; + public int JJTTHROWS = 29; + public int JJTFIELDLIST = 30; + public int JJTFIELD = 31; + public int JJTFIELDREQUIREDNESS = 32; + public int JJTFIELDVALUE = 33; + public int JJTDEFINITIONTYPE = 34; + public int JJTFUNCTIONTYPE = 35; + public int JJTFIELDTYPE = 36; + public int JJTTYPESTRING = 37; + public int JJTTYPEBYTE = 38; + public int JJTTYPEI16 = 39; + public int JJTTYPEI32 = 40; + public int JJTTYPEI64 = 41; + public int JJTTYPEDOUBLE = 42; + public int JJTTYPEBOOL = 43; + public int JJTTYPEMAP = 44; + public int JJTTYPESET = 45; + public int JJTTYPELIST = 46; + + + public String[] jjtNodeName = { + "Start", + "HeaderList", + "Header", + "Namespace", + "Include", + "Definition", + "TypeDefinition", + "Typedef", + "CommaOrSemicolon", + "Enum", + "EnumDefList", + "EnumDef", + "Senum", + "SenumDefList", + "SenumDef", + "Const", + "ConstValue", + "ConstList", + "ConstListContents", + "ConstMap", + "ConstMapContents", + "Struct", + "Xception", + "Service", + "FlagArgs", + "UnflagArgs", + "Extends", + "Function", + "Async", + "Throws", + "FieldList", + "Field", + "FieldRequiredness", + "FieldValue", + "DefinitionType", + "FunctionType", + "FieldType", + "TypeString", + "TypeByte", + "Typei16", + "Typei32", + "Typei64", + "TypeDouble", + "TypeBool", + "TypeMap", + "TypeSet", + "TypeList", + }; +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstListContents.java =================================================================== --- 
src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstListContents.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstListContents.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstListContents.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConstListContents extends SimpleNode { + public DynamicSerDeConstListContents(int id) { + super(id); + } + + public DynamicSerDeConstListContents(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeService.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeService.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeService.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeService.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeService extends SimpleNode { + public DynamicSerDeService(int id) { + super(id); + } + + public DynamicSerDeService(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnum.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnum.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnum.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeEnum.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeEnum extends SimpleNode { + public DynamicSerDeEnum(int id) { + super(id); + } + + public DynamicSerDeEnum(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeByte.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeByte.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeByte.java (working copy) @@ -41,10 +41,15 @@ public String toString() { return "byte"; } public Byte deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Byte.valueOf(iprot.readByte()); + byte val = iprot.readByte(); + if (val == 0 && iprot instanceof org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol && + ((org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol)iprot).lastPrimitiveWasNull()) { + return null; + } + return Byte.valueOf(val); } public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Byte.valueOf(iprot.readByte()); + return deserialize(iprot); } public void serialize(Object s, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleNode.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleNode.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleNode.java (revision 0) @@ -0,0 +1,72 @@ +/* Generated By:JJTree: Do not edit this line. SimpleNode.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class SimpleNode implements Node { + protected Node parent; + protected Node[] children; + protected int id; + protected thrift_grammar parser; + + public SimpleNode(int i) { + id = i; + } + + public SimpleNode(thrift_grammar p, int i) { + this(i); + parser = p; + } + + public void jjtOpen() { + } + + public void jjtClose() { + } + + public void jjtSetParent(Node n) { parent = n; } + public Node jjtGetParent() { return parent; } + + public void jjtAddChild(Node n, int i) { + if (children == null) { + children = new Node[i + 1]; + } else if (i >= children.length) { + Node c[] = new Node[i + 1]; + System.arraycopy(children, 0, c, 0, children.length); + children = c; + } + children[i] = n; + } + + public Node jjtGetChild(int i) { + return children[i]; + } + + public int jjtGetNumChildren() { + return (children == null) ? 0 : children.length; + } + + /* You can override these two methods in subclasses of SimpleNode to + customize the way the node appears when the tree is dumped. If + your output uses more than one line you should override + toString(String), otherwise overriding toString() is probably all + you need to do. */ + + public String toString() { return thrift_grammarTreeConstants.jjtNodeName[id]; } + public String toString(String prefix) { return prefix + toString(); } + + /* Override this method if you want to customize how the node dumps + out its children. 
*/ + + public void dump(String prefix) { + System.out.println(toString(prefix)); + if (children != null) { + for (int i = 0; i < children.length; ++i) { + SimpleNode n = (SimpleNode)children[i]; + if (n != null) { + n.dump(prefix + " "); + } + } + } + } +} + Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDouble.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDouble.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDouble.java (working copy) @@ -46,7 +46,12 @@ public String toString() { return "double"; } public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { - return Double.valueOf(iprot.readDouble()); + double val = iprot.readDouble(); + if (val == 0 && iprot instanceof org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol && + ((org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol)iprot).lastPrimitiveWasNull()) { + return null; + } + return Double.valueOf(val); } public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { @@ -58,4 +63,7 @@ public byte getType() { return TType.DOUBLE; } + + public Class getRealType() { return java.lang.Double.class; } + public Double getRealTypeInstance() { return Double.valueOf(0); } } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java (revision 0) @@ -0,0 +1,48 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeFieldRequiredness extends SimpleNode { + public enum RequirednessTypes + { + Required, Skippable, Optional, + }; + + /** + * Is this a required, skippable or optional field. + * Used by DynamicSerDe for optimizations. + */ + protected RequirednessTypes requiredness; + + /** + * Get the requiredness attribute of this field. 
+ */ + public RequirednessTypes getRequiredness() { + return requiredness; + } + + public DynamicSerDeFieldRequiredness(int id) { + super(id); + } + + public DynamicSerDeFieldRequiredness(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java (working copy) @@ -87,6 +87,9 @@ for(int i = 0 ; i < this.jjtGetNumChildren(); i++) { DynamicSerDeField mt = this.getField(i); DynamicSerDeTypeBase type = mt.getFieldType().getMyType(); + // types get initialized in case they need to set up any + // internal data structures - e.g., DynamicSerDeStructBase + type.initialize(); type.fieldid = mt.fieldid; type.name = mt.name; @@ -106,6 +109,15 @@ return types_by_column_name.get(fieldname); } + /** + * Indicates whether fields can be out of order or missing, i.e., whether this is + * real Thrift serialization. + * This is used by DynamicSerDe to do some optimizations if it knows all the fields exist, + * are required, and are serialized in order. + * For now, those optimizations are only done for DynamicSerDe-serialized data, so this is + * always set to false. + */ + protected boolean isRealThrift = false; public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { ArrayList struct = null; @@ -120,16 +132,33 @@ assert(struct.size() == this.getNumFields()); } + boolean fastSkips = iprot instanceof org.apache.hadoop.hive.serde2.thrift.SkippableTProtocol; + + // may need to strip away the STOP marker when in thrift mode + boolean stopSeen = false; + // Read the fields. for(int i = 0; i < this.getNumFields(); i++) { DynamicSerDeTypeBase mt = null; TField field = null; - + + if(!isRealThrift && this.getField(i).isSkippable()) { + // PRE: all the fields are required and serialized in order (guaranteed by !isRealThrift) + mt = this.ordered_types[i]; + if(fastSkips) { + ((org.apache.hadoop.hive.serde2.thrift.SkippableTProtocol)iprot).skip(mt.getType()); + } else { + TProtocolUtil.skip(iprot,mt.getType()); + } + struct.set(i, null); + continue; + } if (thrift_mode) { field = iprot.readFieldBegin(); if(field.type >= 0) { if(field.type == TType.STOP) { + stopSeen = true; break; } mt = this.getFieldByFieldId(field.id); @@ -157,23 +186,28 @@ orderedId = ordered_column_id_by_name.get(mt.name); } struct.set(orderedId, mt.deserialize(struct.get(orderedId), iprot)); - if(thrift_mode) { iprot.readFieldEnd(); } } + if(thrift_mode && !stopSeen) { + // strip off the STOP marker, which may remain if all the fields were present in the serialization + TField field = iprot.readFieldBegin(); + assert(field.type == TType.STOP); + } return struct; } TField field = new TField(); public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { - // Assuming the ObjectInspector represents exactly the same type as this struct. // This assumption should be checked during query compile time.
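The deserialize and serialize changes in this file lean on two optional protocol capabilities: SkippableTProtocol lets a skippable field be discarded natively instead of decoded, and WriteNullsProtocol lets a null field be written as an explicit marker and recognized again on read (the same lastPrimitiveWasNull() test appears in the DynamicSerDeTypeByte and DynamicSerDeTypeDouble hunks above). The following compact sketch shows both contracts using hypothetical stand-in interfaces, not the real ones from this patch.

// Hypothetical stand-ins for the two protocol capabilities used above.
interface SkippableStub {                                // stands in for SkippableTProtocol
    void skip(byte ttype);                               // natively skip one value of the given type
}

interface NullAwareStub {                                // stands in for WriteNullsProtocol
    void writeNull();                                    // serialize side: emit a null marker
    boolean lastPrimitiveWasNull();                      // deserialize side: was that 0 really a null?
}

final class ProtocolCapabilitySketch {
    // Serialize side: a null value either becomes writeNull() or, when the
    // protocol cannot represent nulls, the field is omitted entirely.
    static void writeField(Object oprot, Object value) {
        if (value == null) {
            if (oprot instanceof NullAwareStub) {
                ((NullAwareStub) oprot).writeNull();
            }
            return;                                      // non-null-aware protocols just skip the field
        }
        // ... serialize value with the concrete type's serialize() here ...
    }

    // Deserialize side: a zero primitive is ambiguous, so ask the protocol
    // whether the last primitive it produced was actually a null.
    static Byte readByteField(Object iprot, byte raw) {
        if (raw == 0 && iprot instanceof NullAwareStub
                && ((NullAwareStub) iprot).lastPrimitiveWasNull()) {
            return null;
        }
        return Byte.valueOf(raw);
    }

    // Skippable fields are never materialized; prefer the native skip when offered.
    static void skipField(Object iprot, byte ttype) {
        if (iprot instanceof SkippableStub) {
            ((SkippableStub) iprot).skip(ttype);         // fast path
        } else {
            genericSkip(iprot, ttype);                   // stands in for TProtocolUtil.skip
        }
    }

    static void genericSkip(Object iprot, byte ttype) { /* walk and discard the value */ }
}
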
assert(oi instanceof StructObjectInspector); StructObjectInspector soi = (StructObjectInspector) oi; + boolean writeNulls = oprot instanceof org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol; + // For every field List fields = soi.getAllStructFieldRefs(); if (fields.size() != ordered_types.length) { @@ -184,6 +218,10 @@ Object f = soi.getStructFieldData(o, fields.get(i)); DynamicSerDeTypeBase mt = ordered_types[i]; + if (f == null && !writeNulls) { + continue; + } + if(thrift_mode) { field.name = mt.name; field.type = mt.getType(); @@ -191,8 +229,11 @@ oprot.writeFieldBegin(field); } - mt.serialize(f, fields.get(i).getFieldObjectInspector(), oprot); - + if(f == null) { + ((org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol)oprot).writeNull(); + } else { + mt.serialize(f, fields.get(i).getFieldObjectInspector(), oprot); + } if(thrift_mode) { oprot.writeFieldEnd(); } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMap.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMap.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMap.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstMap.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConstMap extends SimpleNode { + public DynamicSerDeConstMap(int id) { + super(id); + } + + public DynamicSerDeConstMap(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeThrows.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeThrows.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeThrows.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeThrows.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeThrows extends SimpleNode { + public DynamicSerDeThrows(int id) { + super(id); + } + + public DynamicSerDeThrows(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java (revision 0) @@ -0,0 +1,123 @@ +/* Generated By:JJTree: Do not edit this line. /home/pwyckoff/projects/hadoop/trunk/VENDOR/hadoop-0.17/src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +class JJTthrift_grammarState { + private java.util.Stack nodes; + private java.util.Stack marks; + + private int sp; // number of nodes on stack + private int mk; // current mark + private boolean node_created; + + JJTthrift_grammarState() { + nodes = new java.util.Stack(); + marks = new java.util.Stack(); + sp = 0; + mk = 0; + } + + /* Determines whether the current node was actually closed and + pushed. 
This should only be called in the final user action of a + node scope. */ + boolean nodeCreated() { + return node_created; + } + + /* Call this to reinitialize the node stack. It is called + automatically by the parser's ReInit() method. */ + void reset() { + nodes.removeAllElements(); + marks.removeAllElements(); + sp = 0; + mk = 0; + } + + /* Returns the root node of the AST. It only makes sense to call + this after a successful parse. */ + Node rootNode() { + return (Node)nodes.elementAt(0); + } + + /* Pushes a node on to the stack. */ + void pushNode(Node n) { + nodes.push(n); + ++sp; + } + + /* Returns the node on the top of the stack, and removes it from the + stack. */ + Node popNode() { + if (--sp < mk) { + mk = ((Integer)marks.pop()).intValue(); + } + return (Node)nodes.pop(); + } + + /* Returns the node currently on the top of the stack. */ + Node peekNode() { + return (Node)nodes.peek(); + } + + /* Returns the number of children on the stack in the current node + scope. */ + int nodeArity() { + return sp - mk; + } + + + void clearNodeScope(Node n) { + while (sp > mk) { + popNode(); + } + mk = ((Integer)marks.pop()).intValue(); + } + + + void openNodeScope(Node n) { + marks.push(new Integer(mk)); + mk = sp; + n.jjtOpen(); + } + + + /* A definite node is constructed from a specified number of + children. That number of nodes are popped from the stack and + made the children of the definite node. Then the definite node + is pushed on to the stack. */ + void closeNodeScope(Node n, int num) { + mk = ((Integer)marks.pop()).intValue(); + while (num-- > 0) { + Node c = popNode(); + c.jjtSetParent(n); + n.jjtAddChild(c, num); + } + n.jjtClose(); + pushNode(n); + node_created = true; + } + + + /* A conditional node is constructed if its condition is true. All + the nodes that have been pushed since the node was opened are + made children of the conditional node, which is then pushed + on to the stack. If the condition is false the node is not + constructed and they are left on the stack. */ + void closeNodeScope(Node n, boolean condition) { + if (condition) { + int a = nodeArity(); + mk = ((Integer)marks.pop()).intValue(); + while (a-- > 0) { + Node c = popNode(); + c.jjtSetParent(n); + n.jjtAddChild(c, a); + } + n.jjtClose(); + pushNode(n); + node_created = true; + } else { + mk = ((Integer)marks.pop()).intValue(); + node_created = false; + } + } +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStart.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStart.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStart.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line.
DynamicSerDeStart.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeStart extends SimpleNode { + public DynamicSerDeStart(int id) { + super(id); + } + + public DynamicSerDeStart(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/Node.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/Node.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/Node.java (revision 0) @@ -0,0 +1,34 @@ +/* Generated By:JJTree: Do not edit this line. Node.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +/* All AST nodes must implement this interface. It provides basic + machinery for constructing the parent and child relationships + between nodes. */ + +public interface Node { + + /** This method is called after the node has been made the current + node. It indicates that child nodes can now be added to it. */ + public void jjtOpen(); + + /** This method is called after all the child nodes have been + added. */ + public void jjtClose(); + + /** This pair of methods is used to inform the node of its + parent. */ + public void jjtSetParent(Node n); + public Node jjtGetParent(); + + /** This method tells the node to add its argument to the node's + list of children. */ + public void jjtAddChild(Node n, int i); + + /** This method returns a child node. The children are numbered + from zero, left to right. */ + public Node jjtGetChild(int i); + + /** Return the number of children the node has. */ + public int jjtGetNumChildren(); +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jj =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jj (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jj (revision 0) @@ -0,0 +1,2365 @@ +/*@bgen(jjtree) Generated By:JJTree: Do not edit this line. /home/pwyckoff/projects/hadoop/trunk/VENDOR/hadoop-0.17/src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jj */ +/*@egen*/options { + STATIC = false; +} + + +PARSER_BEGIN(thrift_grammar) + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import java.util.*; +import java.io.*; +import java.net.*; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.transport.*; +import org.apache.hadoop.hive.serde2.dynamic_type.*; + +public class thrift_grammar/*@bgen(jjtree)*/implements thrift_grammarTreeConstants/*@egen*/ {/*@bgen(jjtree)*/ + protected JJTthrift_grammarState jjtree = new JJTthrift_grammarState(); + +/*@egen*/ + + private List include_path = null; + + // for computing the autogenerated field ids in thrift + private int field_val; + + // store types and tables + // separately because one cannot use a table (i.e., service.method) as a Struct-like type. + protected Map types; + protected Map tables; + + // system include path + final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" }; + + // need three params to differentiate this from the auto-generated two-param constructor, since + // some calls in the autogenerated code pass null for the 2nd param and would otherwise be ambiguous.
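Before the constructor itself, a hypothetical usage sketch for the grammar defined in this file: construct the parser with the three-param constructor motivated by the comment above, then call Start(). The demo class and DDL string are illustrative only; it assumes same-package access, since the constructor is protected.

// Hypothetical driver for the parser defined in this grammar file.
package org.apache.hadoop.hive.serde2.dynamic_type;      // constructor is protected; same package required

import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.List;

public class ThriftGrammarDemo {
    public static void main(String[] args) throws Exception {
        String ddl = "struct page_view { string userid, i32 time_spent }"; // illustrative DDL
        List includePath = new ArrayList();              // no includes needed for inline DDL
        thrift_grammar parser = new thrift_grammar(
            new ByteArrayInputStream(ddl.getBytes("UTF-8")),
            includePath,
            false);                                      // third param only disambiguates constructors
        parser.Start();                                  // throws ParseException on malformed DDL
        System.out.println("DDL parsed successfully");
    }
}
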
+ protected thrift_grammar(InputStream is, List include_path, boolean junk) { + this(is,null); + this.types = new HashMap () ; + this.tables = new HashMap () ; + this.include_path = include_path; + this.field_val = -1; + } + + // find the file on the include path + private static File findFile(String fname, List include_path) { + for(String path: include_path) { + final String full = path + "/" + fname; + File f = new File(full); + if(f.exists()) { + return f; + } + } + return null; + } + + public static void main(String args[]) { + String filename = null; + List include_path = new ArrayList(); + + for(String path: default_include_path) { + include_path.add(path); + } + for(int i = 0; i < args.length; i++) { + String arg = args[i]; + if(arg.equals("--include") && i + 1 < args.length) { + include_path.add(args[++i]); + } + if(arg.equals("--file") && i + 1 < args.length) { + filename = args[++i]; + } + } + + InputStream is = System.in; + if(filename != null) { + try { + is = new FileInputStream(findFile(filename, include_path)); + } catch(IOException e) { + } + } + thrift_grammar t = new thrift_grammar(is,include_path,false); + + try { + t.Start(); + } catch (Exception e) { + System.out.println("Parse error."); + System.out.println(e.getMessage()); + e.printStackTrace(); + } + } +} + +PARSER_END(thrift_grammar) + + + +SKIP : +{ + " " +| "\t" +| "\n" +| "\r" +| <"#"(~["\n"])* ("\n"|"\r"|"\r\n")> +| <"//" (~["\n","\r"])* ("\n"|"\r"|"\r\n")> +| <"/*" (~["*"])* "*" (~["/"] (~["*"])* "*")* "/"> +} + + +/** + * HELPER DEFINITIONS, COMMENTS, CONSTANTS, AND WHATNOT + */ + +TOKEN: +{ +| + | +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| + | + | + | + | + | + | + | + | + | + | + | + | + | + +} + +TOKEN: { + + +| +)*"."()+(["e","E"](["+","-"])?()+)?> +| +(||"."|"_")*> +| +<#LETTER: (["a"-"z", "A"-"Z" ]) > +| +<#DIGIT: ["0"-"9"] > +| + +| + +} + + +SimpleNode Start() : {/*@bgen(jjtree) Start */ + DynamicSerDeStart jjtn000 = new DynamicSerDeStart(JJTSTART); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Start */ + try { +/*@egen*/ + HeaderList() ([CommaOrSemicolon()] Definition())+/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode HeaderList() : {/*@bgen(jjtree) HeaderList */ + DynamicSerDeHeaderList jjtn000 = new DynamicSerDeHeaderList(JJTHEADERLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) HeaderList */ + try { +/*@egen*/ + (Header())*/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } 
+/*@egen*/ + +} + +SimpleNode Header() : {/*@bgen(jjtree) Header */ + DynamicSerDeHeader jjtn000 = new DynamicSerDeHeader(JJTHEADER); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Header */ + try { +/*@egen*/ + Include()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Namespace()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode Namespace() : {/*@bgen(jjtree) Namespace */ + DynamicSerDeNamespace jjtn000 = new DynamicSerDeNamespace(JJTNAMESPACE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Namespace */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +}/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +SimpleNode Include() : {/*@bgen(jjtree) Include */ + DynamicSerDeInclude jjtn000 = new DynamicSerDeInclude(JJTINCLUDE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/ + String fname; + boolean found = false; +} +{/*@bgen(jjtree) Include */ + try { +/*@egen*/ + + fname=.image/*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + // bugbug somewhat fragile below substring expression + fname = fname.substring(1,fname.length() - 1); + + // try to find the file on the include path + File f = thrift_grammar.findFile(fname, this.include_path); + if(f != null) { + 
found = true; + try { + FileInputStream fis = new FileInputStream(f); + thrift_grammar t = new thrift_grammar(fis,this.include_path, false); + t.Start(); + fis.close(); + found = true; + // add in what we found to our type and table tables. + this.tables.putAll(t.tables); + this.types.putAll(t.types); + } catch (Exception e) { + System.out.println("File: " + fname + " - Oops."); + System.out.println(e.getMessage()); + e.printStackTrace(); + } + } + if(!found) { + throw new RuntimeException("include file not found: " + fname); + } + return jjtn000; +}/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +SimpleNode Definition() : {/*@bgen(jjtree) Definition */ + DynamicSerDeDefinition jjtn000 = new DynamicSerDeDefinition(JJTDEFINITION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Definition */ + try { +/*@egen*/ + Const()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Service()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| TypeDefinition()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode TypeDefinition() : {/*@bgen(jjtree) TypeDefinition */ + DynamicSerDeTypeDefinition jjtn000 = new DynamicSerDeTypeDefinition(JJTTYPEDEFINITION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) TypeDefinition */ + try { +/*@egen*/ + Typedef()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Enum()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Senum()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Struct()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Xception()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ + +} + +DynamicSerDeTypedef Typedef() : {/*@bgen(jjtree) Typedef */ + DynamicSerDeTypedef jjtn000 = new DynamicSerDeTypedef(JJTTYPEDEF); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Typedef */ + try { +/*@egen*/ + + DefinitionType() + jjtn000.name = .image/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + // 
store the type for later retrieval + this.types.put(jjtn000.name, jjtn000); + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +// returning void because we ignore this production. +void CommaOrSemicolon() : {/*@bgen(jjtree) CommaOrSemicolon */ + DynamicSerDeCommaOrSemicolon jjtn000 = new DynamicSerDeCommaOrSemicolon(JJTCOMMAORSEMICOLON); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) CommaOrSemicolon */ + try { +/*@egen*/ + "," +| + ";"/*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ +}/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode Enum() : {/*@bgen(jjtree) Enum */ + DynamicSerDeEnum jjtn000 = new DynamicSerDeEnum(JJTENUM); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Enum */ + try { +/*@egen*/ + "{" EnumDefList() "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode EnumDefList() : {/*@bgen(jjtree) EnumDefList */ + DynamicSerDeEnumDefList jjtn000 = new DynamicSerDeEnumDefList(JJTENUMDEFLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) EnumDefList */ + try { +/*@egen*/ + (EnumDef())+/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode EnumDef() : {/*@bgen(jjtree) EnumDef */ + DynamicSerDeEnumDef jjtn000 = new DynamicSerDeEnumDef(JJTENUMDEF); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) EnumDef */ + try { +/*@egen*/ + ["=" ] [CommaOrSemicolon()]/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } 
+ } +/*@egen*/ +} + +SimpleNode Senum() : {/*@bgen(jjtree) Senum */ + DynamicSerDeSenum jjtn000 = new DynamicSerDeSenum(JJTSENUM); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Senum */ + try { +/*@egen*/ + "{" SenumDefList() "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode SenumDefList() : {/*@bgen(jjtree) SenumDefList */ + DynamicSerDeSenumDefList jjtn000 = new DynamicSerDeSenumDefList(JJTSENUMDEFLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) SenumDefList */ + try { +/*@egen*/ + (SenumDef())+/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode SenumDef() : {/*@bgen(jjtree) SenumDef */ + DynamicSerDeSenumDef jjtn000 = new DynamicSerDeSenumDef(JJTSENUMDEF); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) SenumDef */ + try { +/*@egen*/ + [CommaOrSemicolon()]/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +SimpleNode Const() : {/*@bgen(jjtree) Const */ + DynamicSerDeConst jjtn000 = new DynamicSerDeConst(JJTCONST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Const */ + try { +/*@egen*/ + FieldType() "=" ConstValue() [CommaOrSemicolon()]/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode ConstValue() : {/*@bgen(jjtree) ConstValue */ + DynamicSerDeConstValue jjtn000 = new DynamicSerDeConstValue(JJTCONSTVALUE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} 
+{/*@bgen(jjtree) ConstValue */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } +| /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } +| /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } +| /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } +| ConstList()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } +| ConstMap()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode ConstList() : {/*@bgen(jjtree) ConstList */ + DynamicSerDeConstList jjtn000 = new DynamicSerDeConstList(JJTCONSTLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) ConstList */ + try { +/*@egen*/ + "[" ConstListContents() "]"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode ConstListContents() : {/*@bgen(jjtree) ConstListContents */ + DynamicSerDeConstListContents jjtn000 = new DynamicSerDeConstListContents(JJTCONSTLISTCONTENTS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) ConstListContents */ + try { +/*@egen*/ + (ConstValue() [CommaOrSemicolon()])+/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode ConstMap() : {/*@bgen(jjtree) ConstMap */ + DynamicSerDeConstMap jjtn000 = new DynamicSerDeConstMap(JJTCONSTMAP); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) ConstMap */ + try { +/*@egen*/ + "{" ConstMapContents() "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 
instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode ConstMapContents() : {/*@bgen(jjtree) ConstMapContents */ + DynamicSerDeConstMapContents jjtn000 = new DynamicSerDeConstMapContents(JJTCONSTMAPCONTENTS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) ConstMapContents */ + try { +/*@egen*/ + (ConstValue() ":" ConstValue() [CommaOrSemicolon()])+/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } +|/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeStruct Struct() : {/*@bgen(jjtree) Struct */ + DynamicSerDeStruct jjtn000 = new DynamicSerDeStruct(JJTSTRUCT); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/ + +} +{/*@bgen(jjtree) Struct */ + try { +/*@egen*/ + + jjtn000.name = .image + "{" + FieldList() + "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + this.types.put(jjtn000.name,jjtn000); + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +SimpleNode Xception() : {/*@bgen(jjtree) Xception */ + DynamicSerDeXception jjtn000 = new DynamicSerDeXception(JJTXCEPTION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Xception */ + try { +/*@egen*/ + "{" FieldList() "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +SimpleNode Service() : {/*@bgen(jjtree) Service */ + DynamicSerDeService jjtn000 = new DynamicSerDeService(JJTSERVICE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Service */ + try { +/*@egen*/ + + + Extends() + "{" + FlagArgs() + (Function())+ + UnflagArgs() + "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + // at some point, these should be inserted as a "db" + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + 
jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode FlagArgs() : {/*@bgen(jjtree) FlagArgs */ + DynamicSerDeFlagArgs jjtn000 = new DynamicSerDeFlagArgs(JJTFLAGARGS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) FlagArgs */ + try { +/*@egen*//*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode UnflagArgs() : {/*@bgen(jjtree) UnflagArgs */ + DynamicSerDeUnflagArgs jjtn000 = new DynamicSerDeUnflagArgs(JJTUNFLAGARGS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) UnflagArgs */ + try { +/*@egen*//*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode Extends() : {/*@bgen(jjtree) Extends */ + DynamicSerDeExtends jjtn000 = new DynamicSerDeExtends(JJTEXTENDS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Extends */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +|/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +DynamicSerDeFunction Function() : {/*@bgen(jjtree) Function */ + DynamicSerDeFunction jjtn000 = new DynamicSerDeFunction(JJTFUNCTION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Function */ + try { +/*@egen*/ + // metastore ignores async and type + Async() + FunctionType() + + // the name of the function/table + jjtn000.name = .image + "(" + FieldList() + ")" + Throws() + [CommaOrSemicolon()]/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + + { + this.tables.put(jjtn000.name, jjtn000); + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +void Async() : {/*@bgen(jjtree) Async */ + DynamicSerDeAsync jjtn000 = new DynamicSerDeAsync(JJTASYNC); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Async */ + try { +/*@egen*/ + +|/*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{}/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +void Throws() : {/*@bgen(jjtree) Throws */ + DynamicSerDeThrows jjtn000 = new DynamicSerDeThrows(JJTTHROWS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} 
+{/*@bgen(jjtree) Throws */
+ try {
+/*@egen*/
+ <tok_throws> "(" FieldList() ")"
+|/*@bgen(jjtree)*/
+{
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+}
+/*@egen*/
+{}/*@bgen(jjtree)*/
+ } catch (Throwable jjte000) {
+ if (jjtc000) {
+ jjtree.clearNodeScope(jjtn000);
+ jjtc000 = false;
+ } else {
+ jjtree.popNode();
+ }
+ if (jjte000 instanceof RuntimeException) {
+ throw (RuntimeException)jjte000;
+ }
+ if (jjte000 instanceof ParseException) {
+ throw (ParseException)jjte000;
+ }
+ throw (Error)jjte000;
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+
+// nothing special - just use the DynamicSerDeFieldList's children methods to access the fields
+DynamicSerDeFieldList FieldList() : {/*@bgen(jjtree) FieldList */
+ DynamicSerDeFieldList jjtn000 = new DynamicSerDeFieldList(JJTFIELDLIST);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/
+ this.field_val = -1;
+}
+{/*@bgen(jjtree) FieldList */
+ try {
+/*@egen*/
+ (Field())*/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } catch (Throwable jjte000) {
+ if (jjtc000) {
+ jjtree.clearNodeScope(jjtn000);
+ jjtc000 = false;
+ } else {
+ jjtree.popNode();
+ }
+ if (jjte000 instanceof RuntimeException) {
+ throw (RuntimeException)jjte000;
+ }
+ if (jjte000 instanceof ParseException) {
+ throw (ParseException)jjte000;
+ }
+ throw (Error)jjte000;
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+
+DynamicSerDeField Field() : {/*@bgen(jjtree) Field */
+ DynamicSerDeField jjtn000 = new DynamicSerDeField(JJTFIELD);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/
+
+ String fidnum = "";
+ String fid;
+}
+{/*@bgen(jjtree) Field */
+ try {
+/*@egen*/
+
+ // parse the field id which is optional
+ [fidnum=<tok_int_constant>.image ":"]
+
+ // is this field required or optional? default is optional
+ FieldRequiredness()
+
+ // field type - obviously not optional
+ FieldType()
+
+ // the name of the field - not optional
+ jjtn000.name = <IDENTIFIER>.image
+
+ // does it have = some value?
+ FieldValue()
+
+ // take it or leave it
+ [CommaOrSemicolon()]/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+
+ {
+ if(fidnum.length() > 0) {
+ int fidInt = Integer.valueOf(fidnum);
+ jjtn000.fieldid = fidInt;
+ } else {
+ jjtn000.fieldid = this.field_val--;
+ }
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } catch (Throwable jjte000) {
+ if (jjtc000) {
+ jjtree.clearNodeScope(jjtn000);
+ jjtc000 = false;
+ } else {
+ jjtree.popNode();
+ }
+ if (jjte000 instanceof RuntimeException) {
+ throw (RuntimeException)jjte000;
+ }
+ if (jjte000 instanceof ParseException) {
+ throw (ParseException)jjte000;
+ }
+ throw (Error)jjte000;
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
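+
+ // Illustrative numbering (assumed example, not from this patch): for
+ //   struct T { i32 a; 2: i32 b; i32 c; }
+ // "a" gets fieldid -1 and "c" gets -2 from this.field_val--, while "b"
+ // keeps its explicit id 2; only fields without an explicit id decrement
+ // the counter, which FieldList() resets to -1 for each field list.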
+
+
+DynamicSerDeFieldRequiredness FieldRequiredness() : {/*@bgen(jjtree) FieldRequiredness */
+ DynamicSerDeFieldRequiredness jjtn000 = new DynamicSerDeFieldRequiredness(JJTFIELDREQUIREDNESS);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) FieldRequiredness */
+ try {
+/*@egen*/
+ <tok_required>/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ jjtn000.requiredness = DynamicSerDeFieldRequiredness.RequirednessTypes.Required;
+ return jjtn000;
+ }
+| <tok_optional>/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ jjtn000.requiredness = DynamicSerDeFieldRequiredness.RequirednessTypes.Optional;
+ return jjtn000;
+ }
+| <tok_skip>/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ jjtn000.requiredness = DynamicSerDeFieldRequiredness.RequirednessTypes.Skippable;
+ return jjtn000;
+ }
+|/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+SimpleNode FieldValue() : {/*@bgen(jjtree) FieldValue */
+ DynamicSerDeFieldValue jjtn000 = new DynamicSerDeFieldValue(JJTFIELDVALUE);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) FieldValue */
+ try {
+/*@egen*/
+ "="
+ ConstValue()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+|/*@bgen(jjtree)*/
+{
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+}
+/*@egen*/
+{
+ return jjtn000;
+}/*@bgen(jjtree)*/
+ } catch (Throwable jjte000) {
+ if (jjtc000) {
+ jjtree.clearNodeScope(jjtn000);
+ jjtc000 = false;
+ } else {
+ jjtree.popNode();
+ }
+ if (jjte000 instanceof RuntimeException) {
+ throw (RuntimeException)jjte000;
+ }
+ if (jjte000 instanceof ParseException) {
+ throw (ParseException)jjte000;
+ }
+ throw (Error)jjte000;
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+SimpleNode DefinitionType() : {/*@bgen(jjtree) DefinitionType */
+ DynamicSerDeDefinitionType jjtn000 = new DynamicSerDeDefinitionType(JJTDEFINITIONTYPE);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) DefinitionType */
+ try {
+/*@egen*/
+// BaseType() xxx
+ TypeString()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| TypeBool()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| Typei16()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000,
true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| Typei32()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| Typei64()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| TypeDouble()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| TypeMap()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| TypeSet()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| TypeList()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } catch (Throwable jjte000) {
+ if (jjtc000) {
+ jjtree.clearNodeScope(jjtn000);
+ jjtc000 = false;
+ } else {
+ jjtree.popNode();
+ }
+ if (jjte000 instanceof RuntimeException) {
+ throw (RuntimeException)jjte000;
+ }
+ if (jjte000 instanceof ParseException) {
+ throw (ParseException)jjte000;
+ }
+ throw (Error)jjte000;
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+void FunctionType() : {/*@bgen(jjtree) FunctionType */
+ DynamicSerDeFunctionType jjtn000 = new DynamicSerDeFunctionType(JJTFUNCTIONTYPE);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) FunctionType */
+ try {
+/*@egen*/
+ FieldType()
+| <tok_void>/*@bgen(jjtree)*/
+{
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+}
+/*@egen*/
+{}/*@bgen(jjtree)*/
+ } catch (Throwable jjte000) {
+ if (jjtc000) {
+ jjtree.clearNodeScope(jjtn000);
+ jjtc000 = false;
+ } else {
+ jjtree.popNode();
+ }
+ if (jjte000 instanceof RuntimeException) {
+ throw (RuntimeException)jjte000;
+ }
+ if (jjte000 instanceof ParseException) {
+ throw (ParseException)jjte000;
+ }
+ throw (Error)jjte000;
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+DynamicSerDeFieldType FieldType() : {/*@bgen(jjtree) FieldType */
+ DynamicSerDeFieldType jjtn000 = new DynamicSerDeFieldType(JJTFIELDTYPE);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/
+}
+
+{/*@bgen(jjtree) FieldType */
+ try {
+/*@egen*/
+ TypeString()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| TypeBool()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| Typei16()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| Typei32()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| Typei64()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+| TypeDouble()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+|
+ TypeMap()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+|
+ TypeSet()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+|
+ TypeList()/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }
+|
+ jjtn000.name = <IDENTIFIER>.image/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+
+ if (this.types.get(jjtn000.name) == null) {
+ System.err.println("ERROR: DDL specifying type " + jjtn000.name + " which has not been defined");
+ throw new RuntimeException("specifying type " + jjtn000.name + " which has not been defined");
+ }
+ // look up the specified type and set this node's type to it. Precludes forward and self references for now.
+ jjtn000.jjtAddChild(this.types.get(jjtn000.name),0);
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } catch (Throwable jjte000) {
+ if (jjtc000) {
+ jjtree.clearNodeScope(jjtn000);
+ jjtc000 = false;
+ } else {
+ jjtree.popNode();
+ }
+ if (jjte000 instanceof RuntimeException) {
+ throw (RuntimeException)jjte000;
+ }
+ if (jjte000 instanceof ParseException) {
+ throw (ParseException)jjte000;
+ }
+ throw (Error)jjte000;
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
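+
+ // Illustrative resolution (assumed example, not from this patch): given
+ //   typedef i32 UserId
+ //   struct Row { UserId id }
+ // the name "UserId" reaches the branch above, is found in this.types (both
+ // Typedef() and Struct() store their names there), and is attached as the
+ // child type; an undefined name fails fast with the RuntimeException above.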
+
+DynamicSerDeTypeString TypeString() : {/*@bgen(jjtree) TypeString */
+ DynamicSerDeTypeString jjtn000 = new DynamicSerDeTypeString(JJTTYPESTRING);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) TypeString */
+ try {
+/*@egen*/
+ <tok_string>/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+DynamicSerDeTypeByte TypeByte() : {/*@bgen(jjtree) TypeByte */
+ DynamicSerDeTypeByte jjtn000 = new DynamicSerDeTypeByte(JJTTYPEBYTE);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/
+}
+{/*@bgen(jjtree) TypeByte */
+ try {
+/*@egen*/
+ <tok_byte>/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+DynamicSerDeTypei16 Typei16() : {/*@bgen(jjtree) Typei16 */
+ DynamicSerDeTypei16 jjtn000 = new DynamicSerDeTypei16(JJTTYPEI16);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/
+}
+{/*@bgen(jjtree) Typei16 */
+ try {
+/*@egen*/
+ <tok_i16>/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+DynamicSerDeTypei32 Typei32() : {/*@bgen(jjtree) Typei32 */
+ DynamicSerDeTypei32 jjtn000 = new DynamicSerDeTypei32(JJTTYPEI32);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) Typei32 */
+ try {
+/*@egen*/
+ <tok_i32>/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+DynamicSerDeTypei64 Typei64() : {/*@bgen(jjtree) Typei64 */
+ DynamicSerDeTypei64 jjtn000 = new DynamicSerDeTypei64(JJTTYPEI64);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) Typei64 */
+ try {
+/*@egen*/
+ <tok_i64>/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+DynamicSerDeTypeDouble TypeDouble() : {/*@bgen(jjtree) TypeDouble */
+ DynamicSerDeTypeDouble jjtn000 = new DynamicSerDeTypeDouble(JJTTYPEDOUBLE);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) TypeDouble */
+ try {
+/*@egen*/
+ <tok_double>/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+DynamicSerDeTypeBool TypeBool() : {/*@bgen(jjtree) TypeBool */
+ DynamicSerDeTypeBool jjtn000 = new DynamicSerDeTypeBool(JJTTYPEBOOL);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) TypeBool */
+ try {
+/*@egen*/
+ <tok_bool>/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+DynamicSerDeTypeMap TypeMap() : {/*@bgen(jjtree) TypeMap */
+ DynamicSerDeTypeMap jjtn000 = new DynamicSerDeTypeMap(JJTTYPEMAP);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) TypeMap */
+ try {
+/*@egen*/
+ <tok_map>
+ "<"
+ FieldType()
+ ","
+ FieldType()
+ ">"/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } catch (Throwable jjte000) {
+ if (jjtc000) {
+ jjtree.clearNodeScope(jjtn000);
+ jjtc000 = false;
+ } else {
+ jjtree.popNode();
+ }
+ if (jjte000 instanceof RuntimeException) {
+ throw (RuntimeException)jjte000;
+ }
+ if (jjte000 instanceof ParseException) {
+ throw (ParseException)jjte000;
+ }
+ throw (Error)jjte000;
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+DynamicSerDeTypeSet TypeSet() : {/*@bgen(jjtree) TypeSet */
+ DynamicSerDeTypeSet jjtn000 = new DynamicSerDeTypeSet(JJTTYPESET);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) TypeSet */
+ try {
+/*@egen*/
+ <tok_set>
+ "<"
+ FieldType()
+ ">"/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } catch (Throwable jjte000) {
+ if (jjtc000) {
+ jjtree.clearNodeScope(jjtn000);
+ jjtc000 = false;
+ } else {
+ jjtree.popNode();
+ }
+ if (jjte000 instanceof RuntimeException) {
+ throw (RuntimeException)jjte000;
+ }
+ if (jjte000 instanceof ParseException) {
+ throw (ParseException)jjte000;
+ }
+ throw (Error)jjte000;
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
+
+DynamicSerDeTypeList TypeList() : {/*@bgen(jjtree) TypeList */
+ DynamicSerDeTypeList jjtn000 = new DynamicSerDeTypeList(JJTTYPELIST);
+ boolean jjtc000 = true;
+ jjtree.openNodeScope(jjtn000);
+/*@egen*/}
+{/*@bgen(jjtree) TypeList */
+ try {
+/*@egen*/
+ <tok_list>
+ "<"
+ FieldType()
+ ">"/*@bgen(jjtree)*/
+ {
+ jjtree.closeNodeScope(jjtn000, true);
+ jjtc000 = false;
+ }
+/*@egen*/
+ {
+ return jjtn000;
+ }/*@bgen(jjtree)*/
+ } catch (Throwable jjte000) {
+ if (jjtc000) {
+ jjtree.clearNodeScope(jjtn000);
+ jjtc000 = false;
+ } else {
+ jjtree.popNode();
+ }
+ if (jjte000 instanceof RuntimeException) {
+ throw (RuntimeException)jjte000;
+ }
+ if (jjte000 instanceof ParseException) {
+ throw (ParseException)jjte000;
+ }
+ throw (Error)jjte000;
+ } finally {
+ if (jjtc000) {
+ jjtree.closeNodeScope(jjtn000, true);
+ }
+ }
+/*@egen*/
+}
Index:
src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMapContents.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMapContents.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMapContents.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstMapContents.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConstMapContents extends SimpleNode { + public DynamicSerDeConstMapContents(int id) { + super(id); + } + + public DynamicSerDeConstMapContents(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDefList.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDefList.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDefList.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeSenumDefList.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeSenumDefList extends SimpleNode { + public DynamicSerDeSenumDefList(int id) { + super(id); + } + + public DynamicSerDeSenumDefList(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeCommaOrSemicolon.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeCommaOrSemicolon.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeCommaOrSemicolon.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeCommaOrSemicolon.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeCommaOrSemicolon extends SimpleNode { + public DynamicSerDeCommaOrSemicolon(int id) { + super(id); + } + + public DynamicSerDeCommaOrSemicolon(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java (revision 0) @@ -0,0 +1,133 @@ +/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 3.0 */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class TokenMgrError extends Error +{ + /* + * Ordinals for various reasons why an Error of this type can be thrown. + */ + + /** + * Lexical error occured. + */ + static final int LEXICAL_ERROR = 0; + + /** + * An attempt wass made to create a second instance of a static token manager. + */ + static final int STATIC_LEXER_ERROR = 1; + + /** + * Tried to change to an invalid lexical state. + */ + static final int INVALID_LEXICAL_STATE = 2; + + /** + * Detected (and bailed out of) an infinite loop in the token manager. 
+ */ + static final int LOOP_DETECTED = 3; + + /** + * Indicates the reason why the exception is thrown. It will have + * one of the above 4 values. + */ + int errorCode; + + /** + * Replaces unprintable characters by their espaced (or unicode escaped) + * equivalents in the given string + */ + protected static final String addEscapes(String str) { + StringBuffer retval = new StringBuffer(); + char ch; + for (int i = 0; i < str.length(); i++) { + switch (str.charAt(i)) + { + case 0 : + continue; + case '\b': + retval.append("\\b"); + continue; + case '\t': + retval.append("\\t"); + continue; + case '\n': + retval.append("\\n"); + continue; + case '\f': + retval.append("\\f"); + continue; + case '\r': + retval.append("\\r"); + continue; + case '\"': + retval.append("\\\""); + continue; + case '\'': + retval.append("\\\'"); + continue; + case '\\': + retval.append("\\\\"); + continue; + default: + if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { + String s = "0000" + Integer.toString(ch, 16); + retval.append("\\u" + s.substring(s.length() - 4, s.length())); + } else { + retval.append(ch); + } + continue; + } + } + return retval.toString(); + } + + /** + * Returns a detailed message for the Error when it is thrown by the + * token manager to indicate a lexical error. + * Parameters : + * EOFSeen : indicates if EOF caused the lexicl error + * curLexState : lexical state in which this error occured + * errorLine : line number when the error occured + * errorColumn : column number when the error occured + * errorAfter : prefix that was seen before this error occured + * curchar : the offending character + * Note: You can customize the lexical error message by modifying this method. + */ + protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) { + return("Lexical error at line " + + errorLine + ", column " + + errorColumn + ". Encountered: " + + (EOFSeen ? " " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") + + "after : \"" + addEscapes(errorAfter) + "\""); + } + + /** + * You can also modify the body of this method to customize your error messages. + * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not + * of end-users concern, so you can return something like : + * + * "Internal Error : Please file a bug report .... " + * + * from this method for such cases in the release version of your parser. + */ + public String getMessage() { + return super.getMessage(); + } + + /* + * Constructors of various flavors follow. + */ + + public TokenMgrError() { + } + + public TokenMgrError(String message, int reason) { + super(message); + errorCode = reason; + } + + public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) { + this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason); + } +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeExtends.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeExtends.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeExtends.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeExtends.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeExtends extends SimpleNode { + public DynamicSerDeExtends(int id) { + super(id); + } + + public DynamicSerDeExtends(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java (revision 0) @@ -0,0 +1,439 @@ +/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 4.0 */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +/** + * An implementation of interface CharStream, where the stream is assumed to + * contain only ASCII characters (without unicode processing). + */ + +public class SimpleCharStream +{ + public static final boolean staticFlag = false; + int bufsize; + int available; + int tokenBegin; + public int bufpos = -1; + protected int bufline[]; + protected int bufcolumn[]; + + protected int column = 0; + protected int line = 1; + + protected boolean prevCharIsCR = false; + protected boolean prevCharIsLF = false; + + protected java.io.Reader inputStream; + + protected char[] buffer; + protected int maxNextCharInd = 0; + protected int inBuf = 0; + protected int tabSize = 8; + + protected void setTabSize(int i) { tabSize = i; } + protected int getTabSize(int i) { return tabSize; } + + + protected void ExpandBuff(boolean wrapAround) + { + char[] newbuffer = new char[bufsize + 2048]; + int newbufline[] = new int[bufsize + 2048]; + int newbufcolumn[] = new int[bufsize + 2048]; + + try + { + if (wrapAround) + { + System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); + System.arraycopy(buffer, 0, newbuffer, + bufsize - tokenBegin, bufpos); + buffer = newbuffer; + + System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); + System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); + bufline = newbufline; + + System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); + System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); + bufcolumn = newbufcolumn; + + maxNextCharInd = (bufpos += (bufsize - tokenBegin)); + } + else + { + System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); + buffer = newbuffer; + + System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); + bufline = newbufline; + + System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); + bufcolumn = newbufcolumn; + + maxNextCharInd = (bufpos -= tokenBegin); + } + } + catch (Throwable t) + { + throw new Error(t.getMessage()); + } + + + bufsize += 2048; + available = bufsize; + tokenBegin = 0; + } + + protected void FillBuff() throws java.io.IOException + { + if (maxNextCharInd == available) + { + if (available == bufsize) + { + if (tokenBegin > 2048) + { + bufpos = maxNextCharInd = 0; + available = tokenBegin; + } + else if (tokenBegin < 0) + bufpos = maxNextCharInd = 0; + else + ExpandBuff(false); + } + else if (available > tokenBegin) + available = bufsize; + else if ((tokenBegin - available) < 2048) + ExpandBuff(true); + else + available = tokenBegin; + } + + int i; + try { + if ((i = inputStream.read(buffer, maxNextCharInd, + available - maxNextCharInd)) == -1) + { + 
inputStream.close(); + throw new java.io.IOException(); + } + else + maxNextCharInd += i; + return; + } + catch(java.io.IOException e) { + --bufpos; + backup(0); + if (tokenBegin == -1) + tokenBegin = bufpos; + throw e; + } + } + + public char BeginToken() throws java.io.IOException + { + tokenBegin = -1; + char c = readChar(); + tokenBegin = bufpos; + + return c; + } + + protected void UpdateLineColumn(char c) + { + column++; + + if (prevCharIsLF) + { + prevCharIsLF = false; + line += (column = 1); + } + else if (prevCharIsCR) + { + prevCharIsCR = false; + if (c == '\n') + { + prevCharIsLF = true; + } + else + line += (column = 1); + } + + switch (c) + { + case '\r' : + prevCharIsCR = true; + break; + case '\n' : + prevCharIsLF = true; + break; + case '\t' : + column--; + column += (tabSize - (column % tabSize)); + break; + default : + break; + } + + bufline[bufpos] = line; + bufcolumn[bufpos] = column; + } + + public char readChar() throws java.io.IOException + { + if (inBuf > 0) + { + --inBuf; + + if (++bufpos == bufsize) + bufpos = 0; + + return buffer[bufpos]; + } + + if (++bufpos >= maxNextCharInd) + FillBuff(); + + char c = buffer[bufpos]; + + UpdateLineColumn(c); + return (c); + } + + /** + * @deprecated + * @see #getEndColumn + */ + + public int getColumn() { + return bufcolumn[bufpos]; + } + + /** + * @deprecated + * @see #getEndLine + */ + + public int getLine() { + return bufline[bufpos]; + } + + public int getEndColumn() { + return bufcolumn[bufpos]; + } + + public int getEndLine() { + return bufline[bufpos]; + } + + public int getBeginColumn() { + return bufcolumn[tokenBegin]; + } + + public int getBeginLine() { + return bufline[tokenBegin]; + } + + public void backup(int amount) { + + inBuf += amount; + if ((bufpos -= amount) < 0) + bufpos += bufsize; + } + + public SimpleCharStream(java.io.Reader dstream, int startline, + int startcolumn, int buffersize) + { + inputStream = dstream; + line = startline; + column = startcolumn - 1; + + available = bufsize = buffersize; + buffer = new char[buffersize]; + bufline = new int[buffersize]; + bufcolumn = new int[buffersize]; + } + + public SimpleCharStream(java.io.Reader dstream, int startline, + int startcolumn) + { + this(dstream, startline, startcolumn, 4096); + } + + public SimpleCharStream(java.io.Reader dstream) + { + this(dstream, 1, 1, 4096); + } + public void ReInit(java.io.Reader dstream, int startline, + int startcolumn, int buffersize) + { + inputStream = dstream; + line = startline; + column = startcolumn - 1; + + if (buffer == null || buffersize != buffer.length) + { + available = bufsize = buffersize; + buffer = new char[buffersize]; + bufline = new int[buffersize]; + bufcolumn = new int[buffersize]; + } + prevCharIsLF = prevCharIsCR = false; + tokenBegin = inBuf = maxNextCharInd = 0; + bufpos = -1; + } + + public void ReInit(java.io.Reader dstream, int startline, + int startcolumn) + { + ReInit(dstream, startline, startcolumn, 4096); + } + + public void ReInit(java.io.Reader dstream) + { + ReInit(dstream, 1, 1, 4096); + } + public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, + int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException + { + this(encoding == null ? 
new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); + } + + public SimpleCharStream(java.io.InputStream dstream, int startline, + int startcolumn, int buffersize) + { + this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); + } + + public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, + int startcolumn) throws java.io.UnsupportedEncodingException + { + this(dstream, encoding, startline, startcolumn, 4096); + } + + public SimpleCharStream(java.io.InputStream dstream, int startline, + int startcolumn) + { + this(dstream, startline, startcolumn, 4096); + } + + public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException + { + this(dstream, encoding, 1, 1, 4096); + } + + public SimpleCharStream(java.io.InputStream dstream) + { + this(dstream, 1, 1, 4096); + } + + public void ReInit(java.io.InputStream dstream, String encoding, int startline, + int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException + { + ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); + } + + public void ReInit(java.io.InputStream dstream, int startline, + int startcolumn, int buffersize) + { + ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); + } + + public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException + { + ReInit(dstream, encoding, 1, 1, 4096); + } + + public void ReInit(java.io.InputStream dstream) + { + ReInit(dstream, 1, 1, 4096); + } + public void ReInit(java.io.InputStream dstream, String encoding, int startline, + int startcolumn) throws java.io.UnsupportedEncodingException + { + ReInit(dstream, encoding, startline, startcolumn, 4096); + } + public void ReInit(java.io.InputStream dstream, int startline, + int startcolumn) + { + ReInit(dstream, startline, startcolumn, 4096); + } + public String GetImage() + { + if (bufpos >= tokenBegin) + return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); + else + return new String(buffer, tokenBegin, bufsize - tokenBegin) + + new String(buffer, 0, bufpos + 1); + } + + public char[] GetSuffix(int len) + { + char[] ret = new char[len]; + + if ((bufpos + 1) >= len) + System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); + else + { + System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, + len - bufpos - 1); + System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); + } + + return ret; + } + + public void Done() + { + buffer = null; + bufline = null; + bufcolumn = null; + } + + /** + * Method to adjust line and column numbers for the start of a token. 
+ */ + public void adjustBeginLineColumn(int newLine, int newCol) + { + int start = tokenBegin; + int len; + + if (bufpos >= tokenBegin) + { + len = bufpos - tokenBegin + inBuf + 1; + } + else + { + len = bufsize - tokenBegin + bufpos + 1 + inBuf; + } + + int i = 0, j = 0, k = 0; + int nextColDiff = 0, columnDiff = 0; + + while (i < len && + bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) + { + bufline[j] = newLine; + nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; + bufcolumn[j] = newCol + columnDiff; + columnDiff = nextColDiff; + i++; + } + + if (i < len) + { + bufline[j] = newLine++; + bufcolumn[j] = newCol + columnDiff; + + while (i++ < len) + { + if (bufline[j = start % bufsize] != bufline[++start % bufsize]) + bufline[j] = newLine++; + else + bufline[j] = newLine; + } + } + + line = bufline[j]; + column = bufcolumn[j]; + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBase.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBase.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBase.java (working copy) @@ -36,6 +36,11 @@ super(p,i); } + public void initialize() { + // for base type, do nothing. Other types, like structs may initialize internal data + // structures. + } + public Class getRealType() throws SerDeException { throw new SerDeException("Not implemented in base"); } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeAsync.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeAsync.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeAsync.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeAsync.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeAsync extends SimpleNode { + public DynamicSerDeAsync(int id) { + super(id); + } + + public DynamicSerDeAsync(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeList.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeList.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeList.java (working copy) @@ -59,6 +59,10 @@ @Override public ArrayList deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { TList thelist = iprot.readListBegin(); + if (thelist == null) { + return null; + } + ArrayList deserializeReuse; if (reuse != null) { deserializeReuse = (ArrayList)reuse; @@ -89,17 +93,32 @@ ObjectInspector elementObjectInspector = loi.getListElementObjectInspector(); DynamicSerDeTypeBase mt = this.getElementType(); + org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol nullProtocol = + (oprot instanceof org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol) + ? 
(org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol)oprot
+      : null;
+
     if (o instanceof List) {
       List list = (List)o;
       oprot.writeListBegin(new TList(mt.getType(),list.size()));
       for (Object element: list) {
-        mt.serialize(element, elementObjectInspector, oprot);
+        if (element == null) {
+          assert(nullProtocol != null);
+          nullProtocol.writeNull();
+        } else {
+          mt.serialize(element, elementObjectInspector, oprot);
+        }
       }
     } else {
       Object[] list = (Object[])o;
       oprot.writeListBegin(new TList(mt.getType(),list.length));
       for (Object element: list) {
-        mt.serialize(element, elementObjectInspector, oprot);
+        if (element == null && nullProtocol != null) {
+          assert(nullProtocol != null);
+          nullProtocol.writeNull();
+        } else {
+          mt.serialize(element, elementObjectInspector, oprot);
+        }
       }
     }
     // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy
Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java
===================================================================
--- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java (revision 0)
+++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java (revision 0)
@@ -0,0 +1,2317 @@
+/* Generated By:JJTree&JavaCC: Do not edit this line. thrift_grammar.java */
+package org.apache.hadoop.hive.serde2.dynamic_type;
+
+import java.util.*;
+import java.io.*;
+import java.net.*;
+import com.facebook.thrift.protocol.*;
+import com.facebook.thrift.transport.*;
+import org.apache.hadoop.hive.serde2.dynamic_type.*;
+
+public class thrift_grammar/*@bgen(jjtree)*/implements thrift_grammarTreeConstants, thrift_grammarConstants {/*@bgen(jjtree)*/
+ protected JJTthrift_grammarState jjtree = new JJTthrift_grammarState();
+ private List<String> include_path = null;
+
+ // for computing the autogenerated field ids in thrift
+ private int field_val;
+
+ // store types and tables
+ // separately because one cannot use a table (ie service.method) as a Struct like type.
+ protected Map types;
+ protected Map tables;
+
+ // system include path
+ final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" };
+
+ // need three params to differentiate between this and 2 param method auto generated since
+ // some calls in the autogenerated code use null param for 2nd param and thus ambiguous.
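+ // A minimal usage sketch (illustrative only, not part of this patch; the DDL
+ // string below is an assumed example, and the constructor is protected, so
+ // this would run from within the package). The boolean argument carries no
+ // meaning beyond selecting this overload:
+ //   InputStream ddl = new ByteArrayInputStream(
+ //       "struct simple { i32 num, string name }".getBytes());
+ //   thrift_grammar parser = new thrift_grammar(ddl, new ArrayList<String>(), false);
+ //   parser.Start();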
+ protected thrift_grammar(InputStream is, List<String> include_path, boolean junk) {
+   this(is,null);
+   this.types = new HashMap () ;
+   this.tables = new HashMap () ;
+   this.include_path = include_path;
+   this.field_val = -1;
+ }
+
+ // find the file on the include path
+ private static File findFile(String fname, List<String> include_path) {
+   for(String path: include_path) {
+     final String full = path + "/" + fname;
+     File f = new File(full);
+     if(f.exists()) {
+       return f;
+     }
+   }
+   return null;
+ }
+
+ public static void main(String args[]) {
+   String filename = null;
+   List<String> include_path = new ArrayList<String>();
+
+   for(String path: default_include_path) {
+     include_path.add(path);
+   }
+   for(int i = 0; i < args.length; i++) {
+     String arg = args[i];
+     if(arg.equals("--include") && i + 1 < args.length) {
+       include_path.add(args[++i]);
+     }
+     if(arg.equals("--file") && i + 1 < args.length) {
+       filename = args[++i];
+     }
+   }
+
+   InputStream is = System.in;
+   if(filename != null) {
+     try {
+       is = new FileInputStream(findFile(filename, include_path));
+     } catch(IOException e) {
+     }
+   }
+   thrift_grammar t = new thrift_grammar(is,include_path,false);
+
+   try {
+     t.Start();
+   } catch (Exception e) {
+     System.out.println("Parse error.");
+     System.out.println(e.getMessage());
+     e.printStackTrace();
+   }
+ }
+
+ final public SimpleNode Start() throws ParseException {
+  /*@bgen(jjtree) Start */
+  DynamicSerDeStart jjtn000 = new DynamicSerDeStart(JJTSTART);
+  boolean jjtc000 = true;
+  jjtree.openNodeScope(jjtn000);
+  try {
+   HeaderList();
+   label_1:
+   while (true) {
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case 59:
+    case 60:
+     CommaOrSemicolon();
+     break;
+    default:
+     jj_la1[0] = jj_gen;
+     ;
+    }
+    Definition();
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case tok_const:
+    case tok_senum:
+    case tok_typedef:
+    case tok_struct:
+    case tok_exception:
+    case tok_service:
+    case tok_enum:
+    case 59:
+    case 60:
+     ;
+     break;
+    default:
+     jj_la1[1] = jj_gen;
+     break label_1;
+    }
+   }
+   jjtree.closeNodeScope(jjtn000, true);
+   jjtc000 = false;
+   {if (true) return jjtn000;}
+  } catch (Throwable jjte000) {
+   if (jjtc000) {
+    jjtree.clearNodeScope(jjtn000);
+    jjtc000 = false;
+   } else {
+    jjtree.popNode();
+   }
+   if (jjte000 instanceof RuntimeException) {
+    {if (true) throw (RuntimeException)jjte000;}
+   }
+   if (jjte000 instanceof ParseException) {
+    {if (true) throw (ParseException)jjte000;}
+   }
+   {if (true) throw (Error)jjte000;}
+  } finally {
+   if (jjtc000) {
+    jjtree.closeNodeScope(jjtn000, true);
+   }
+  }
+  throw new Error("Missing return statement in function");
+ }
+
+ final public SimpleNode HeaderList() throws ParseException {
+  /*@bgen(jjtree) HeaderList */
+  DynamicSerDeHeaderList jjtn000 = new DynamicSerDeHeaderList(JJTHEADERLIST);
+  boolean jjtc000 = true;
+  jjtree.openNodeScope(jjtn000);
+  try {
+   label_2:
+   while (true) {
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case tok_namespace:
+    case tok_cpp_namespace:
+    case tok_cpp_include:
+    case tok_java_package:
+    case tok_cocoa_prefix:
+    case tok_csharp_namespace:
+    case tok_php_namespace:
+    case tok_py_module:
+    case tok_perl_package:
+    case tok_ruby_namespace:
+    case tok_smalltalk_category:
+    case tok_smalltalk_prefix:
+    case tok_xsd_namespace:
+    case tok_include:
+     ;
+     break;
+    default:
+     jj_la1[2] = jj_gen;
+     break label_2;
+    }
+    Header();
+   }
+   jjtree.closeNodeScope(jjtn000, true);
+   jjtc000 = false;
+   {if (true) return jjtn000;}
+  } catch (Throwable jjte000) {
+   if (jjtc000) {
+    jjtree.clearNodeScope(jjtn000);
+    jjtc000 = false;
+   } else {
+    jjtree.popNode();
+   }
+   if
(jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Header() throws ParseException { + /*@bgen(jjtree) Header */ + DynamicSerDeHeader jjtn000 = new DynamicSerDeHeader(JJTHEADER); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_include: + Include(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_namespace: + case tok_cpp_namespace: + case tok_cpp_include: + case tok_java_package: + case tok_cocoa_prefix: + case tok_csharp_namespace: + case tok_php_namespace: + case tok_py_module: + case tok_perl_package: + case tok_ruby_namespace: + case tok_smalltalk_category: + case tok_smalltalk_prefix: + case tok_xsd_namespace: + Namespace(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[3] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Namespace() throws ParseException { + /*@bgen(jjtree) Namespace */ + DynamicSerDeNamespace jjtn000 = new DynamicSerDeNamespace(JJTNAMESPACE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_namespace: + jj_consume_token(tok_namespace); + jj_consume_token(IDENTIFIER); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_cpp_namespace: + jj_consume_token(tok_cpp_namespace); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_cpp_include: + jj_consume_token(tok_cpp_include); + jj_consume_token(tok_literal); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_php_namespace: + jj_consume_token(tok_php_namespace); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_py_module: + jj_consume_token(tok_py_module); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_perl_package: + jj_consume_token(tok_perl_package); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_ruby_namespace: + jj_consume_token(tok_ruby_namespace); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_smalltalk_category: + jj_consume_token(tok_smalltalk_category); + 
jj_consume_token(tok_st_identifier); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_smalltalk_prefix: + jj_consume_token(tok_smalltalk_prefix); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_java_package: + jj_consume_token(tok_java_package); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_cocoa_prefix: + jj_consume_token(tok_cocoa_prefix); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_xsd_namespace: + jj_consume_token(tok_xsd_namespace); + jj_consume_token(tok_literal); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_csharp_namespace: + jj_consume_token(tok_csharp_namespace); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[4] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Include() throws ParseException { + /*@bgen(jjtree) Include */ + DynamicSerDeInclude jjtn000 = new DynamicSerDeInclude(JJTINCLUDE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000);String fname; + boolean found = false; + try { + jj_consume_token(tok_include); + fname = jj_consume_token(tok_literal).image; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + // bugbug somewhat fragile below substring expression + fname = fname.substring(1,fname.length() - 1); + + // try to find the file on the include path + File f = thrift_grammar.findFile(fname, this.include_path); + if(f != null) { + found = true; + try { + FileInputStream fis = new FileInputStream(f); + thrift_grammar t = new thrift_grammar(fis,this.include_path, false); + t.Start(); + fis.close(); + found = true; + // add in what we found to our type and table tables. 
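+ // For illustration (assumed example, not from this patch): if the included
+ // file defines "struct S { i32 x }", the putAll calls below make "S"
+ // resolvable when FieldType() later consults this.types.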
+ this.tables.putAll(t.tables); + this.types.putAll(t.types); + } catch (Exception e) { + System.out.println("File: " + fname + " - Oops."); + System.out.println(e.getMessage()); + e.printStackTrace(); + } + } + if(!found) { + {if (true) throw new RuntimeException("include file not found: " + fname);} + } + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Definition() throws ParseException { + /*@bgen(jjtree) Definition */ + DynamicSerDeDefinition jjtn000 = new DynamicSerDeDefinition(JJTDEFINITION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_const: + Const(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_service: + Service(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_senum: + case tok_typedef: + case tok_struct: + case tok_exception: + case tok_enum: + TypeDefinition(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[5] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode TypeDefinition() throws ParseException { + /*@bgen(jjtree) TypeDefinition */ + DynamicSerDeTypeDefinition jjtn000 = new DynamicSerDeTypeDefinition(JJTTYPEDEFINITION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_typedef: + Typedef(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_enum: + Enum(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_senum: + Senum(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_struct: + Struct(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_exception: + Xception(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[6] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypedef Typedef() throws ParseException { + /*@bgen(jjtree) Typedef */ + DynamicSerDeTypedef jjtn000 = new DynamicSerDeTypedef(JJTTYPEDEF); + boolean 
jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_typedef); + DefinitionType(); + jjtn000.name = jj_consume_token(IDENTIFIER).image; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + // store the type for later retrieval + this.types.put(jjtn000.name, jjtn000); + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + +// returning void because we ignore this production. + final public void CommaOrSemicolon() throws ParseException { + /*@bgen(jjtree) CommaOrSemicolon */ + DynamicSerDeCommaOrSemicolon jjtn000 = new DynamicSerDeCommaOrSemicolon(JJTCOMMAORSEMICOLON); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 59: + jj_consume_token(59); + break; + case 60: + jj_consume_token(60); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + default: + jj_la1[7] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + } + + final public SimpleNode Enum() throws ParseException { + /*@bgen(jjtree) Enum */ + DynamicSerDeEnum jjtn000 = new DynamicSerDeEnum(JJTENUM); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_enum); + jj_consume_token(IDENTIFIER); + jj_consume_token(61); + EnumDefList(); + jj_consume_token(62); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode EnumDefList() throws ParseException { + /*@bgen(jjtree) EnumDefList */ + DynamicSerDeEnumDefList jjtn000 = new DynamicSerDeEnumDefList(JJTENUMDEFLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + label_3: + while (true) { + EnumDef(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case IDENTIFIER: + ; + break; + default: + jj_la1[8] = jj_gen; + break label_3; + } + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode EnumDef() throws ParseException { + /*@bgen(jjtree) EnumDef */ 
+ DynamicSerDeEnumDef jjtn000 = new DynamicSerDeEnumDef(JJTENUMDEF); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(IDENTIFIER); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 63: + jj_consume_token(63); + jj_consume_token(tok_int_constant); + break; + default: + jj_la1[9] = jj_gen; + ; + } + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 59: + case 60: + CommaOrSemicolon(); + break; + default: + jj_la1[10] = jj_gen; + ; + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Senum() throws ParseException { + /*@bgen(jjtree) Senum */ + DynamicSerDeSenum jjtn000 = new DynamicSerDeSenum(JJTSENUM); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_senum); + jj_consume_token(IDENTIFIER); + jj_consume_token(61); + SenumDefList(); + jj_consume_token(62); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode SenumDefList() throws ParseException { + /*@bgen(jjtree) SenumDefList */ + DynamicSerDeSenumDefList jjtn000 = new DynamicSerDeSenumDefList(JJTSENUMDEFLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + label_4: + while (true) { + SenumDef(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_literal: + ; + break; + default: + jj_la1[11] = jj_gen; + break label_4; + } + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode SenumDef() throws ParseException { + /*@bgen(jjtree) SenumDef */ + DynamicSerDeSenumDef jjtn000 = new DynamicSerDeSenumDef(JJTSENUMDEF); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_literal); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 59: + case 60: + CommaOrSemicolon(); + break; + default: + jj_la1[12] = jj_gen; + ; + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if 
(jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Const() throws ParseException { + /*@bgen(jjtree) Const */ + DynamicSerDeConst jjtn000 = new DynamicSerDeConst(JJTCONST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_const); + FieldType(); + jj_consume_token(IDENTIFIER); + jj_consume_token(63); + ConstValue(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 59: + case 60: + CommaOrSemicolon(); + break; + default: + jj_la1[13] = jj_gen; + ; + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode ConstValue() throws ParseException { + /*@bgen(jjtree) ConstValue */ + DynamicSerDeConstValue jjtn000 = new DynamicSerDeConstValue(JJTCONSTVALUE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_int_constant: + jj_consume_token(tok_int_constant); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + case tok_double_constant: + jj_consume_token(tok_double_constant); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + case tok_literal: + jj_consume_token(tok_literal); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + case IDENTIFIER: + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + case 64: + ConstList(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + case 61: + ConstMap(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[14] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode ConstList() throws ParseException { + /*@bgen(jjtree) ConstList */ + DynamicSerDeConstList jjtn000 = new DynamicSerDeConstList(JJTCONSTLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(64); + ConstListContents(); + jj_consume_token(65); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch 
(Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode ConstListContents() throws ParseException { + /*@bgen(jjtree) ConstListContents */ + DynamicSerDeConstListContents jjtn000 = new DynamicSerDeConstListContents(JJTCONSTLISTCONTENTS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + label_5: + while (true) { + ConstValue(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 59: + case 60: + CommaOrSemicolon(); + break; + default: + jj_la1[15] = jj_gen; + ; + } + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_int_constant: + case tok_double_constant: + case IDENTIFIER: + case tok_literal: + case 61: + case 64: + ; + break; + default: + jj_la1[16] = jj_gen; + break label_5; + } + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode ConstMap() throws ParseException { + /*@bgen(jjtree) ConstMap */ + DynamicSerDeConstMap jjtn000 = new DynamicSerDeConstMap(JJTCONSTMAP); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(61); + ConstMapContents(); + jj_consume_token(62); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode ConstMapContents() throws ParseException { + /*@bgen(jjtree) ConstMapContents */ + DynamicSerDeConstMapContents jjtn000 = new DynamicSerDeConstMapContents(JJTCONSTMAPCONTENTS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_int_constant: + case tok_double_constant: + case IDENTIFIER: + case tok_literal: + case 61: + case 64: + label_6: + while (true) { + ConstValue(); + jj_consume_token(66); + ConstValue(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 59: + case 60: + CommaOrSemicolon(); + break; + default: + jj_la1[17] = jj_gen; + ; + } + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_int_constant: + case tok_double_constant: + case IDENTIFIER: + case tok_literal: + case 61: + case 64: + ; + break; + default: + jj_la1[18] = jj_gen; + break label_6; + } + } + 
jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + default: + jj_la1[19] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeStruct Struct() throws ParseException { + /*@bgen(jjtree) Struct */ + DynamicSerDeStruct jjtn000 = new DynamicSerDeStruct(JJTSTRUCT); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_struct); + jjtn000.name = jj_consume_token(IDENTIFIER).image; + jj_consume_token(61); + FieldList(); + jj_consume_token(62); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + this.types.put(jjtn000.name,jjtn000); + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Xception() throws ParseException { + /*@bgen(jjtree) Xception */ + DynamicSerDeXception jjtn000 = new DynamicSerDeXception(JJTXCEPTION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_exception); + jj_consume_token(IDENTIFIER); + jj_consume_token(61); + FieldList(); + jj_consume_token(62); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Service() throws ParseException { + /*@bgen(jjtree) Service */ + DynamicSerDeService jjtn000 = new DynamicSerDeService(JJTSERVICE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_service); + jj_consume_token(IDENTIFIER); + Extends(); + jj_consume_token(61); + FlagArgs(); + label_7: + while (true) { + Function(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_void: + case tok_bool: + case tok_i16: + case tok_i32: + case tok_i64: + case tok_double: + case tok_string: + case tok_map: + case tok_list: + case tok_set: + case tok_async: + case IDENTIFIER: + ; + break; + default: + jj_la1[20] = jj_gen; + break label_7; + } + } + UnflagArgs(); + jj_consume_token(62); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + // at some point, these should be inserted as a "db" + {if (true) return jjtn000;} + } 
catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode FlagArgs() throws ParseException { + /*@bgen(jjtree) FlagArgs */ + DynamicSerDeFlagArgs jjtn000 = new DynamicSerDeFlagArgs(JJTFLAGARGS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode UnflagArgs() throws ParseException { + /*@bgen(jjtree) UnflagArgs */ + DynamicSerDeUnflagArgs jjtn000 = new DynamicSerDeUnflagArgs(JJTUNFLAGARGS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Extends() throws ParseException { + /*@bgen(jjtree) Extends */ + DynamicSerDeExtends jjtn000 = new DynamicSerDeExtends(JJTEXTENDS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_extends: + jj_consume_token(tok_extends); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[21] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeFunction Function() throws ParseException { + /*@bgen(jjtree) Function */ + DynamicSerDeFunction jjtn000 = new DynamicSerDeFunction(JJTFUNCTION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + Async(); + FunctionType(); + // the name of the function/table + jjtn000.name = jj_consume_token(IDENTIFIER).image; + jj_consume_token(67); + FieldList(); + jj_consume_token(68); + Throws(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 59: + case 60: + CommaOrSemicolon(); + break; + default: + jj_la1[22] = jj_gen; + ; + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + this.tables.put(jjtn000.name, jjtn000); + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public void Async() throws ParseException { + /*@bgen(jjtree) Async */ + DynamicSerDeAsync jjtn000 = new DynamicSerDeAsync(JJTASYNC); + boolean jjtc000 = true; + 
jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_async: + jj_consume_token(tok_async); + break; + default: + jj_la1[23] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + } + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + } + + final public void Throws() throws ParseException { + /*@bgen(jjtree) Throws */ + DynamicSerDeThrows jjtn000 = new DynamicSerDeThrows(JJTTHROWS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_throws: + jj_consume_token(tok_throws); + jj_consume_token(67); + FieldList(); + jj_consume_token(68); + break; + default: + jj_la1[24] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + } + +// nothing special - just use the DynamicSerDeFieldList's children methods to access the fields + final public DynamicSerDeFieldList FieldList() throws ParseException { + /*@bgen(jjtree) FieldList */ + DynamicSerDeFieldList jjtn000 = new DynamicSerDeFieldList(JJTFIELDLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000);this.field_val = -1; + try { + label_8: + while (true) { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_bool: + case tok_i16: + case tok_i32: + case tok_i64: + case tok_double: + case tok_string: + case tok_map: + case tok_list: + case tok_set: + case tok_required: + case tok_optional: + case tok_skip: + case tok_int_constant: + case IDENTIFIER: + ; + break; + default: + jj_la1[25] = jj_gen; + break label_8; + } + Field(); + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeField Field() throws ParseException { + /*@bgen(jjtree) Field */ + DynamicSerDeField jjtn000 = new DynamicSerDeField(JJTFIELD); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000);String fidnum = ""; + String fid; + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_int_constant: + fidnum = jj_consume_token(tok_int_constant).image; + jj_consume_token(66); + break; + default: + jj_la1[26] = jj_gen; + ; + } + FieldRequiredness(); + FieldType(); + // the name of the field - not optional + jjtn000.name = jj_consume_token(IDENTIFIER).image; + FieldValue(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 59: + case 60: + CommaOrSemicolon(); + break; + default: + jj_la1[27] = jj_gen; + ; + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + if(fidnum.length() > 0) { + int fidInt = Integer.valueOf(fidnum); + jjtn000.fieldid = fidInt; + } else { + jjtn000.fieldid = this.field_val--; + 
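+ // A field with no explicit "N:" id prefix takes this branch and receives the next implicit id from field_val, i.e. -1, -2, ... in declaration order (FieldList resets field_val to -1), so implicit ids never collide with explicit non-negative ones.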
} + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeFieldRequiredness FieldRequiredness() throws ParseException { + /*@bgen(jjtree) FieldRequiredness */ + DynamicSerDeFieldRequiredness jjtn000 = new DynamicSerDeFieldRequiredness(JJTFIELDREQUIREDNESS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_required: + jj_consume_token(tok_required); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + jjtn000.requiredness = DynamicSerDeFieldRequiredness.RequirednessTypes.Required; + {if (true) return jjtn000;} + break; + case tok_optional: + jj_consume_token(tok_optional); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + jjtn000.requiredness = DynamicSerDeFieldRequiredness.RequirednessTypes.Optional; + {if (true) return jjtn000;} + break; + case tok_skip: + jj_consume_token(tok_skip); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + jjtn000.requiredness = DynamicSerDeFieldRequiredness.RequirednessTypes.Skippable; + {if (true) return jjtn000;} + break; + default: + jj_la1[28] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode FieldValue() throws ParseException { + /*@bgen(jjtree) FieldValue */ + DynamicSerDeFieldValue jjtn000 = new DynamicSerDeFieldValue(JJTFIELDVALUE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 63: + jj_consume_token(63); + ConstValue(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[29] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode DefinitionType() throws ParseException { + /*@bgen(jjtree) DefinitionType */ + DynamicSerDeDefinitionType jjtn000 = new DynamicSerDeDefinitionType(JJTDEFINITIONTYPE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_string: + TypeString(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_bool: + TypeBool(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i16: + Typei16(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = 
false; + {if (true) return jjtn000;} + break; + case tok_i32: + Typei32(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i64: + Typei64(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_double: + TypeDouble(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_map: + TypeMap(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_set: + TypeSet(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_list: + TypeList(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[30] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public void FunctionType() throws ParseException { + /*@bgen(jjtree) FunctionType */ + DynamicSerDeFunctionType jjtn000 = new DynamicSerDeFunctionType(JJTFUNCTIONTYPE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_bool: + case tok_i16: + case tok_i32: + case tok_i64: + case tok_double: + case tok_string: + case tok_map: + case tok_list: + case tok_set: + case IDENTIFIER: + FieldType(); + break; + case tok_void: + jj_consume_token(tok_void); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + default: + jj_la1[31] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + } + + final public DynamicSerDeFieldType FieldType() throws ParseException { + /*@bgen(jjtree) FieldType */ + DynamicSerDeFieldType jjtn000 = new DynamicSerDeFieldType(JJTFIELDTYPE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_string: + TypeString(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_bool: + TypeBool(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i16: + Typei16(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i32: + Typei32(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i64: + Typei64(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_double: + TypeDouble(); + jjtree.closeNodeScope(jjtn000, true); 
+ jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_map: + TypeMap(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_set: + TypeSet(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_list: + TypeList(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case IDENTIFIER: + jjtn000.name = jj_consume_token(IDENTIFIER).image; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + if (this.types.get(jjtn000.name) == null) { + System.err.println("ERROR: DDL specifying type " + jjtn000.name + " which has not been defined"); + {if (true) throw new RuntimeException("specifying type " + jjtn000.name + " which has not been defined");} + } + // lookup the specified type and set this node's type to it. Precludes forward and self references for now. + jjtn000.jjtAddChild(this.types.get(jjtn000.name),0); + {if (true) return jjtn000;} + break; + default: + jj_la1[32] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeString TypeString() throws ParseException { + /*@bgen(jjtree) TypeString */ + DynamicSerDeTypeString jjtn000 = new DynamicSerDeTypeString(JJTTYPESTRING); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_string); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeByte TypeByte() throws ParseException { + /*@bgen(jjtree) TypeByte */ + DynamicSerDeTypeByte jjtn000 = new DynamicSerDeTypeByte(JJTTYPEBYTE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_byte); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypei16 Typei16() throws ParseException { + /*@bgen(jjtree) Typei16 */ + DynamicSerDeTypei16 jjtn000 = new DynamicSerDeTypei16(JJTTYPEI16); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_i16); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypei32 Typei32() throws ParseException { + /*@bgen(jjtree) Typei32 */ + DynamicSerDeTypei32 jjtn000 = new DynamicSerDeTypei32(JJTTYPEI32); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_i32); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { +
jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypei64 Typei64() throws ParseException { + /*@bgen(jjtree) Typei64 */ + DynamicSerDeTypei64 jjtn000 = new DynamicSerDeTypei64(JJTTYPEI64); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_i64); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeDouble TypeDouble() throws ParseException { + /*@bgen(jjtree) TypeDouble */ + DynamicSerDeTypeDouble jjtn000 = new DynamicSerDeTypeDouble(JJTTYPEDOUBLE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_double); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeBool TypeBool() throws ParseException { + /*@bgen(jjtree) TypeBool */ + DynamicSerDeTypeBool jjtn000 = new DynamicSerDeTypeBool(JJTTYPEBOOL); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_bool); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeMap TypeMap() throws ParseException { + /*@bgen(jjtree) TypeMap */ + DynamicSerDeTypeMap jjtn000 = new DynamicSerDeTypeMap(JJTTYPEMAP); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_map); + jj_consume_token(69); + FieldType(); + jj_consume_token(59); + FieldType(); + jj_consume_token(70); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeSet TypeSet() throws ParseException { + /*@bgen(jjtree) TypeSet */ + DynamicSerDeTypeSet jjtn000 = new DynamicSerDeTypeSet(JJTTYPESET); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_set); + jj_consume_token(69); + FieldType(); + jj_consume_token(70); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeList TypeList() 
throws ParseException { + /*@bgen(jjtree) TypeList */ + DynamicSerDeTypeList jjtn000 = new DynamicSerDeTypeList(JJTTYPELIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_list); + jj_consume_token(69); + FieldType(); + jj_consume_token(70); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + public thrift_grammarTokenManager token_source; + SimpleCharStream jj_input_stream; + public Token token, jj_nt; + private int jj_ntk; + private int jj_gen; + final private int[] jj_la1 = new int[33]; + static private int[] jj_la1_0; + static private int[] jj_la1_1; + static private int[] jj_la1_2; + static { + jj_la1_0(); + jj_la1_1(); + jj_la1_2(); + } + private static void jj_la1_0() { + jj_la1_0 = new int[] {0x0,0x100,0xa3fee00,0xa3fee00,0x23fee00,0x100,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xb0000000,0x0,0x0,0x0,0x0,0xa0000000,0x0,0x0,0x0,0x0,0xa0000000,0xb0000000,0xa0000000,}; + } + private static void jj_la1_1() { + jj_la1_1 = new int[] {0x18000000,0x18019c20,0x0,0x0,0x0,0x19c20,0x11c20,0x18000000,0x400000,0x80000000,0x18000000,0x2000000,0x18000000,0x18000000,0x22700000,0x18000000,0x22700000,0x18000000,0x22700000,0x22700000,0x4003cf,0x2000,0x18000000,0x200,0x4000,0x5e01cf,0x100000,0x18000000,0xe0000,0x80000000,0x1cf,0x4001cf,0x4001cf,}; + } + private static void jj_la1_2() { + jj_la1_2 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x1,0x0,0x1,0x1,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; + } + + public thrift_grammar(java.io.InputStream stream) { + this(stream, null); + } + public thrift_grammar(java.io.InputStream stream, String encoding) { + try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } + token_source = new thrift_grammarTokenManager(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 33; i++) jj_la1[i] = -1; + } + + public void ReInit(java.io.InputStream stream) { + ReInit(stream, null); + } + public void ReInit(java.io.InputStream stream, String encoding) { + try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } + token_source.ReInit(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jjtree.reset(); + jj_gen = 0; + for (int i = 0; i < 33; i++) jj_la1[i] = -1; + } + + public thrift_grammar(java.io.Reader stream) { + jj_input_stream = new SimpleCharStream(stream, 1, 1); + token_source = new thrift_grammarTokenManager(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 33; i++) jj_la1[i] = -1; + } + + public void ReInit(java.io.Reader stream) { + jj_input_stream.ReInit(stream, 1, 1); + token_source.ReInit(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jjtree.reset(); + jj_gen = 0; + for (int i = 0; i < 33; i++) jj_la1[i] = -1; + } + + public 
thrift_grammar(thrift_grammarTokenManager tm) { + token_source = tm; + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 33; i++) jj_la1[i] = -1; + } + + public void ReInit(thrift_grammarTokenManager tm) { + token_source = tm; + token = new Token(); + jj_ntk = -1; + jjtree.reset(); + jj_gen = 0; + for (int i = 0; i < 33; i++) jj_la1[i] = -1; + } + + final private Token jj_consume_token(int kind) throws ParseException { + Token oldToken; + if ((oldToken = token).next != null) token = token.next; + else token = token.next = token_source.getNextToken(); + jj_ntk = -1; + if (token.kind == kind) { + jj_gen++; + return token; + } + token = oldToken; + jj_kind = kind; + throw generateParseException(); + } + + final public Token getNextToken() { + if (token.next != null) token = token.next; + else token = token.next = token_source.getNextToken(); + jj_ntk = -1; + jj_gen++; + return token; + } + + final public Token getToken(int index) { + Token t = token; + for (int i = 0; i < index; i++) { + if (t.next != null) t = t.next; + else t = t.next = token_source.getNextToken(); + } + return t; + } + + final private int jj_ntk() { + if ((jj_nt=token.next) == null) + return (jj_ntk = (token.next=token_source.getNextToken()).kind); + else + return (jj_ntk = jj_nt.kind); + } + + private java.util.Vector jj_expentries = new java.util.Vector(); + private int[] jj_expentry; + private int jj_kind = -1; + + public ParseException generateParseException() { + jj_expentries.removeAllElements(); + boolean[] la1tokens = new boolean[71]; + for (int i = 0; i < 71; i++) { + la1tokens[i] = false; + } + if (jj_kind >= 0) { + la1tokens[jj_kind] = true; + jj_kind = -1; + } + for (int i = 0; i < 33; i++) { + if (jj_la1[i] == jj_gen) { + for (int j = 0; j < 32; j++) { + if ((jj_la1_0[i] & (1<<j)) != 0) { + la1tokens[j] = true; + } + if ((jj_la1_1[i] & (1<<j)) != 0) { + la1tokens[32+j] = true; + } + if ((jj_la1_2[i] & (1<<j)) != 0) { + la1tokens[64+j] = true; + } + } + } + } + for (int i = 0; i < 71; i++) { + if (la1tokens[i]) { + jj_expentry = new int[1]; + jj_expentry[0] = i; + jj_expentries.addElement(jj_expentry); + } + } + int[][] exptokseq = new int[jj_expentries.size()][]; + for (int i = 0; i < jj_expentries.size(); i++) { + exptokseq[i] = (int[])jj_expentries.elementAt(i); + } + return new ParseException(token, exptokseq, tokenImage); + } + + final public void enable_tracing() { + } + + final public void disable_tracing() { + } + +} TOKEN: { @@ -176,7 +177,7 @@ SimpleNode Start() : {} { - HeaderList() (Definition())+ + HeaderList() ([CommaOrSemicolon()] Definition())+ { return jjtThis; } @@ -641,16 +642,23 @@ -SimpleNode FieldRequiredness() : {} +DynamicSerDeFieldRequiredness FieldRequiredness() : {} { <tok_required> { - return jjtThis; + jjtThis.requiredness = DynamicSerDeFieldRequiredness.RequirednessTypes.Required; + return jjtThis; } | <tok_optional> { + jjtThis.requiredness = DynamicSerDeFieldRequiredness.RequirednessTypes.Optional; return jjtThis; } +| + <tok_skip> + { + jjtThis.requiredness = DynamicSerDeFieldRequiredness.RequirednessTypes.Skippable; + return jjtThis; + } | { return jjtThis; @@ -724,7 +732,7 @@ { TypeString() { - return jjtThis; + return jjtThis; } | TypeBool() { @@ -764,6 +772,13 @@ | jjtThis.name = <IDENTIFIER>.image { + + if (this.types.get(jjtThis.name) == null) { + System.err.println("ERROR: DDL specifying type " + jjtThis.name + " which has not been defined"); + throw new RuntimeException("specifying type " + jjtThis.name + " which has not been defined"); + } + // lookup the specified type and set this node's type to it. Precludes forward and self references for now. + jjtThis.jjtAddChild(this.types.get(jjtThis.name),0); + return jjtThis; + } } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConst.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConst.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConst.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line.
DynamicSerDeConst.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConst extends SimpleNode { + public DynamicSerDeConst(int id) { + super(id); + } + + public DynamicSerDeConst(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeader.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeader.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeader.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeHeader.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeHeader extends SimpleNode { + public DynamicSerDeHeader(int id) { + super(id); + } + + public DynamicSerDeHeader(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeUnflagArgs.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeUnflagArgs.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeUnflagArgs.java (revision 0) @@ -0,0 +1,14 @@ +/* Generated By:JJTree: Do not edit this line. DynamicSerDeUnflagArgs.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeUnflagArgs extends SimpleNode { + public DynamicSerDeUnflagArgs(int id) { + super(id); + } + + public DynamicSerDeUnflagArgs(thrift_grammar p, int id) { + super(p, id); + } + +} Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java (working copy) @@ -76,6 +76,9 @@ public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { TSet theset = iprot.readSetBegin(); + if (theset == null) { + return null; + } Set result; if (reuse != null) { result = (Set)reuse; @@ -96,6 +99,7 @@ * The code uses ListObjectInspector right now. We need to change it to * SetObjectInspector when that is done. 
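+ * Note that serialize() below reuses the single preallocated TSet instance (tset), overwriting its
+ * elemType and size on every call rather than allocating a new TSet per row.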
*/ + TSet tset = new TSet(); @Override public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException, @@ -105,7 +109,9 @@ Set set = (Set)o; DynamicSerDeTypeBase mt = this.getElementType(); - oprot.writeSetBegin(new TSet(mt.getType(),set.size())); + tset.elemType = mt.getType(); + tset.size = set.size(); + oprot.writeSetBegin(tset); for(Object element: set) { mt.serialize(element, loi.getListElementObjectInspector(), oprot); } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardPrimitiveObjectInspector.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardPrimitiveObjectInspector.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardPrimitiveObjectInspector.java (working copy) @@ -43,7 +43,7 @@ } public String getTypeName() { - return ObjectInspectorUtils.getClassShortName(primitiveClass.getName()); + return ObjectInspectorUtils.getClassShortName(primitiveClass); } } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (working copy) @@ -63,34 +63,32 @@ return primitiveClass; } + public static final Map<Class<?>, String> classToTypeName = new HashMap<Class<?>, String>(); + static { + classToTypeName.put(Boolean.class, org.apache.hadoop.hive.serde.Constants.BOOLEAN_TYPE_NAME); + classToTypeName.put(Byte.class, org.apache.hadoop.hive.serde.Constants.TINYINT_TYPE_NAME); + classToTypeName.put(Short.class, org.apache.hadoop.hive.serde.Constants.SMALLINT_TYPE_NAME); + classToTypeName.put(Integer.class, org.apache.hadoop.hive.serde.Constants.INT_TYPE_NAME); + classToTypeName.put(Long.class, org.apache.hadoop.hive.serde.Constants.BIGINT_TYPE_NAME); + classToTypeName.put(Float.class, org.apache.hadoop.hive.serde.Constants.FLOAT_TYPE_NAME); + classToTypeName.put(Double.class, org.apache.hadoop.hive.serde.Constants.DOUBLE_TYPE_NAME); + classToTypeName.put(String.class, org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME); + classToTypeName.put(java.sql.Date.class, org.apache.hadoop.hive.serde.Constants.DATE_TYPE_NAME); + } /** * Get the short name for the types */ - public static String getClassShortName(String className) { - String result = className; - - if (result.equals(String.class.getName())) { - result = org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME; - } else if (result.equals(Integer.class.getName())) { - result = org.apache.hadoop.hive.serde.Constants.INT_TYPE_NAME; - } else if (result.equals(Float.class.getName())) { - result = org.apache.hadoop.hive.serde.Constants.FLOAT_TYPE_NAME; - } else if (result.equals(Double.class.getName())) { - result = org.apache.hadoop.hive.serde.Constants.DOUBLE_TYPE_NAME; - } else if (result.equals(Long.class.getName())) { - result = org.apache.hadoop.hive.serde.Constants.BIGINT_TYPE_NAME; - } else if (result.equals(java.sql.Date.class.getName())) { - result = org.apache.hadoop.hive.serde.Constants.DATE_TYPE_NAME; - } else { - LOG.warn("unsupported class: " + className); + public static String getClassShortName(Class classObject) { + String
result = classToTypeName.get(classObject); + if (result == null) { + result = classObject.getName(); + LOG.warn("unsupported class: " + result); + // Remove prefix + String prefix = "java.lang."; + if (result.startsWith(prefix)) { + result = result.substring(prefix.length()); + } } - - // Remove prefix - String prefix = "java.lang."; - if (result.startsWith(prefix)) { - result = result.substring(prefix.length()); - } - return result; } @@ -251,6 +249,15 @@ return fields.get(i); } } + // For backward compatibility: fieldNames can also be integer Strings. + try { + int i = Integer.parseInt(fieldName); + if (i>=0 && i<fields.size()) { + return fields.get(i); + } + } catch (NumberFormatException e) { + // ignore + } Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java (revision 712243) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java (working copy) + protected String[] complexSplit(String line, Pattern p) { + ArrayList<String> list = new ArrayList<String>(); + Matcher m = p.matcher(line); + // For each field + while (m.find()) { + String match = m.group(); + if (match == null) + break; + if (match.length() == 0) + match = null; + else { + if(stripSeparatorPrefix.matcher(match).find()) { + match = match.substring(1); + } + if(stripQuotePrefix.matcher(match).find()) { + match = match.substring(1); + } + if(stripQuotePostfix.matcher(match).find()) { + match = match.substring(0,match.length() - 1); + } + } + list.add(match); + } + return (String[])list.toArray(new String[1]); + } + + + + // A delimiter property may be specified as a decimal byte value: e.g. "9" becomes the single byte 0x09 (TAB), while a non-numeric value is used literally. + protected String getByteValue(String altValue, String defaultVal) { + if (altValue != null && altValue.length() > 0) { + try { + byte b [] = new byte[1]; + b[0] = Byte.valueOf(altValue).byteValue(); + return new String(b); + } catch(NumberFormatException e) { + return altValue; + } + } + return defaultVal; + } + + + /** * Initialize the TProtocol * @param conf System properties * @param tbl table properties * @throws TException */ public void initialize(Configuration conf, Properties tbl) throws TException { - primarySeparatorByte = Byte.valueOf(tbl.getProperty(Constants.FIELD_DELIM, String.valueOf(primarySeparatorByte))).byteValue(); - LOG.debug("collections delim=<" + tbl.getProperty(Constants.COLLECTION_DELIM) + ">" ); - secondarySeparatorByte = Byte.valueOf(tbl.getProperty(Constants.COLLECTION_DELIM, String.valueOf(secondarySeparatorByte))).byteValue(); - rowSeparatorByte = Byte.valueOf(tbl.getProperty(Constants.LINE_DELIM, String.valueOf(rowSeparatorByte))).byteValue(); - mapSeparatorByte = Byte.valueOf(tbl.getProperty(Constants.MAPKEY_DELIM, String.valueOf(mapSeparatorByte))).byteValue(); + + + primarySeparator = getByteValue(tbl.getProperty(Constants.FIELD_DELIM), primarySeparator); + secondarySeparator = getByteValue(tbl.getProperty(Constants.COLLECTION_DELIM), secondarySeparator); + rowSeparator = getByteValue(tbl.getProperty(Constants.LINE_DELIM), rowSeparator); + mapSeparator = getByteValue(tbl.getProperty(Constants.MAPKEY_DELIM), mapSeparator); returnNulls = Boolean.valueOf(tbl.getProperty(ReturnNullsKey, String.valueOf(returnNulls))).booleanValue(); bufferSize = Integer.valueOf(tbl.getProperty(BufferSizeKey, String.valueOf(bufferSize))).intValue(); + nullString = tbl.getProperty(Constants.SERIALIZATION_NULL_FORMAT, "\\N"); + quote = tbl.getProperty(Constants.QUOTE_CHAR, null); internalInitialize(); @@ -329,7 +425,7 @@ public void writeFieldBegin(TField field) throws TException { if(!
firstField) { - writeByte(primarySeparatorByte); + internalWriteString(primarySeparator); } firstField = false; } @@ -424,21 +520,34 @@ writeString(String.valueOf(dub)); } + public void internalWriteString(String str) throws TException { + if(str != null) { + final byte buf[] = str.getBytes(); + trans_.write(buf, 0, buf.length); + } else { + trans_.write(nullBuf, 0, nullBuf.length); + } + } + public void writeString(String str) throws TException { if(inner) { if(!firstInnerField) { // super hack city notice the mod plus only happens after firstfield hit, so == 0 is right. if(isMap && elemIndex++ % 2 == 0) { - writeByte(mapSeparatorByte); + internalWriteString(mapSeparator); } else { - writeByte(secondarySeparatorByte); + internalWriteString(secondarySeparator); } } else { firstInnerField = false; } } - final byte buf[] = str.getBytes(); - trans_.write(buf, 0, buf.length); + if(str != null) { + final byte buf[] = str.getBytes(); + trans_.write(buf, 0, buf.length); + } else { + trans_.write(nullBuf, 0, nullBuf.length); + } } public void writeBinary(byte[] bin) throws TException { @@ -456,7 +565,7 @@ assert(!inner); try { final String tmp = transportTokenizer.nextToken(); - columns = primaryPattern.split(tmp); + columns = quote == null ? primaryPattern.split(tmp) : complexSplit(tmp, primaryPattern); index = 0; return new TStruct(); } catch(EOFException e) { @@ -468,6 +577,20 @@ columns = null; } + + /** + * Skip past the current field + * Just increments the field index counter. + */ + public void skip(byte type) { + if( inner) { + innerIndex++; + } else { + index++; + } + } + + public TField readFieldBegin() throws TException { assert( !inner); TField f = new TField(); @@ -483,11 +606,19 @@ public TMap readMapBegin() throws TException { assert( !inner); TMap map = new TMap(); - fields = mapPattern.split(columns[index++]); - if(fields != null) { + if(columns[index] == null || + columns[index].equals(nullString)) { + index++; + if(returnNulls) { + return null; + } + map.size = 0; + } else if(columns[index].isEmpty()) { + map.size = 0; + index++; + } else { + fields = mapPattern.split(columns[index++]); map.size = fields.length/2; - } else { - map.size = 0; } innerIndex = 0; inner = true; @@ -503,11 +634,19 @@ public TList readListBegin() throws TException { assert( !inner); TList list = new TList(); - fields = secondaryPattern.split(columns[index++]); - if(fields != null) { + if(columns[index] == null || + columns[index].equals(nullString)) { + index++; + if(returnNulls) { + return null; + } + list.size = 0; + } else if(columns[index].isEmpty()) { + list.size = 0; + index++; + } else { + fields = secondaryPattern.split(columns[index++]); list.size = fields.length ; - } else { - list.size = 0; } innerIndex = 0; inner = true; @@ -521,53 +660,88 @@ public TSet readSetBegin() throws TException { assert( !inner); TSet set = new TSet(); - fields = secondaryPattern.split(columns[index++]); - if(fields != null) { + if(columns[index] == null || + columns[index].equals(nullString)) { + index++; + if(returnNulls) { + return null; + } + set.size = 0; + } else if(columns[index].isEmpty()) { + set.size = 0; + index++; + } else { + fields = secondaryPattern.split(columns[index++]); set.size = fields.length ; - } else { - set.size = 0; } inner = true; innerIndex = 0; return set; } + protected boolean lastPrimitiveWasNullFlag; + + public boolean lastPrimitiveWasNull() throws TException { + return lastPrimitiveWasNullFlag; + } + + public void writeNull() throws TException { + writeString(null); + } + public 
void readSetEnd() throws TException { inner = false; } + public boolean readBool() throws TException { - return Boolean.valueOf(readString()).booleanValue(); + String val = readString(); + lastPrimitiveWasNullFlag = val == null; + return val == null || val.isEmpty() ? false : Boolean.valueOf(val).booleanValue(); } public byte readByte() throws TException { - return Byte.valueOf(readString()).byteValue(); + String val = readString(); + lastPrimitiveWasNullFlag = val == null; + return val == null || val.isEmpty() ? 0 : Byte.valueOf(val).byteValue(); } public short readI16() throws TException { - return Short.valueOf(readString()).shortValue(); + String val = readString(); + lastPrimitiveWasNullFlag = val == null; + return val == null || val.isEmpty() ? 0 : Short.valueOf(val).shortValue(); } public int readI32() throws TException { - return Integer.valueOf(readString()).intValue(); + String val = readString(); + lastPrimitiveWasNullFlag = val == null; + return val == null || val.isEmpty() ? 0 : Integer.valueOf(val).intValue(); } public long readI64() throws TException { - return Long.valueOf(readString()).longValue(); + String val = readString(); + lastPrimitiveWasNullFlag = val == null; + return val == null || val.isEmpty() ? 0 : Long.valueOf(val).longValue(); } public double readDouble() throws TException { - return Double.valueOf(readString()).doubleValue(); + String val = readString(); + lastPrimitiveWasNullFlag = val == null; + return val == null || val.isEmpty() ? 0 :Double.valueOf(val).doubleValue(); } - protected String [] curMapPair; public String readString() throws TException { String ret; if(!inner) { - ret = columns != null && index < columns.length ? columns[index++] : null; + ret = columns != null && index < columns.length ? columns[index] : null; + index++; } else { - ret = fields != null && innerIndex < fields.length ? fields[innerIndex++] : null; + ret = fields != null && innerIndex < fields.length ? fields[innerIndex] : null; + innerIndex++; } - return ret == null && ! returnNulls ? "" : ret; + if(ret == null || ret.equals(nullString)) + return returnNulls ? null : ""; + else + return ret; } public byte[] readBinary() throws TException { Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java =================================================================== --- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java (revision 0) +++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java (revision 0) @@ -0,0 +1,547 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +package org.apache.hadoop.hive.serde2.thrift; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import com.facebook.thrift.TException; +import com.facebook.thrift.transport.*; +import com.facebook.thrift.protocol.*; +import java.util.*; +import java.io.*; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.serde.Constants; + +import java.util.Properties; + +/** + * An implementation of the Thrift Protocol for binary sortable records. + * + * The data format: + * NULL: a single byte \0 + * NON-NULL Primitives: ALWAYS prepend a single byte \1, and then: + * Boolean: FALSE = \1, TRUE = \2 + * Byte: flip the sign-bit to make sure negative comes before positive + * Short: flip the sign-bit to make sure negative comes before positive + * Int: flip the sign-bit to make sure negative comes before positive + * Long: flip the sign-bit to make sure negative comes before positive + * Double: flip the sign-bit for positive double, and all bits for negative double values + * String: NULL-terminated string + * NON-NULL Complex Types: + * Struct: first the single byte \1, and then one field by one field. + * List: size stored as Int (see above), then one element by one element. + * Map: size stored as Int (see above), then one key by one value, and then the next pair and so on. + * Binary: size stored as Int (see above), then the binary data in its original form + * + * Note that the relative order of list/map/binary will be based on the size first (and elements one by one if + * the sizes are equal). + * + * This protocol takes an additional parameter SERIALIZATION_SORT_ORDER which is a string containing only "+" and "-". + * The length of the string should equal the number of fields in the top-level struct for serialization. + * "+" means the field should be sorted ascendingly, and "-" means descendingly. The sub fields in the same top-level + * field will have the same sort order. + * + * This is not thrift compliant in that it doesn't write out field ids + * so things cannot actually be versioned. + */ +public class TBinarySortableProtocol extends TProtocol implements ConfigurableTProtocol, WriteNullsProtocol { + + final static Log LOG = LogFactory.getLog(TBinarySortableProtocol.class.getName()); + + /** + * Factory for TBinarySortableProtocol objects + */ + public static class Factory implements TProtocolFactory { + + public TProtocol getProtocol(TTransport trans) { + return new TBinarySortableProtocol(trans); + } + } + + public TBinarySortableProtocol(TTransport trans) { + super(trans); + stackLevel = 0; + } + + /** + * The stack level of the current field. Top-level fields have a stackLevel value of 1. + * Each nested struct/list/map will increase the stackLevel value by 1. + */ + int stackLevel; + /** + * The field ID in the top level struct. This is used to determine whether this field + * should be sorted ascendingly or descendingly. + */ + int topLevelStructFieldID; + /** + * A string that consists of only "+" and "-". It should have the same length as the number + * of fields in the top level struct. "+" means the corresponding field is sorted ascendingly + * and "-" means the corresponding field is sorted descendingly. + */ + String sortOrder; + /** + * Whether the current field is sorted ascendingly.
+
+  @Override
+  public void initialize(Configuration conf, Properties tbl) throws TException {
+    sortOrder = tbl.getProperty(Constants.SERIALIZATION_SORT_ORDER);
+    if (sortOrder == null) {
+      sortOrder = "";
+    }
+    for(int i=0; i<sortOrder.length(); i++) {
+      char c = sortOrder.charAt(i);
+      if (c != '+' && c != '-') {
+        throw new TException(Constants.SERIALIZATION_SORT_ORDER
+            + " should be a string containing only '+' and '-'!");
+      }
+    }
+  }
+
+  public void writeMessageBegin(TMessage message) throws TException {
+  }
+
+  public void writeMessageEnd() throws TException {
+  }
+
+  public void writeStructBegin(TStruct struct) throws TException {
+    stackLevel++;
+    if (stackLevel == 1) {
+      topLevelStructFieldID = 0;
+      ascending = (topLevelStructFieldID >= sortOrder.length() || sortOrder.charAt(topLevelStructFieldID) != '-');
+    } else {
+      writeRawBytes(nonNullByte, 0, 1);
+      // If the struct is null and level > 1, DynamicSerDe will call writeNull();
+    }
+  }
+
+  public void writeStructEnd() throws TException {
+    stackLevel--;
+  }
+
+  public void writeFieldBegin(TField field) throws TException {
+  }
+
+  public void writeFieldEnd() throws TException {
+    if (stackLevel == 1) {
+      topLevelStructFieldID++;
+      ascending = (topLevelStructFieldID >= sortOrder.length() || sortOrder.charAt(topLevelStructFieldID) != '-');
+    }
+  }
+
+  public void writeFieldStop() {
+  }
+
+  public void writeMapBegin(TMap map) throws TException {
+    stackLevel++;
+    if (map == null) {
+      writeRawBytes(nonNullByte, 0, 1);
+    } else {
+      writeI32(map.size);
+    }
+  }
+
+  public void writeMapEnd() throws TException {
+    stackLevel--;
+  }
+
+  public void writeListBegin(TList list) throws TException {
+    stackLevel++;
+    if (list == null) {
+      writeRawBytes(nonNullByte, 0, 1);
+    } else {
+      writeI32(list.size);
+    }
+  }
+
+  public void writeListEnd() throws TException {
+    stackLevel--;
+  }
+
+  public void writeSetBegin(TSet set) throws TException {
+    stackLevel++;
+    if (set == null) {
+      writeRawBytes(nonNullByte, 0, 1);
+    } else {
+      writeI32(set.size);
+    }
+  }
+
+  public void writeSetEnd() throws TException {
+    stackLevel--;
+  }
+
+  byte[] rawBytesBuffer;
+
+  // This method takes care of bit-flipping for descending order
+  // Declare this method as final for performance reasons
+  final private void writeRawBytes(byte[] bytes, int begin, int length) throws TException {
+    if (ascending) {
+      trans_.write(bytes, begin, length);
+    } else {
+      // For fields in descending order, do a bit flip first.
+      if (rawBytesBuffer == null || rawBytesBuffer.length < bytes.length) {
+        rawBytesBuffer = new byte[bytes.length];
+      }
+      for(int i=begin; i<begin+length; i++) {
+        rawBytesBuffer[i] = (byte)(~bytes[i]);
+      }
+      trans_.write(rawBytesBuffer, begin, length);
+    }
+  }
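The sign-bit and complement tricks used by the writers above are easiest to see in isolation. A standalone sketch, plain Java and independent of Thrift, of why the encoding compares correctly under unsigned byte-wise comparison and how the descending path of writeRawBytes reverses it (the class and method names here are illustrative, not from the patch):

    // Standalone illustration: flipping the sign bit makes unsigned
    // byte-wise comparison agree with signed int order; complementing
    // every byte (the descending path) reverses that order.
    public class SortableEncodingSketch {  // hypothetical class name

      static byte[] encode(int i, boolean ascending) {
        byte[] b = new byte[4];
        b[0] = (byte) (0xff & ((i >> 24) ^ 0x80));  // flip the sign bit
        b[1] = (byte) (0xff & (i >> 16));
        b[2] = (byte) (0xff & (i >> 8));
        b[3] = (byte) (0xff & i);
        if (!ascending) {
          for (int k = 0; k < b.length; k++) {
            b[k] = (byte) ~b[k];                    // descending: flip all bits
          }
        }
        return b;
      }

      // Lexicographic comparison over unsigned bytes, i.e. what a raw
      // byte-wise comparison of two serialized rows would do.
      static int compareUnsigned(byte[] x, byte[] y) {
        for (int k = 0; k < x.length; k++) {
          int d = (x[k] & 0xff) - (y[k] & 0xff);
          if (d != 0) {
            return d;
          }
        }
        return 0;
      }

      public static void main(String[] args) {
        // true: -5 sorts before 3 under the ascending encoding
        System.out.println(compareUnsigned(encode(-5, true), encode(3, true)) < 0);
        // true: the order is reversed under the descending encoding
        System.out.println(compareUnsigned(encode(-5, false), encode(3, false)) > 0);
      }
    }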
+
+  private byte[] bout = new byte[1];
+  public void writeBool(boolean b) throws TException {
+    bout[0] = (byte)(b ? 2 : 1);
+    writeRawBytes(bout, 0, 1);
+  }
+
+  public void writeByte(byte b) throws TException {
+    writeRawBytes(nonNullByte, 0, 1);
+    bout[0] = (byte)(b ^ 0x80);
+    writeRawBytes(bout, 0, 1);
+  }
+
+  private byte[] i16out = new byte[2];
+  public void writeI16(short i16) throws TException {
+    i16out[0] = (byte)(0xff & ((i16 >> 8) ^ 0x80));
+    i16out[1] = (byte)(0xff & (i16));
+    writeRawBytes(nonNullByte, 0, 1);
+    writeRawBytes(i16out, 0, 2);
+  }
+
+  private byte[] i32out = new byte[4];
+  public void writeI32(int i32) throws TException {
+    i32out[0] = (byte)(0xff & ((i32 >> 24) ^ 0x80));
+    i32out[1] = (byte)(0xff & (i32 >> 16));
+    i32out[2] = (byte)(0xff & (i32 >> 8));
+    i32out[3] = (byte)(0xff & (i32));
+    writeRawBytes(nonNullByte, 0, 1);
+    writeRawBytes(i32out, 0, 4);
+  }
+
+  private byte[] i64out = new byte[8];
+  public void writeI64(long i64) throws TException {
+    i64out[0] = (byte)(0xff & ((i64 >> 56) ^ 0x80));
+    i64out[1] = (byte)(0xff & (i64 >> 48));
+    i64out[2] = (byte)(0xff & (i64 >> 40));
+    i64out[3] = (byte)(0xff & (i64 >> 32));
+    i64out[4] = (byte)(0xff & (i64 >> 24));
+    i64out[5] = (byte)(0xff & (i64 >> 16));
+    i64out[6] = (byte)(0xff & (i64 >> 8));
+    i64out[7] = (byte)(0xff & (i64));
+    writeRawBytes(nonNullByte, 0, 1);
+    writeRawBytes(i64out, 0, 8);
+  }
+
+  public void writeDouble(double dub) throws TException {
+    long i64 = Double.doubleToLongBits(dub);
+    if ((i64 & (1L << 63)) != 0) {
+      // negative numbers, flip all bits
+      i64out[0] = (byte)(0xff & ((i64 >> 56) ^ 0xff));
+      i64out[1] = (byte)(0xff & ((i64 >> 48) ^ 0xff));
+      i64out[2] = (byte)(0xff & ((i64 >> 40) ^ 0xff));
+      i64out[3] = (byte)(0xff & ((i64 >> 32) ^ 0xff));
+      i64out[4] = (byte)(0xff & ((i64 >> 24) ^ 0xff));
+      i64out[5] = (byte)(0xff & ((i64 >> 16) ^ 0xff));
+      i64out[6] = (byte)(0xff & ((i64 >> 8) ^ 0xff));
+      i64out[7] = (byte)(0xff & ((i64) ^ 0xff));
+    } else {
+      // positive numbers, flip just the first bit
+      i64out[0] = (byte)(0xff & ((i64 >> 56) ^ 0x80));
+      i64out[1] = (byte)(0xff & (i64 >> 48));
+      i64out[2] = (byte)(0xff & (i64 >> 40));
+      i64out[3] = (byte)(0xff & (i64 >> 32));
+      i64out[4] = (byte)(0xff & (i64 >> 24));
+      i64out[5] = (byte)(0xff & (i64 >> 16));
+      i64out[6] = (byte)(0xff & (i64 >> 8));
+      i64out[7] = (byte)(0xff & (i64));
+    }
+    writeRawBytes(nonNullByte, 0, 1);
+    writeRawBytes(i64out, 0, 8);
+  }
+
+  final protected byte[] nullByte = new byte[] {0};
+  final protected byte[] nonNullByte = new byte[] {1};
+
+  public void writeString(String str) throws TException {
+    byte[] dat;
+    try {
+      dat = str.getBytes("UTF-8");
+    } catch (UnsupportedEncodingException uex) {
+      throw new TException("JVM DOES NOT SUPPORT UTF-8: " + uex.getMessage());
+    }
+    for(int i=0; i<dat.length; i++) {
+      if (dat[i] == 0) {
+        throw new TException("The string cannot contain the \\0 byte because it is used as the string terminator!");
+      }
+    }
+    writeRawBytes(nonNullByte, 0, 1);
+    writeRawBytes(dat, 0, dat.length);
+    writeRawBytes(nullByte, 0, 1);
+  }
+
+  public void writeBinary(byte[] bin) throws TException {
+    writeI32(bin.length);
+    writeRawBytes(bin, 0, bin.length);
+  }
+
+  public TMessage readMessageBegin() throws TException {
+    return new TMessage();
+  }
+
+  public void readMessageEnd() throws TException {
+  }
+
+  TStruct tstruct = new TStruct();
+  public TStruct readStructBegin() throws TException {
+    stackLevel++;
+    if (stackLevel == 1) {
+      topLevelStructFieldID = 0;
+      ascending = (topLevelStructFieldID >= sortOrder.length() || sortOrder.charAt(topLevelStructFieldID) != '-');
+    } else {
+      // is this a null?
+      // only read the is-null byte for level > 1 because the top-level struct can never be null.
+      if (readIsNull()) return null;
+    }
+    return tstruct;
+  }
+
+  public void readStructEnd() throws TException {
+    stackLevel--;
+  }
+
+  TField f = new TField();
+  public TField readFieldBegin() throws TException {
+    // slight hack to communicate to DynamicSerDe that the field ids are not being set but things are ordered.
+    f.type = -1;
+    return f;
+  }
+
+  public void readFieldEnd() throws TException {
+    if (stackLevel == 1) {
+      topLevelStructFieldID++;
+      ascending = (topLevelStructFieldID >= sortOrder.length() || sortOrder.charAt(topLevelStructFieldID) != '-');
+    }
+  }
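Per the class comment, the per-field sort direction comes from the SERIALIZATION_SORT_ORDER table property, one '+' or '-' per top-level field. A hedged sketch of wiring that up through DynamicSerDe, assuming serialization.format accepts a protocol class name as the ConfigurableTProtocol plumbing in this patch suggests; the DDL string and driver class are illustrative, and a real table would carry additional properties such as its name:

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.Constants;
    import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
    import org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol;

    public class SortOrderSketch {  // hypothetical driver class
      public static void main(String[] args) throws Exception {
        Properties schema = new Properties();
        // Ask DynamicSerDe for the binary-sortable protocol (assumed:
        // serialization.format resolves to a protocol class).
        schema.setProperty(Constants.SERIALIZATION_FORMAT,
            TBinarySortableProtocol.class.getName());
        // Two top-level fields (illustrative DDL)...
        schema.setProperty(Constants.SERIALIZATION_DDL,
            "struct sketch { i32 key, string value }");
        // ...key ascending, value descending: one '+'/'-' per top-level field.
        schema.setProperty(Constants.SERIALIZATION_SORT_ORDER, "+-");

        DynamicSerDe serde = new DynamicSerDe();
        serde.initialize(new Configuration(), schema);
      }
    }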
+
+  private TMap tmap = new TMap();
+  /**
+   * This method always returns the same instance of TMap to avoid creating new instances.
+   * It is the responsibility of the caller to read the value before calling this method again.
+   */
+  public TMap readMapBegin() throws TException {
+    stackLevel++;
+    tmap.size = readI32();
+    if (tmap.size == 0 && lastPrimitiveWasNull()) {
+      return null;
+    }
+    return tmap;
+  }
+
+  public void readMapEnd() throws TException {
+    stackLevel--;
+  }
+
+  private TList tlist = new TList();
+  /**
+   * This method always returns the same instance of TList to avoid creating new instances.
+   * It is the responsibility of the caller to read the value before calling this method again.
+   */
+  public TList readListBegin() throws TException {
+    stackLevel++;
+    tlist.size = readI32();
+    if (tlist.size == 0 && lastPrimitiveWasNull()) {
+      return null;
+    }
+    return tlist;
+  }
+
+  public void readListEnd() throws TException {
+    stackLevel--;
+  }
+
+  private TSet set = new TSet();
+  /**
+   * This method always returns the same instance of TSet to avoid creating new instances.
+   * It is the responsibility of the caller to read the value before calling this method again.
+   */
+  public TSet readSetBegin() throws TException {
+    stackLevel++;
+    set.size = readI32();
+    if (set.size == 0 && lastPrimitiveWasNull()) {
+      return null;
+    }
+    return set;
+  }
+
+  public void readSetEnd() throws TException {
+    stackLevel--;
+  }
+
+  // This method takes care of bit-flipping for descending order
+  // Make this method final to improve performance.
+  final private int readRawAll(byte[] buf, int off, int len) throws TException {
+    int bytes = trans_.readAll(buf, off, len);
+    if (!ascending) {
+      for(int i=off; i<off+len; i++) {
+        buf[i] = (byte)(~buf[i]);
+      }
+    }
+    return bytes;
+  }
Index: src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
===================================================================
--- src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (revision 712243)
+++ src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (working copy)
+  private static List<String> nativeSerDeNames = new ArrayList<String>();
+  static {
+    nativeSerDeNames.add(org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe.class.getName());
+    nativeSerDeNames.add(org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName());
+    nativeSerDeNames.add(org.apache.hadoop.hive.serde.thrift.columnsetSerDe.class.getName());
+  }
+  public static boolean isNativeSerDe(String serde) {
+    return nativeSerDeNames.contains(serde);
+  }
+
   private static boolean initCoreSerDes = registerCoreSerDes();

   protected static boolean registerCoreSerDes() {
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/SimpleNode.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/SimpleNode.java (revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/SimpleNode.java (working copy)
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line.
SimpleNode.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class SimpleNode implements Node { - protected Node parent; - protected Node[] children; - protected int id; - protected thrift_grammar parser; - - public SimpleNode(int i) { - id = i; - } - - public SimpleNode(thrift_grammar p, int i) { - this(i); - parser = p; - } - - public void jjtOpen() { - } - - public void jjtClose() { - } - - public void jjtSetParent(Node n) { parent = n; } - public Node jjtGetParent() { return parent; } - - public void jjtAddChild(Node n, int i) { - if (children == null) { - children = new Node[i + 1]; - } else if (i >= children.length) { - Node c[] = new Node[i + 1]; - System.arraycopy(children, 0, c, 0, children.length); - children = c; - } - children[i] = n; - } - - public Node jjtGetChild(int i) { - return children[i]; - } - - public int jjtGetNumChildren() { - return (children == null) ? 0 : children.length; - } - - /* You can override these two methods in subclasses of SimpleNode to - customize the way the node appears when the tree is dumped. If - your output uses more than one line you should override - toString(String), otherwise overriding toString() is probably all - you need to do. */ - - public String toString() { return thrift_grammarTreeConstants.jjtNodeName[id]; } - public String toString(String prefix) { return prefix + toString(); } - - /* Override this method if you want to customize how the node dumps - out its children. */ - - public void dump(String prefix) { - System.out.println(toString(prefix)); - if (children != null) { - for (int i = 0; i < children.length; ++i) { - SimpleNode n = (SimpleNode)children[i]; - if (n != null) { - n.dump(prefix + " "); - } - } - } - } -} - Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammarConstants.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammarConstants.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammarConstants.java (working copy) @@ -1,151 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree&JavaCC: Do not edit this line. 
thrift_grammarConstants.java */
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-public interface thrift_grammarConstants {
-
-  int EOF = 0;
-  int tok_const = 8;
-  int tok_namespace = 9;
-  int tok_cpp_namespace = 10;
-  int tok_cpp_include = 11;
-  int tok_cpp_type = 12;
-  int tok_java_package = 13;
-  int tok_cocoa_prefix = 14;
-  int tok_csharp_namespace = 15;
-  int tok_php_namespace = 16;
-  int tok_py_module = 17;
-  int tok_perl_package = 18;
-  int tok_ruby_namespace = 19;
-  int tok_smalltalk_category = 20;
-  int tok_smalltalk_prefix = 21;
-  int tok_xsd_all = 22;
-  int tok_xsd_optional = 23;
-  int tok_xsd_nillable = 24;
-  int tok_xsd_namespace = 25;
-  int tok_xsd_attrs = 26;
-  int tok_include = 27;
-  int tok_void = 28;
-  int tok_bool = 29;
-  int tok_byte = 30;
-  int tok_i16 = 31;
-  int tok_i32 = 32;
-  int tok_i64 = 33;
-  int tok_double = 34;
-  int tok_string = 35;
-  int tok_slist = 36;
-  int tok_senum = 37;
-  int tok_map = 38;
-  int tok_list = 39;
-  int tok_set = 40;
-  int tok_async = 41;
-  int tok_typedef = 42;
-  int tok_struct = 43;
-  int tok_exception = 44;
-  int tok_extends = 45;
-  int tok_throws = 46;
-  int tok_service = 47;
-  int tok_enum = 48;
-  int tok_required = 49;
-  int tok_optional = 50;
-  int tok_int_constant = 51;
-  int tok_double_constant = 52;
-  int IDENTIFIER = 53;
-  int LETTER = 54;
-  int DIGIT = 55;
-  int tok_literal = 56;
-  int tok_st_identifier = 57;
-
-  int DEFAULT = 0;
-
-  String[] tokenImage = {
-    "<EOF>",
-    "\" \"",
-    "\"\\t\"",
-    "\"\\n\"",
-    "\"\\r\"",
-    "<token of kind 5>",
-    "<token of kind 6>",
-    "<token of kind 7>",
-    "\"const\"",
-    "\"namespace\"",
-    "\"cpp_namespace\"",
-    "\"cpp_include\"",
-    "\"cpp_type\"",
-    "\"java_package\"",
-    "\"cocoa_prefix\"",
-    "\"csharp_namespace\"",
-    "\"php_namespace\"",
-    "\"py_module\"",
-    "\"perl_package\"",
-    "\"ruby_namespace\"",
-    "\"smalltalk_category\"",
-    "\"smalltalk_prefix\"",
-    "\"xsd_all\"",
-    "\"xsd_optional\"",
-    "\"xsd_nillable\"",
-    "\"xsd_namespace\"",
-    "\"xsd_attrs\"",
-    "\"include\"",
-    "\"void\"",
-    "\"bool\"",
-    "\"byte\"",
-    "\"i16\"",
-    "\"i32\"",
-    "\"i64\"",
-    "\"double\"",
-    "\"string\"",
-    "\"slist\"",
-    "\"senum\"",
-    "\"map\"",
-    "\"list\"",
-    "\"set\"",
-    "\"async\"",
-    "\"typedef\"",
-    "\"struct\"",
-    "\"exception\"",
-    "\"extends\"",
-    "\"throws\"",
-    "\"service\"",
-    "\"enum\"",
-    "\"required\"",
-    "\"optional\"",
-    "<tok_int_constant>",
-    "<tok_double_constant>",
-    "<IDENTIFIER>",
-    "<LETTER>",
-    "<DIGIT>",
-    "<tok_literal>",
-    "<tok_st_identifier>",
-    "\",\"",
-    "\";\"",
-    "\"{\"",
-    "\"}\"",
-    "\"=\"",
-    "\"[\"",
-    "\"]\"",
-    "\":\"",
-    "\"(\"",
-    "\")\"",
-    "\"<\"",
-    "\">\"",
-  };
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFieldRequiredness.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFieldRequiredness.java (revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFieldRequiredness.java (working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeFieldRequiredness.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeFieldRequiredness extends SimpleNode { - public DynamicSerDeFieldRequiredness(int id) { - super(id); - } - - public DynamicSerDeFieldRequiredness(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeInclude.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeInclude.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeInclude.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeInclude.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeInclude extends SimpleNode { - public DynamicSerDeInclude(int id) { - super(id); - } - - public DynamicSerDeInclude(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConstMap.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConstMap.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConstMap.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstMap.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeConstMap extends SimpleNode { - public DynamicSerDeConstMap(int id) { - super(id); - } - - public DynamicSerDeConstMap(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeThrows.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeThrows.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeThrows.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeThrows.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeThrows extends SimpleNode { - public DynamicSerDeThrows(int id) { - super(id); - } - - public DynamicSerDeThrows(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFlagArgs.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFlagArgs.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFlagArgs.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeFlagArgs.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeFlagArgs extends SimpleNode { - public DynamicSerDeFlagArgs(int id) { - super(id); - } - - public DynamicSerDeFlagArgs(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/JJTthrift_grammarState.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/JJTthrift_grammarState.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/JJTthrift_grammarState.java (working copy) @@ -1,141 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. /home/pwyckoff/projects/hadoop/trunk/src/org.apache.hadoop.hive.serde/build/JJTthrift_grammarState.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -class JJTthrift_grammarState { - private java.util.Stack nodes; - private java.util.Stack marks; - - private int sp; // number of nodes on stack - private int mk; // current mark - private boolean node_created; - - JJTthrift_grammarState() { - nodes = new java.util.Stack(); - marks = new java.util.Stack(); - sp = 0; - mk = 0; - } - - /* Determines whether the current node was actually closed and - pushed. This should only be called in the final user action of a - node scope. */ - boolean nodeCreated() { - return node_created; - } - - /* Call this to reinitialize the node stack. It is called - automatically by the parser's ReInit() method. */ - void reset() { - nodes.removeAllElements(); - marks.removeAllElements(); - sp = 0; - mk = 0; - } - - /* Returns the root node of the AST. It only makes sense to call - this after a successful parse. */ - Node rootNode() { - return (Node)nodes.elementAt(0); - } - - /* Pushes a node on to the stack. */ - void pushNode(Node n) { - nodes.push(n); - ++sp; - } - - /* Returns the node on the top of the stack, and remove it from the - stack. */ - Node popNode() { - if (--sp < mk) { - mk = ((Integer)marks.pop()).intValue(); - } - return (Node)nodes.pop(); - } - - /* Returns the node currently on the top of the stack. */ - Node peekNode() { - return (Node)nodes.peek(); - } - - /* Returns the number of children on the stack in the current node - scope. */ - int nodeArity() { - return sp - mk; - } - - - void clearNodeScope(Node n) { - while (sp > mk) { - popNode(); - } - mk = ((Integer)marks.pop()).intValue(); - } - - - void openNodeScope(Node n) { - marks.push(Integer.valueOf(mk)); - mk = sp; - n.jjtOpen(); - } - - - /* A definite node is constructed from a specified number of - children. 
That number of nodes are popped from the stack and - made the children of the definite node. Then the definite node - is pushed on to the stack. */ - void closeNodeScope(Node n, int num) { - mk = ((Integer)marks.pop()).intValue(); - while (num-- > 0) { - Node c = popNode(); - c.jjtSetParent(n); - n.jjtAddChild(c, num); - } - n.jjtClose(); - pushNode(n); - node_created = true; - } - - - /* A conditional node is constructed if its condition is true. All - the nodes that have been pushed since the node was opened are - made children of the the conditional node, which is then pushed - on to the stack. If the condition is false the node is not - constructed and they are left on the stack. */ - void closeNodeScope(Node n, boolean condition) { - if (condition) { - int a = nodeArity(); - mk = ((Integer)marks.pop()).intValue(); - while (a-- > 0) { - Node c = popNode(); - c.jjtSetParent(n); - n.jjtAddChild(c, a); - } - n.jjtClose(); - pushNode(n); - node_created = true; - } else { - mk = ((Integer)marks.pop()).intValue(); - node_created = false; - } - } -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSenum.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSenum.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSenum.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeSenum.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeSenum extends SimpleNode { - public DynamicSerDeSenum(int id) { - super(id); - } - - public DynamicSerDeSenum(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFunctionType.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFunctionType.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeFunctionType.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeFunctionType.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeFunctionType extends SimpleNode { - public DynamicSerDeFunctionType(int id) { - super(id); - } - - public DynamicSerDeFunctionType(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeDefinition.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeDefinition.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeDefinition.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeDefinition.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeDefinition extends SimpleNode { - public DynamicSerDeDefinition(int id) { - super(id); - } - - public DynamicSerDeDefinition(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeXception.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeXception.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeXception.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeXception.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeXception extends SimpleNode { - public DynamicSerDeXception(int id) { - super(id); - } - - public DynamicSerDeXception(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeNamespace.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeNamespace.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeNamespace.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeNamespace.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeNamespace extends SimpleNode { - public DynamicSerDeNamespace(int id) { - super(id); - } - - public DynamicSerDeNamespace(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeStart.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeStart.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeStart.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeStart.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeStart extends SimpleNode { - public DynamicSerDeStart(int id) { - super(id); - } - - public DynamicSerDeStart(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/Node.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/Node.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/Node.java (working copy) @@ -1,52 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. Node.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -/* All AST nodes must implement this interface. It provides basic - machinery for constructing the parent and child relationships - between nodes. */ - -public interface Node { - - /** This method is called after the node has been made the current - node. It indicates that child nodes can now be added to it. */ - public void jjtOpen(); - - /** This method is called after all the child nodes have been - added. */ - public void jjtClose(); - - /** This pair of methods are used to inform the node of its - parent. */ - public void jjtSetParent(Node n); - public Node jjtGetParent(); - - /** This method tells the node to add its argument to the node's - list of children. */ - public void jjtAddChild(Node n, int i); - - /** This method returns a child node. The children are numbered - from zero, left to right. */ - public Node jjtGetChild(int i); - - /** Return the number of children the node has. */ - public int jjtGetNumChildren(); -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammar.jj =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammar.jj (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammar.jj (working copy) @@ -1,2346 +0,0 @@ -/*@bgen(jjtree) Generated By:JJTree: Do not edit this line. 
/home/pwyckoff/projects/hadoop/trunk/src/com/facebook/serde/build/thrift_grammar.jj */ -/*@egen*/options { - STATIC = false; -} - - -PARSER_BEGIN(thrift_grammar) - -package com.facebook.serde.dynamic_type; - -import java.util.*; -import java.io.*; -import java.net.*; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.transport.*; -import com.facebook.serde.thrift.*; -import com.facebook.serde.*; - -public class thrift_grammar/*@bgen(jjtree)*/implements thrift_grammarTreeConstants/*@egen*/ {/*@bgen(jjtree)*/ - protected JJTthrift_grammarState jjtree = new JJTthrift_grammarState(); - -/*@egen*/ - - private List include_path = null; - - // for computing the autogenerated field ids in thrift - private int field_val; - - // store types and tables - // separately because one cannot use a table (ie service.method) as a Struct like type. - protected Map types; - protected Map tables; - - // system include path - final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" }; - - // need three params to differentiate between this and 2 param method auto generated since - // some calls in the autogenerated code use null param for 2nd param and thus ambiguous. - protected thrift_grammar(InputStream is, List include_path, boolean junk) { - this(is,null); - this.types = new HashMap () ; - this.tables = new HashMap () ; - this.include_path = include_path; - this.field_val = -1; - } - - // find the file on the include path - private static File findFile(String fname, List include_path) { - for(String path: include_path) { - final String full = path + "/" + fname; - File f = new File(full); - if(f.exists()) { - return f; - } - } - return null; - } - - public static void main(String args[]) { - String filename = null; - List include_path = new ArrayList(); - - for(String path: default_include_path) { - include_path.add(path); - } - for(int i = 0; i < args.length; i++) { - String arg = args[i]; - if(arg.equals("--include") && i + 1 < args.length) { - include_path.add(args[++i]); - } - if(arg.equals("--file") && i + 1 < args.length) { - filename = args[++i]; - } - } - - InputStream is = System.in; - if(filename != null) { - try { - is = new FileInputStream(findFile(filename, include_path)); - } catch(IOException e) { - } - } - thrift_grammar t = new thrift_grammar(is,include_path,false); - - try { - t.Start(); - } catch (Exception e) { - System.out.println("Parse error."); - System.out.println(e.getMessage()); - e.printStackTrace(); - } - } -} - -PARSER_END(thrift_grammar) - - - -SKIP : -{ - " " -| "\t" -| "\n" -| "\r" -| <"#"(~["\n"])* ("\n"|"\r"|"\r\n")> -| <"//" (~["\n","\r"])* ("\n"|"\r"|"\r\n")> -| <"/*" (~["*"])* "*" (~["/"] (~["*"])* "*")* "/"> -} - - -/** - * HELPER DEFINITIONS, COMMENTS, CONSTANTS, AND WHATNOT - */ - -TOKEN: -{ -| - | -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| -| - | - | - | - | - | - | - | - | - | - | - | - | - -} - -TOKEN: { - - -| -)*"."()+(["e","E"](["+","-"])?()+)?> -| -(||"."|"_")*> -| -<#LETTER: (["a"-"z", "A"-"Z" ]) > -| -<#DIGIT: ["0"-"9"] > -| - -| - -} - - -SimpleNode Start() : {/*@bgen(jjtree) Start */ - DynamicSerDeStart jjtn000 = new DynamicSerDeStart(JJTSTART); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Start */ - try { -/*@egen*/ - HeaderList() (Definition())+/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - 
}/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode HeaderList() : {/*@bgen(jjtree) HeaderList */ - DynamicSerDeHeaderList jjtn000 = new DynamicSerDeHeaderList(JJTHEADERLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) HeaderList */ - try { -/*@egen*/ - (Header())*/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ - -} - -SimpleNode Header() : {/*@bgen(jjtree) Header */ - DynamicSerDeHeader jjtn000 = new DynamicSerDeHeader(JJTHEADER); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Header */ - try { -/*@egen*/ - Include()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Namespace()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode Namespace() : {/*@bgen(jjtree) Namespace */ - DynamicSerDeNamespace jjtn000 = new DynamicSerDeNamespace(JJTNAMESPACE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Namespace */ - try { -/*@egen*/ - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return 
jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -} -| - /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -}/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - - -SimpleNode Include() : {/*@bgen(jjtree) Include */ - DynamicSerDeInclude jjtn000 = new DynamicSerDeInclude(JJTINCLUDE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/ - String fname; - boolean found = false; -} -{/*@bgen(jjtree) Include */ - try { -/*@egen*/ - - fname=.image/*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ - // bugbug somewhat fragile below substring expression - fname = fname.substring(1,fname.length() - 1); - - // try to find the file on the include path - File f = thrift_grammar.findFile(fname, this.include_path); - if(f != null) { - found = true; - try { - FileInputStream fis = new FileInputStream(f); - thrift_grammar t = new thrift_grammar(fis,this.include_path, false); - t.Start(); - fis.close(); - found = true; - // add in what we found to our type and table tables. - this.tables.putAll(t.tables); - this.types.putAll(t.types); - } catch (Exception e) { - System.out.println("File: " + fname + " - Oops."); - System.out.println(e.getMessage()); - e.printStackTrace(); - } - } - if(!found) { - throw new RuntimeException("include file not found: " + fname); - } - return jjtn000; -}/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - - -SimpleNode Definition() : {/*@bgen(jjtree) Definition */ - DynamicSerDeDefinition jjtn000 = new DynamicSerDeDefinition(JJTDEFINITION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Definition */ - try { -/*@egen*/ - Const()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Service()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| TypeDefinition()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode TypeDefinition() : {/*@bgen(jjtree) TypeDefinition */ - DynamicSerDeTypeDefinition jjtn000 = new DynamicSerDeTypeDefinition(JJTTYPEDEFINITION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) TypeDefinition */ - try { -/*@egen*/ - Typedef()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Enum()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return 
jjtn000; - } -| Senum()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Struct()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Xception()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ - -} - -DynamicSerDeTypedef Typedef() : {/*@bgen(jjtree) Typedef */ - DynamicSerDeTypedef jjtn000 = new DynamicSerDeTypedef(JJTTYPEDEF); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Typedef */ - try { -/*@egen*/ - - DefinitionType() - jjtn000.name = .image/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - // store the type for later retrieval - this.types.put(jjtn000.name, jjtn000); - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - - -// returning void because we ignore this production. 
-void CommaOrSemicolon() : {/*@bgen(jjtree) CommaOrSemicolon */ - DynamicSerDeCommaOrSemicolon jjtn000 = new DynamicSerDeCommaOrSemicolon(JJTCOMMAORSEMICOLON); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) CommaOrSemicolon */ - try { -/*@egen*/ - "," -| - ";"/*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{ -}/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode Enum() : {/*@bgen(jjtree) Enum */ - DynamicSerDeEnum jjtn000 = new DynamicSerDeEnum(JJTENUM); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Enum */ - try { -/*@egen*/ - "{" EnumDefList() "}"/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode EnumDefList() : {/*@bgen(jjtree) EnumDefList */ - DynamicSerDeEnumDefList jjtn000 = new DynamicSerDeEnumDefList(JJTENUMDEFLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) EnumDefList */ - try { -/*@egen*/ - (EnumDef())+/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode EnumDef() : {/*@bgen(jjtree) EnumDef */ - DynamicSerDeEnumDef jjtn000 = new DynamicSerDeEnumDef(JJTENUMDEF); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) EnumDef */ - try { -/*@egen*/ - ["=" ] [CommaOrSemicolon()]/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode Senum() : {/*@bgen(jjtree) Senum */ - DynamicSerDeSenum jjtn000 = new DynamicSerDeSenum(JJTSENUM); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Senum */ - try { -/*@egen*/ - "{" SenumDefList() "}"/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) 
{ - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode SenumDefList() : {/*@bgen(jjtree) SenumDefList */ - DynamicSerDeSenumDefList jjtn000 = new DynamicSerDeSenumDefList(JJTSENUMDEFLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) SenumDefList */ - try { -/*@egen*/ - (SenumDef())+/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode SenumDef() : {/*@bgen(jjtree) SenumDef */ - DynamicSerDeSenumDef jjtn000 = new DynamicSerDeSenumDef(JJTSENUMDEF); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) SenumDef */ - try { -/*@egen*/ - [CommaOrSemicolon()]/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - - -SimpleNode Const() : {/*@bgen(jjtree) Const */ - DynamicSerDeConst jjtn000 = new DynamicSerDeConst(JJTCONST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Const */ - try { -/*@egen*/ - FieldType() "=" ConstValue() [CommaOrSemicolon()]/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode ConstValue() : {/*@bgen(jjtree) ConstValue */ - DynamicSerDeConstValue jjtn000 = new DynamicSerDeConstValue(JJTCONSTVALUE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) ConstValue */ - try { -/*@egen*/ - /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - } -| /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - } -| /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - } -| /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - } -| ConstList()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - } 
-| ConstMap()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode ConstList() : {/*@bgen(jjtree) ConstList */ - DynamicSerDeConstList jjtn000 = new DynamicSerDeConstList(JJTCONSTLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) ConstList */ - try { -/*@egen*/ - "[" ConstListContents() "]"/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode ConstListContents() : {/*@bgen(jjtree) ConstListContents */ - DynamicSerDeConstListContents jjtn000 = new DynamicSerDeConstListContents(JJTCONSTLISTCONTENTS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) ConstListContents */ - try { -/*@egen*/ - (ConstValue() [CommaOrSemicolon()])+/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode ConstMap() : {/*@bgen(jjtree) ConstMap */ - DynamicSerDeConstMap jjtn000 = new DynamicSerDeConstMap(JJTCONSTMAP); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) ConstMap */ - try { -/*@egen*/ - "{" ConstMapContents() "}"/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode ConstMapContents() : {/*@bgen(jjtree) ConstMapContents */ - DynamicSerDeConstMapContents jjtn000 = new DynamicSerDeConstMapContents(JJTCONSTMAPCONTENTS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) ConstMapContents */ - try { -/*@egen*/ - (ConstValue() ":" ConstValue() [CommaOrSemicolon()])+/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - 
jjtc000 = false; - } -/*@egen*/ - { - } -|/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeStruct Struct() : {/*@bgen(jjtree) Struct */ - DynamicSerDeStruct jjtn000 = new DynamicSerDeStruct(JJTSTRUCT); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/ - -} -{/*@bgen(jjtree) Struct */ - try { -/*@egen*/ - - jjtn000.name = .image - "{" - FieldList() - "}"/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - this.types.put(jjtn000.name,jjtn000); - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - - -SimpleNode Xception() : {/*@bgen(jjtree) Xception */ - DynamicSerDeXception jjtn000 = new DynamicSerDeXception(JJTXCEPTION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Xception */ - try { -/*@egen*/ - "{" FieldList() "}"/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - - -SimpleNode Service() : {/*@bgen(jjtree) Service */ - DynamicSerDeService jjtn000 = new DynamicSerDeService(JJTSERVICE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Service */ - try { -/*@egen*/ - - - Extends() - "{" - FlagArgs() - (Function())+ - UnflagArgs() - "}"/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - // at some point, these should be inserted as a "db" - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode FlagArgs() : {/*@bgen(jjtree) FlagArgs */ - DynamicSerDeFlagArgs jjtn000 = new DynamicSerDeFlagArgs(JJTFLAGARGS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) FlagArgs */ - try { -/*@egen*//*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, 
true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode UnflagArgs() : {/*@bgen(jjtree) UnflagArgs */ - DynamicSerDeUnflagArgs jjtn000 = new DynamicSerDeUnflagArgs(JJTUNFLAGARGS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) UnflagArgs */ - try { -/*@egen*//*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode Extends() : {/*@bgen(jjtree) Extends */ - DynamicSerDeExtends jjtn000 = new DynamicSerDeExtends(JJTEXTENDS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Extends */ - try { -/*@egen*/ - /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -|/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - - -DynamicSerDeFunction Function() : {/*@bgen(jjtree) Function */ - DynamicSerDeFunction jjtn000 = new DynamicSerDeFunction(JJTFUNCTION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Function */ - try { -/*@egen*/ - // metastore ignores async and type - Async() - FunctionType() - - // the name of the function/table - jjtn000.name = .image - "(" - FieldList() - ")" - Throws() - [CommaOrSemicolon()]/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - - { - this.tables.put(jjtn000.name, jjtn000); - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -void Async() : {/*@bgen(jjtree) Async */ - DynamicSerDeAsync jjtn000 = new DynamicSerDeAsync(JJTASYNC); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Async */ - try { -/*@egen*/ - -|/*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{}/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -void Throws() : {/*@bgen(jjtree) Throws */ - DynamicSerDeThrows jjtn000 = new DynamicSerDeThrows(JJTTHROWS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Throws */ - try { -/*@egen*/ - "(" FieldList() ")" -|/*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{}/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - - -// nothing 
special - just use the DynamicSerDeFieldList's children methods to access the fields -DynamicSerDeFieldList FieldList() : {/*@bgen(jjtree) FieldList */ - DynamicSerDeFieldList jjtn000 = new DynamicSerDeFieldList(JJTFIELDLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/ - this.field_val = -1; -} -{/*@bgen(jjtree) FieldList */ - try { -/*@egen*/ - (Field())*/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - - -DynamicSerDeField Field() : {/*@bgen(jjtree) Field */ - DynamicSerDeField jjtn000 = new DynamicSerDeField(JJTFIELD); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/ - - String fidnum = ""; - String fid; -} -{/*@bgen(jjtree) Field */ - try { -/*@egen*/ - - // parse the field id which is optional - [fidnum=.image ":"] - - // is this field required or optional? default is optional - FieldRequiredness() - - // field type - obviously not optional - FieldType() - - // the name of the field - not optional - jjtn000.name = .image - - // does it have = some value? - FieldValue() - - // take it or leave it - [CommaOrSemicolon()]/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - - { - if(fidnum.length() > 0) { - int fidInt = Integer.valueOf(fidnum); - jjtn000.fieldid = fidInt; - } else { - jjtn000.fieldid = this.field_val--; - } - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - - - -SimpleNode FieldRequiredness() : {/*@bgen(jjtree) FieldRequiredness */ - DynamicSerDeFieldRequiredness jjtn000 = new DynamicSerDeFieldRequiredness(JJTFIELDREQUIREDNESS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) FieldRequiredness */ - try { -/*@egen*/ - /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -|/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode FieldValue() : {/*@bgen(jjtree) FieldValue */ - DynamicSerDeFieldValue jjtn000 = new DynamicSerDeFieldValue(JJTFIELDVALUE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) FieldValue */ - try { -/*@egen*/ - "=" - ConstValue()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -|/*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); 
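The Field() production above also fixes the field-id convention: an explicit integer prefix (for example "3:" before the type) is parsed into fidnum and used verbatim, while unnumbered fields draw synthetic ids -1, -2, ... from the decrementing field_val counter. A small sketch of just that numbering rule; FieldIdDemo is a hypothetical name, the real logic lives inline in the generated action:

    // Sketch of the id-assignment rule in Field(): explicit ids win,
    // unnumbered fields get the next negative synthetic id.
    public class FieldIdDemo {
        private int fieldVal = -1; // mirrors this.field_val in the parser

        int assignId(String fidnum) {
            if (fidnum.length() > 0) {
                return Integer.valueOf(fidnum); // explicit "<n>:" prefix in the IDL
            }
            return fieldVal--; // -1, -2, ... for unnumbered fields
        }

        public static void main(String[] args) {
            FieldIdDemo d = new FieldIdDemo();
            System.out.println(d.assignId("3")); // 3
            System.out.println(d.assignId(""));  // -1
            System.out.println(d.assignId(""));  // -2
        }
    }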
- jjtc000 = false; -} -/*@egen*/ -{ - return jjtn000; -}/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -SimpleNode DefinitionType() : {/*@bgen(jjtree) DefinitionType */ - DynamicSerDeDefinitionType jjtn000 = new DynamicSerDeDefinitionType(JJTDEFINITIONTYPE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) DefinitionType */ - try { -/*@egen*/ -// BaseType() xxx - TypeString()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| TypeBool()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Typei16()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Typei32()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Typei64()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| TypeDouble()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| TypeMap()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| TypeSet()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| TypeList()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -void FunctionType() : {/*@bgen(jjtree) FunctionType */ - DynamicSerDeFunctionType jjtn000 = new DynamicSerDeFunctionType(JJTFUNCTIONTYPE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) FunctionType */ - try { -/*@egen*/ - FieldType() -| /*@bgen(jjtree)*/ -{ - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; -} -/*@egen*/ -{}/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeFieldType FieldType() : {/*@bgen(jjtree) FieldType */ - DynamicSerDeFieldType jjtn000 = new DynamicSerDeFieldType(JJTFIELDTYPE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/ -} - -{/*@bgen(jjtree) FieldType */ - try { -/*@egen*/ - 
TypeString()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| TypeBool()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Typei16()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Typei32()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| Typei64()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| TypeDouble()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| - TypeMap()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| - TypeSet()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| - TypeList()/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - } -| - jjtn000.name = .image/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeTypeString TypeString() : {/*@bgen(jjtree) TypeString */ - DynamicSerDeTypeString jjtn000 = new DynamicSerDeTypeString(JJTTYPESTRING); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) TypeString */ - try { -/*@egen*/ - /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeTypeByte TypeByte() : {/*@bgen(jjtree) TypeByte */ - DynamicSerDeTypeByte jjtn000 = new DynamicSerDeTypeByte(JJTTYPEBYTE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/ -} -{/*@bgen(jjtree) TypeByte */ - try { -/*@egen*/ - /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeTypei16 Typei16() : {/*@bgen(jjtree) Typei16 */ - DynamicSerDeTypei16 jjtn000 = new DynamicSerDeTypei16(JJTTYPEI16); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/ -} -{/*@bgen(jjtree) Typei16 */ - try { -/*@egen*/ - /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeTypei32 Typei32() : {/*@bgen(jjtree) Typei32 */ - DynamicSerDeTypei32 jjtn000 = new DynamicSerDeTypei32(JJTTYPEI32); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Typei32 */ - try { -/*@egen*/ - 
/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeTypei64 Typei64() : {/*@bgen(jjtree) Typei64 */ - DynamicSerDeTypei64 jjtn000 = new DynamicSerDeTypei64(JJTTYPEI64); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) Typei64 */ - try { -/*@egen*/ - /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeTypeDouble TypeDouble() : {/*@bgen(jjtree) TypeDouble */ - DynamicSerDeTypeDouble jjtn000 = new DynamicSerDeTypeDouble(JJTTYPEDOUBLE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) TypeDouble */ - try { -/*@egen*/ - /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeTypeBool TypeBool() : {/*@bgen(jjtree) TypeBool */ - DynamicSerDeTypeBool jjtn000 = new DynamicSerDeTypeBool(JJTTYPEBOOL); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) TypeBool */ - try { -/*@egen*/ - /*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeTypeMap TypeMap() : {/*@bgen(jjtree) TypeMap */ - DynamicSerDeTypeMap jjtn000 = new DynamicSerDeTypeMap(JJTTYPEMAP); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) TypeMap */ - try { -/*@egen*/ - - "<" - FieldType() - "," - FieldType() - ">"/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeTypeSet TypeSet() : {/*@bgen(jjtree) TypeSet */ - DynamicSerDeTypeSet jjtn000 = new DynamicSerDeTypeSet(JJTTYPESET); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) TypeSet */ - try { -/*@egen*/ - - "<" - - FieldType() - - ">"/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} - -DynamicSerDeTypeList TypeList() : {/*@bgen(jjtree) TypeList */ - DynamicSerDeTypeList jjtn000 = new DynamicSerDeTypeList(JJTTYPELIST); - 
boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} -{/*@bgen(jjtree) TypeList */ - try { -/*@egen*/ - - "<" - - FieldType() - - ">"/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ - { - return jjtn000; - }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/ParseException.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/ParseException.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/ParseException.java (working copy) @@ -1,210 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */ -package org.apache.hadoop.hive.serde.dynamic_type; - -/** - * This exception is thrown when parse errors are encountered. - * You can explicitly create objects of this exception type by - * calling the method generateParseException in the generated - * parser. - * - * You can modify this class to customize your error reporting - * mechanisms so long as you retain the public fields. - */ -public class ParseException extends Exception { - - /** - * This constructor is used by the method "generateParseException" - * in the generated parser. Calling this constructor generates - * a new object of this type with the fields "currentToken", - * "expectedTokenSequences", and "tokenImage" set. The boolean - * flag "specialConstructor" is also set to true to indicate that - * this constructor was used to create this object. - * This constructor calls its super class with the empty string - * to force the "toString" method of parent class "Throwable" to - * print the error message in the form: - * ParseException: - */ - public ParseException(Token currentTokenVal, - int[][] expectedTokenSequencesVal, - String[] tokenImageVal - ) - { - super(""); - specialConstructor = true; - currentToken = currentTokenVal; - expectedTokenSequences = expectedTokenSequencesVal; - tokenImage = tokenImageVal; - } - - /** - * The following constructors are for use by you for whatever - * purpose you can think of. 
Constructing the exception in this - * manner makes the exception behave in the normal way - i.e., as - * documented in the class "Throwable". The fields "errorToken", - * "expectedTokenSequences", and "tokenImage" do not contain - * relevant information. The JavaCC generated code does not use - * these constructors. - */ - - public ParseException() { - super(); - specialConstructor = false; - } - - public ParseException(String message) { - super(message); - specialConstructor = false; - } - - /** - * This variable determines which constructor was used to create - * this object and thereby affects the semantics of the - * "getMessage" method (see below). - */ - protected boolean specialConstructor; - - /** - * This is the last token that has been consumed successfully. If - * this object has been created due to a parse error, the token - * following this token will (therefore) be the first error token. - */ - public Token currentToken; - - /** - * Each entry in this array is an array of integers. Each array - * of integers represents a sequence of tokens (by their ordinal - * values) that is expected at this point of the parse. - */ - public int[][] expectedTokenSequences; - - /** - * This is a reference to the "tokenImage" array of the generated - * parser within which the parse error occurred. This array is - * defined in the generated ...Constants interface. - */ - public String[] tokenImage; - - /** - * This method has the standard behavior when this object has been - * created using the standard constructors. Otherwise, it uses - * "currentToken" and "expectedTokenSequences" to generate a parse - * error message and returns it. If this object has been created - * due to a parse error, and you do not catch it (it gets thrown - * from the parser), then this method is called during the printing - * of the final stack trace, and hence the correct error message - * gets displayed. - */ - public String getMessage() { - if (!specialConstructor) { - return super.getMessage(); - } - StringBuffer expected = new StringBuffer(); - int maxSize = 0; - for (int i = 0; i < expectedTokenSequences.length; i++) { - if (maxSize < expectedTokenSequences[i].length) { - maxSize = expectedTokenSequences[i].length; - } - for (int j = 0; j < expectedTokenSequences[i].length; j++) { - expected.append(tokenImage[expectedTokenSequences[i][j]]).append(" "); - } - if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) { - expected.append("..."); - } - expected.append(eol).append(" "); - } - String retval = "Encountered \""; - Token tok = currentToken.next; - for (int i = 0; i < maxSize; i++) { - if (i != 0) retval += " "; - if (tok.kind == 0) { - retval += tokenImage[0]; - break; - } - retval += add_escapes(tok.image); - tok = tok.next; - } - retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn; - retval += "." + eol; - if (expectedTokenSequences.length == 1) { - retval += "Was expecting:" + eol + " "; - } else { - retval += "Was expecting one of:" + eol + " "; - } - retval += expected.toString(); - return retval; - } - - /** - * The end of line string for this machine. - */ - protected String eol = System.getProperty("line.separator", "\n"); - - /** - * Used to convert raw characters to their escaped versions - * when these raw versions cannot be used as part of an ASCII - * string literal. 
- */ - protected String add_escapes(String str) { - StringBuffer retval = new StringBuffer(); - char ch; - for (int i = 0; i < str.length(); i++) { - switch (str.charAt(i)) - { - case 0 : - continue; - case '\b': - retval.append("\\b"); - continue; - case '\t': - retval.append("\\t"); - continue; - case '\n': - retval.append("\\n"); - continue; - case '\f': - retval.append("\\f"); - continue; - case '\r': - retval.append("\\r"); - continue; - case '\"': - retval.append("\\\""); - continue; - case '\'': - retval.append("\\\'"); - continue; - case '\\': - retval.append("\\\\"); - continue; - default: - if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { - String s = "0000" + Integer.toString(ch, 16); - retval.append("\\u" + s.substring(s.length() - 4, s.length())); - } else { - retval.append(ch); - } - continue; - } - } - return retval.toString(); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConstMapContents.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConstMapContents.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConstMapContents.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstMapContents.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeConstMapContents extends SimpleNode { - public DynamicSerDeConstMapContents(int id) { - super(id); - } - - public DynamicSerDeConstMapContents(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSenumDefList.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSenumDefList.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSenumDefList.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
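For reference while reading the deletions above and below: ParseException.add_escapes, and the identical TokenMgrError.addEscapes further down, turn control characters into backslash escapes and any other character outside printable ASCII into a \uXXXX sequence. A condensed, runnable restatement; EscapeDemo is our name, the behavior mirrors the deleted methods:

    // Control characters become backslash escapes, other non-printable
    // characters become \uXXXX, printable ASCII passes through unchanged.
    public class EscapeDemo {
        static String addEscapes(String str) {
            StringBuilder out = new StringBuilder();
            for (char ch : str.toCharArray()) {
                switch (ch) {
                    case 0:    continue;                  // NUL is dropped entirely
                    case '\b': out.append("\\b"); continue;
                    case '\t': out.append("\\t"); continue;
                    case '\n': out.append("\\n"); continue;
                    case '\f': out.append("\\f"); continue;
                    case '\r': out.append("\\r"); continue;
                    case '\"': out.append("\\\""); continue;
                    case '\'': out.append("\\'"); continue;
                    case '\\': out.append("\\\\"); continue;
                    default:
                        if (ch < 0x20 || ch > 0x7e) {     // outside printable ASCII
                            String s = "0000" + Integer.toString(ch, 16);
                            out.append("\\u").append(s.substring(s.length() - 4));
                        } else {
                            out.append(ch);
                        }
                }
            }
            return out.toString();
        }

        public static void main(String[] args) {
            System.out.println(addEscapes("a\tb\n\u00e9")); // a\tb\n\u00e9
        }
    }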
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeSenumDefList.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeSenumDefList extends SimpleNode { - public DynamicSerDeSenumDefList(int id) { - super(id); - } - - public DynamicSerDeSenumDefList(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeCommaOrSemicolon.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeCommaOrSemicolon.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeCommaOrSemicolon.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeCommaOrSemicolon.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeCommaOrSemicolon extends SimpleNode { - public DynamicSerDeCommaOrSemicolon(int id) { - super(id); - } - - public DynamicSerDeCommaOrSemicolon(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSenumDef.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSenumDef.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeSenumDef.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
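The node classes being removed here (DynamicSerDeConstMapContents, DynamicSerDeSenumDefList, DynamicSerDeCommaOrSemicolon, and the rest) all follow one JJTree template: a SimpleNode subclass with the two generated constructors and no behavior of its own. A sketch of that template, with SimpleNode stubbed and the node name hypothetical so it stands alone:

    // The shared shape of every deleted node class; only the class name varies.
    public class NodeClassDemo {
        static class SimpleNode {
            final int id;
            SimpleNode(int id) { this.id = id; }
        }

        // stands in for DynamicSerDeConstMapContents, DynamicSerDeSenumDef, ...
        static class ExampleNode extends SimpleNode {
            ExampleNode(int id) { super(id); }
            // the real classes also have a (thrift_grammar p, int id) constructor
            // that records the owning parser; omitted in this standalone stub
        }

        public static void main(String[] args) {
            System.out.println(new ExampleNode(7).id); // 7
        }
    }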
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeSenumDef.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeSenumDef extends SimpleNode { - public DynamicSerDeSenumDef(int id) { - super(id); - } - - public DynamicSerDeSenumDef(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/TokenMgrError.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/TokenMgrError.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/TokenMgrError.java (working copy) @@ -1,151 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 3.0 */ -package org.apache.hadoop.hive.serde.dynamic_type; - -public class TokenMgrError extends Error -{ - /* - * Ordinals for various reasons why an Error of this type can be thrown. - */ - - /** - * Lexical error occurred. - */ - static final int LEXICAL_ERROR = 0; - - /** - * An attempt was made to create a second instance of a static token manager. - */ - static final int STATIC_LEXER_ERROR = 1; - - /** - * Tried to change to an invalid lexical state. - */ - static final int INVALID_LEXICAL_STATE = 2; - - /** - * Detected (and bailed out of) an infinite loop in the token manager. - */ - static final int LOOP_DETECTED = 3; - - /** - * Indicates the reason why the exception is thrown. It will have - * one of the above 4 values. 
- */ - int errorCode; - - /** - * Replaces unprintable characters by their escaped (or unicode escaped) - * equivalents in the given string - */ - protected static final String addEscapes(String str) { - StringBuffer retval = new StringBuffer(); - char ch; - for (int i = 0; i < str.length(); i++) { - switch (str.charAt(i)) - { - case 0 : - continue; - case '\b': - retval.append("\\b"); - continue; - case '\t': - retval.append("\\t"); - continue; - case '\n': - retval.append("\\n"); - continue; - case '\f': - retval.append("\\f"); - continue; - case '\r': - retval.append("\\r"); - continue; - case '\"': - retval.append("\\\""); - continue; - case '\'': - retval.append("\\\'"); - continue; - case '\\': - retval.append("\\\\"); - continue; - default: - if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { - String s = "0000" + Integer.toString(ch, 16); - retval.append("\\u" + s.substring(s.length() - 4, s.length())); - } else { - retval.append(ch); - } - continue; - } - } - return retval.toString(); - } - - /** - * Returns a detailed message for the Error when it is thrown by the - * token manager to indicate a lexical error. - * Parameters : - * EOFSeen : indicates if EOF caused the lexical error - * curLexState : lexical state in which this error occurred - * errorLine : line number when the error occurred - * errorColumn : column number when the error occurred - * errorAfter : prefix that was seen before this error occurred - * curChar : the offending character - * Note: You can customize the lexical error message by modifying this method. - */ - protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) { - return("Lexical error at line " + - errorLine + ", column " + - errorColumn + ". Encountered: " + - (EOFSeen ? " " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") + - "after : \"" + addEscapes(errorAfter) + "\""); - } - - /** - * You can also modify the body of this method to customize your error messages. - * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not - * of end-users' concern, so you can return something like : - * - * "Internal Error : Please file a bug report .... " - * - * from this method for such cases in the release version of your parser. - */ - public String getMessage() { - return super.getMessage(); - } - - /* - * Constructors of various flavors follow. - */ - - public TokenMgrError() { - } - - public TokenMgrError(String message, int reason) { - super(message); - errorCode = reason; - } - - public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) { - this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason); - } -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammarTokenManager.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammarTokenManager.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammarTokenManager.java (working copy) @@ -1,1474 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
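Most of what remains below is the generated literal-matching machinery: jjMoveStringLiteralDfa0_0 dispatches on the first input character, and each subsequent jjMoveStringLiteralDfaN_0 step narrows active0, a bitmask of the keyword tokens still consistent with the input read so far, until one keyword survives or the mask empties. A compact sketch of the same technique with a plain loop in place of the generated switches; KeywordDfaDemo and its keyword list are hypothetical:

    // Each step keeps a bitmask of the keywords still consistent with the
    // characters consumed so far, mirroring active0 in the generated DFA.
    public class KeywordDfaDemo {
        static final String[] KEYWORDS = { "struct", "string", "service" };

        // Returns the index of the keyword matched by 'input', or -1.
        static int match(String input) {
            long active = (1L << KEYWORDS.length) - 1; // all keywords viable
            for (int pos = 0; pos < input.length(); pos++) {
                long next = 0L;
                for (int k = 0; k < KEYWORDS.length; k++) {
                    if ((active & (1L << k)) != 0
                            && pos < KEYWORDS[k].length()
                            && KEYWORDS[k].charAt(pos) == input.charAt(pos)) {
                        next |= 1L << k;
                    }
                }
                active = next;
                if (active == 0) return -1; // no keyword survives this character
            }
            for (int k = 0; k < KEYWORDS.length; k++) {
                if ((active & (1L << k)) != 0 && KEYWORDS[k].length() == input.length()) {
                    return k; // an exact-length survivor is the match
                }
            }
            return -1;
        }

        public static void main(String[] args) {
            System.out.println(match("struct"));  // 0
            System.out.println(match("service")); // 2
            System.out.println(match("str"));     // -1 (prefix only)
        }
    }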
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree&JavaCC: Do not edit this line. thrift_grammarTokenManager.java */ -package org.apache.hadoop.hive.serde.dynamic_type; -import java.util.*; -import java.io.*; -import java.net.*; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.transport.*; -import org.apache.hadoop.hive.serde.thrift.*; -import org.apache.hadoop.hive.serde.*; - -public class thrift_grammarTokenManager implements thrift_grammarConstants -{ - public java.io.PrintStream debugStream = System.out; - public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; } -private final int jjStopStringLiteralDfa_0(int pos, long active0, long active1) -{ - switch (pos) - { - case 0: - if ((active0 & 0x7ffffffffff00L) != 0L) - { - jjmatchedKind = 53; - return 35; - } - return -1; - case 1: - if ((active0 & 0x7ffffffffff00L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 1; - return 35; - } - return -1; - case 2: - if ((active0 & 0x14380000000L) != 0L) - return 35; - if ((active0 & 0x7febc7fffff00L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 2; - return 35; - } - return -1; - case 3: - if ((active0 & 0x1008070000000L) != 0L) - return 35; - if ((active0 & 0x6fe3c0fffff00L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 3; - return 35; - } - return -1; - case 4: - if ((active0 & 0x23000000100L) != 0L) - return 35; - if ((active0 & 0x6fc0c0ffffe00L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 4; - return 35; - } - return -1; - case 5: - if ((active0 & 0x480c00000000L) != 0L) - return 35; - if ((active0 & 0x6b4000ffffe00L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 5; - return 35; - } - return -1; - case 6: - if ((active0 & 0xa40008400000L) != 0L) - return 35; - if ((active0 & 0x6100007bffe00L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 6; - return 35; - } - return -1; - case 7: - if ((active0 & 0x6000000001000L) != 0L) - return 35; - if ((active0 & 0x100007bfee00L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 7; - return 35; - } - return -1; - case 8: - if ((active0 & 0x3bdec00L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 8; - return 35; - } - if ((active0 & 0x100004020200L) != 0L) - return 35; - return -1; - case 9: - if ((active0 & 0x3bdec00L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 9; - return 35; - } - return -1; - case 10: - if ((active0 & 0x800L) != 0L) - return 35; - if ((active0 & 0x3bde400L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 10; - return 35; - } - return -1; - case 11: - if ((active0 & 0x1846000L) != 0L) - return 35; - if ((active0 & 0x2398400L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 11; - return 35; - } - return -1; - case 12: - if ((active0 & 0x2010400L) != 0L) - return 35; - if ((active0 & 0x388000L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 12; - return 35; - } - return -1; - case 13: - if ((active0 & 0x80000L) != 0L) - return 35; - if ((active0 & 0x308000L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 
13; - return 35; - } - return -1; - case 14: - if ((active0 & 0x308000L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 14; - return 35; - } - return -1; - case 15: - if ((active0 & 0x208000L) != 0L) - return 35; - if ((active0 & 0x100000L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 15; - return 35; - } - return -1; - case 16: - if ((active0 & 0x100000L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 16; - return 35; - } - return -1; - default : - return -1; - } -} -private final int jjStartNfa_0(int pos, long active0, long active1) -{ - return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0, active1), pos + 1); -} -private final int jjStopAtPos(int pos, int kind) -{ - jjmatchedKind = kind; - jjmatchedPos = pos; - return pos + 1; -} -private final int jjStartNfaWithStates_0(int pos, int kind, int state) -{ - jjmatchedKind = kind; - jjmatchedPos = pos; - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { return pos + 1; } - return jjMoveNfa_0(state, pos + 1); -} -private final int jjMoveStringLiteralDfa0_0() -{ - switch(curChar) - { - case 40: - return jjStopAtPos(0, 66); - case 41: - return jjStopAtPos(0, 67); - case 44: - return jjStopAtPos(0, 58); - case 58: - return jjStopAtPos(0, 65); - case 59: - return jjStopAtPos(0, 59); - case 60: - return jjStopAtPos(0, 68); - case 61: - return jjStopAtPos(0, 62); - case 62: - return jjStopAtPos(0, 69); - case 91: - return jjStopAtPos(0, 63); - case 93: - return jjStopAtPos(0, 64); - case 97: - return jjMoveStringLiteralDfa1_0(0x20000000000L); - case 98: - return jjMoveStringLiteralDfa1_0(0x60000000L); - case 99: - return jjMoveStringLiteralDfa1_0(0xdd00L); - case 100: - return jjMoveStringLiteralDfa1_0(0x400000000L); - case 101: - return jjMoveStringLiteralDfa1_0(0x1300000000000L); - case 105: - return jjMoveStringLiteralDfa1_0(0x388000000L); - case 106: - return jjMoveStringLiteralDfa1_0(0x2000L); - case 108: - return jjMoveStringLiteralDfa1_0(0x8000000000L); - case 109: - return jjMoveStringLiteralDfa1_0(0x4000000000L); - case 110: - return jjMoveStringLiteralDfa1_0(0x200L); - case 111: - return jjMoveStringLiteralDfa1_0(0x4000000000000L); - case 112: - return jjMoveStringLiteralDfa1_0(0x70000L); - case 114: - return jjMoveStringLiteralDfa1_0(0x2000000080000L); - case 115: - return jjMoveStringLiteralDfa1_0(0x893800300000L); - case 116: - return jjMoveStringLiteralDfa1_0(0x440000000000L); - case 118: - return jjMoveStringLiteralDfa1_0(0x10000000L); - case 120: - return jjMoveStringLiteralDfa1_0(0x7c00000L); - case 123: - return jjStopAtPos(0, 60); - case 125: - return jjStopAtPos(0, 61); - default : - return jjMoveNfa_0(0, 0); - } -} -private final int jjMoveStringLiteralDfa1_0(long active0) -{ - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(0, active0, 0L); - return 1; - } - switch(curChar) - { - case 49: - return jjMoveStringLiteralDfa2_0(active0, 0x80000000L); - case 51: - return jjMoveStringLiteralDfa2_0(active0, 0x100000000L); - case 54: - return jjMoveStringLiteralDfa2_0(active0, 0x200000000L); - case 97: - return jjMoveStringLiteralDfa2_0(active0, 0x4000002200L); - case 101: - return jjMoveStringLiteralDfa2_0(active0, 0x2812000040000L); - case 104: - return jjMoveStringLiteralDfa2_0(active0, 0x400000010000L); - case 105: - return jjMoveStringLiteralDfa2_0(active0, 0x8000000000L); - case 108: - return jjMoveStringLiteralDfa2_0(active0, 0x1000000000L); - case 109: - return jjMoveStringLiteralDfa2_0(active0, 0x300000L); - case 110: - return 
jjMoveStringLiteralDfa2_0(active0, 0x1000008000000L); - case 111: - return jjMoveStringLiteralDfa2_0(active0, 0x430004100L); - case 112: - return jjMoveStringLiteralDfa2_0(active0, 0x4000000001c00L); - case 115: - return jjMoveStringLiteralDfa2_0(active0, 0x20007c08000L); - case 116: - return jjMoveStringLiteralDfa2_0(active0, 0x80800000000L); - case 117: - return jjMoveStringLiteralDfa2_0(active0, 0x80000L); - case 120: - return jjMoveStringLiteralDfa2_0(active0, 0x300000000000L); - case 121: - return jjMoveStringLiteralDfa2_0(active0, 0x40040020000L); - default : - break; - } - return jjStartNfa_0(0, active0, 0L); -} -private final int jjMoveStringLiteralDfa2_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(0, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(1, active0, 0L); - return 2; - } - switch(curChar) - { - case 50: - if ((active0 & 0x100000000L) != 0L) - return jjStartNfaWithStates_0(2, 32, 35); - break; - case 52: - if ((active0 & 0x200000000L) != 0L) - return jjStartNfaWithStates_0(2, 33, 35); - break; - case 54: - if ((active0 & 0x80000000L) != 0L) - return jjStartNfaWithStates_0(2, 31, 35); - break; - case 95: - return jjMoveStringLiteralDfa3_0(active0, 0x20000L); - case 97: - return jjMoveStringLiteralDfa3_0(active0, 0x300000L); - case 98: - return jjMoveStringLiteralDfa3_0(active0, 0x80000L); - case 99: - return jjMoveStringLiteralDfa3_0(active0, 0x100008004000L); - case 100: - return jjMoveStringLiteralDfa3_0(active0, 0x7c00000L); - case 104: - return jjMoveStringLiteralDfa3_0(active0, 0x8000L); - case 105: - return jjMoveStringLiteralDfa3_0(active0, 0x1010000000L); - case 109: - return jjMoveStringLiteralDfa3_0(active0, 0x200L); - case 110: - return jjMoveStringLiteralDfa3_0(active0, 0x2000000100L); - case 111: - return jjMoveStringLiteralDfa3_0(active0, 0x20000000L); - case 112: - if ((active0 & 0x4000000000L) != 0L) - return jjStartNfaWithStates_0(2, 38, 35); - return jjMoveStringLiteralDfa3_0(active0, 0x40000011c00L); - case 113: - return jjMoveStringLiteralDfa3_0(active0, 0x2000000000000L); - case 114: - return jjMoveStringLiteralDfa3_0(active0, 0xc80800040000L); - case 115: - return jjMoveStringLiteralDfa3_0(active0, 0x8000000000L); - case 116: - if ((active0 & 0x10000000000L) != 0L) - return jjStartNfaWithStates_0(2, 40, 35); - return jjMoveStringLiteralDfa3_0(active0, 0x4200040000000L); - case 117: - return jjMoveStringLiteralDfa3_0(active0, 0x1000400000000L); - case 118: - return jjMoveStringLiteralDfa3_0(active0, 0x2000L); - case 121: - return jjMoveStringLiteralDfa3_0(active0, 0x20000000000L); - default : - break; - } - return jjStartNfa_0(1, active0, 0L); -} -private final int jjMoveStringLiteralDfa3_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(1, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(2, active0, 0L); - return 3; - } - switch(curChar) - { - case 95: - return jjMoveStringLiteralDfa4_0(active0, 0x7c11c00L); - case 97: - return jjMoveStringLiteralDfa4_0(active0, 0xa000L); - case 98: - return jjMoveStringLiteralDfa4_0(active0, 0x400000000L); - case 100: - if ((active0 & 0x10000000L) != 0L) - return jjStartNfaWithStates_0(3, 28, 35); - break; - case 101: - if ((active0 & 0x40000000L) != 0L) - return jjStartNfaWithStates_0(3, 30, 35); - return jjMoveStringLiteralDfa4_0(active0, 0x340000000200L); - case 105: - return jjMoveStringLiteralDfa4_0(active0, 
0x4000800000000L); - case 108: - if ((active0 & 0x20000000L) != 0L) - return jjStartNfaWithStates_0(3, 29, 35); - return jjMoveStringLiteralDfa4_0(active0, 0x8340000L); - case 109: - if ((active0 & 0x1000000000000L) != 0L) - return jjStartNfaWithStates_0(3, 48, 35); - return jjMoveStringLiteralDfa4_0(active0, 0x20000L); - case 110: - return jjMoveStringLiteralDfa4_0(active0, 0x20000000000L); - case 111: - return jjMoveStringLiteralDfa4_0(active0, 0x400000004000L); - case 115: - return jjMoveStringLiteralDfa4_0(active0, 0x1000000100L); - case 116: - if ((active0 & 0x8000000000L) != 0L) - return jjStartNfaWithStates_0(3, 39, 35); - break; - case 117: - return jjMoveStringLiteralDfa4_0(active0, 0x2082000000000L); - case 118: - return jjMoveStringLiteralDfa4_0(active0, 0x800000000000L); - case 121: - return jjMoveStringLiteralDfa4_0(active0, 0x80000L); - default : - break; - } - return jjStartNfa_0(2, active0, 0L); -} -private final int jjMoveStringLiteralDfa4_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(2, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(3, active0, 0L); - return 4; - } - switch(curChar) - { - case 95: - return jjMoveStringLiteralDfa5_0(active0, 0xc2000L); - case 97: - return jjMoveStringLiteralDfa5_0(active0, 0x4404000L); - case 99: - if ((active0 & 0x20000000000L) != 0L) - return jjStartNfaWithStates_0(4, 41, 35); - return jjMoveStringLiteralDfa5_0(active0, 0x80000000000L); - case 100: - return jjMoveStringLiteralDfa5_0(active0, 0x40000000000L); - case 105: - return jjMoveStringLiteralDfa5_0(active0, 0x2800000000800L); - case 108: - return jjMoveStringLiteralDfa5_0(active0, 0x400300000L); - case 109: - if ((active0 & 0x2000000000L) != 0L) - return jjStartNfaWithStates_0(4, 37, 35); - break; - case 110: - return jjMoveStringLiteralDfa5_0(active0, 0x200803010400L); - case 111: - return jjMoveStringLiteralDfa5_0(active0, 0x4000000820000L); - case 112: - return jjMoveStringLiteralDfa5_0(active0, 0x100000000000L); - case 114: - return jjMoveStringLiteralDfa5_0(active0, 0x8000L); - case 115: - return jjMoveStringLiteralDfa5_0(active0, 0x200L); - case 116: - if ((active0 & 0x100L) != 0L) - return jjStartNfaWithStates_0(4, 8, 35); - else if ((active0 & 0x1000000000L) != 0L) - return jjStartNfaWithStates_0(4, 36, 35); - return jjMoveStringLiteralDfa5_0(active0, 0x1000L); - case 117: - return jjMoveStringLiteralDfa5_0(active0, 0x8000000L); - case 119: - return jjMoveStringLiteralDfa5_0(active0, 0x400000000000L); - default : - break; - } - return jjStartNfa_0(3, active0, 0L); -} -private final int jjMoveStringLiteralDfa5_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(3, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(4, active0, 0L); - return 5; - } - switch(curChar) - { - case 95: - return jjMoveStringLiteralDfa6_0(active0, 0x4000L); - case 97: - return jjMoveStringLiteralDfa6_0(active0, 0x2010400L); - case 99: - return jjMoveStringLiteralDfa6_0(active0, 0x800000000000L); - case 100: - return jjMoveStringLiteralDfa6_0(active0, 0x200008020000L); - case 101: - if ((active0 & 0x400000000L) != 0L) - return jjStartNfaWithStates_0(5, 34, 35); - return jjMoveStringLiteralDfa6_0(active0, 0x40000000000L); - case 103: - if ((active0 & 0x800000000L) != 0L) - return jjStartNfaWithStates_0(5, 35, 35); - break; - case 105: - return jjMoveStringLiteralDfa6_0(active0, 0x1000000L); - case 
108: - return jjMoveStringLiteralDfa6_0(active0, 0x400000L); - case 110: - return jjMoveStringLiteralDfa6_0(active0, 0x4000000080800L); - case 112: - return jjMoveStringLiteralDfa6_0(active0, 0x84a200L); - case 114: - return jjMoveStringLiteralDfa6_0(active0, 0x2000000000000L); - case 115: - if ((active0 & 0x400000000000L) != 0L) - return jjStartNfaWithStates_0(5, 46, 35); - break; - case 116: - if ((active0 & 0x80000000000L) != 0L) - return jjStartNfaWithStates_0(5, 43, 35); - return jjMoveStringLiteralDfa6_0(active0, 0x100004300000L); - case 121: - return jjMoveStringLiteralDfa6_0(active0, 0x1000L); - default : - break; - } - return jjStartNfa_0(4, active0, 0L); -} -private final int jjMoveStringLiteralDfa6_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(4, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(5, active0, 0L); - return 6; - } - switch(curChar) - { - case 95: - return jjMoveStringLiteralDfa7_0(active0, 0x8000L); - case 97: - return jjMoveStringLiteralDfa7_0(active0, 0x40000003c2200L); - case 99: - return jjMoveStringLiteralDfa7_0(active0, 0x800L); - case 101: - if ((active0 & 0x8000000L) != 0L) - return jjStartNfaWithStates_0(6, 27, 35); - else if ((active0 & 0x800000000000L) != 0L) - return jjStartNfaWithStates_0(6, 47, 35); - return jjMoveStringLiteralDfa7_0(active0, 0x2000000000000L); - case 102: - if ((active0 & 0x40000000000L) != 0L) - return jjStartNfaWithStates_0(6, 42, 35); - break; - case 105: - return jjMoveStringLiteralDfa7_0(active0, 0x100000000000L); - case 108: - if ((active0 & 0x400000L) != 0L) - return jjStartNfaWithStates_0(6, 22, 35); - return jjMoveStringLiteralDfa7_0(active0, 0x1000000L); - case 109: - return jjMoveStringLiteralDfa7_0(active0, 0x2010400L); - case 112: - return jjMoveStringLiteralDfa7_0(active0, 0x5000L); - case 115: - if ((active0 & 0x200000000000L) != 0L) - return jjStartNfaWithStates_0(6, 45, 35); - break; - case 116: - return jjMoveStringLiteralDfa7_0(active0, 0x4800000L); - case 117: - return jjMoveStringLiteralDfa7_0(active0, 0x20000L); - default : - break; - } - return jjStartNfa_0(5, active0, 0L); -} -private final int jjMoveStringLiteralDfa7_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(5, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(6, active0, 0L); - return 7; - } - switch(curChar) - { - case 99: - return jjMoveStringLiteralDfa8_0(active0, 0x42200L); - case 100: - if ((active0 & 0x2000000000000L) != 0L) - return jjStartNfaWithStates_0(7, 49, 35); - break; - case 101: - if ((active0 & 0x1000L) != 0L) - return jjStartNfaWithStates_0(7, 12, 35); - return jjMoveStringLiteralDfa8_0(active0, 0x2010400L); - case 105: - return jjMoveStringLiteralDfa8_0(active0, 0x800000L); - case 108: - if ((active0 & 0x4000000000000L) != 0L) - return jjStartNfaWithStates_0(7, 50, 35); - return jjMoveStringLiteralDfa8_0(active0, 0x1320800L); - case 109: - return jjMoveStringLiteralDfa8_0(active0, 0x80000L); - case 110: - return jjMoveStringLiteralDfa8_0(active0, 0x8000L); - case 111: - return jjMoveStringLiteralDfa8_0(active0, 0x100000000000L); - case 114: - return jjMoveStringLiteralDfa8_0(active0, 0x4004000L); - default : - break; - } - return jjStartNfa_0(6, active0, 0L); -} -private final int jjMoveStringLiteralDfa8_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(6, old0, 0L); - try { curChar = 
input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(7, active0, 0L); - return 8; - } - switch(curChar) - { - case 97: - return jjMoveStringLiteralDfa9_0(active0, 0x1008000L); - case 101: - if ((active0 & 0x200L) != 0L) - return jjStartNfaWithStates_0(8, 9, 35); - else if ((active0 & 0x20000L) != 0L) - return jjStartNfaWithStates_0(8, 17, 35); - return jjMoveStringLiteralDfa9_0(active0, 0x84000L); - case 107: - return jjMoveStringLiteralDfa9_0(active0, 0x342000L); - case 110: - if ((active0 & 0x100000000000L) != 0L) - return jjStartNfaWithStates_0(8, 44, 35); - break; - case 111: - return jjMoveStringLiteralDfa9_0(active0, 0x800000L); - case 115: - if ((active0 & 0x4000000L) != 0L) - return jjStartNfaWithStates_0(8, 26, 35); - return jjMoveStringLiteralDfa9_0(active0, 0x2010400L); - case 117: - return jjMoveStringLiteralDfa9_0(active0, 0x800L); - default : - break; - } - return jjStartNfa_0(7, active0, 0L); -} -private final int jjMoveStringLiteralDfa9_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(7, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(8, active0, 0L); - return 9; - } - switch(curChar) - { - case 95: - return jjMoveStringLiteralDfa10_0(active0, 0x300000L); - case 97: - return jjMoveStringLiteralDfa10_0(active0, 0x42000L); - case 98: - return jjMoveStringLiteralDfa10_0(active0, 0x1000000L); - case 100: - return jjMoveStringLiteralDfa10_0(active0, 0x800L); - case 102: - return jjMoveStringLiteralDfa10_0(active0, 0x4000L); - case 109: - return jjMoveStringLiteralDfa10_0(active0, 0x8000L); - case 110: - return jjMoveStringLiteralDfa10_0(active0, 0x800000L); - case 112: - return jjMoveStringLiteralDfa10_0(active0, 0x2010400L); - case 115: - return jjMoveStringLiteralDfa10_0(active0, 0x80000L); - default : - break; - } - return jjStartNfa_0(8, active0, 0L); -} -private final int jjMoveStringLiteralDfa10_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(8, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(9, active0, 0L); - return 10; - } - switch(curChar) - { - case 97: - return jjMoveStringLiteralDfa11_0(active0, 0x2810400L); - case 99: - return jjMoveStringLiteralDfa11_0(active0, 0x100000L); - case 101: - if ((active0 & 0x800L) != 0L) - return jjStartNfaWithStates_0(10, 11, 35); - return jjMoveStringLiteralDfa11_0(active0, 0x8000L); - case 103: - return jjMoveStringLiteralDfa11_0(active0, 0x42000L); - case 105: - return jjMoveStringLiteralDfa11_0(active0, 0x4000L); - case 108: - return jjMoveStringLiteralDfa11_0(active0, 0x1000000L); - case 112: - return jjMoveStringLiteralDfa11_0(active0, 0x280000L); - default : - break; - } - return jjStartNfa_0(9, active0, 0L); -} -private final int jjMoveStringLiteralDfa11_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(9, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(10, active0, 0L); - return 11; - } - switch(curChar) - { - case 97: - return jjMoveStringLiteralDfa12_0(active0, 0x180000L); - case 99: - return jjMoveStringLiteralDfa12_0(active0, 0x2010400L); - case 101: - if ((active0 & 0x2000L) != 0L) - return jjStartNfaWithStates_0(11, 13, 35); - else if ((active0 & 0x40000L) != 0L) - return jjStartNfaWithStates_0(11, 18, 35); - else if ((active0 & 0x1000000L) != 0L) - return 
jjStartNfaWithStates_0(11, 24, 35); - break; - case 108: - if ((active0 & 0x800000L) != 0L) - return jjStartNfaWithStates_0(11, 23, 35); - break; - case 114: - return jjMoveStringLiteralDfa12_0(active0, 0x200000L); - case 115: - return jjMoveStringLiteralDfa12_0(active0, 0x8000L); - case 120: - if ((active0 & 0x4000L) != 0L) - return jjStartNfaWithStates_0(11, 14, 35); - break; - default : - break; - } - return jjStartNfa_0(10, active0, 0L); -} -private final int jjMoveStringLiteralDfa12_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(10, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(11, active0, 0L); - return 12; - } - switch(curChar) - { - case 99: - return jjMoveStringLiteralDfa13_0(active0, 0x80000L); - case 101: - if ((active0 & 0x400L) != 0L) - return jjStartNfaWithStates_0(12, 10, 35); - else if ((active0 & 0x10000L) != 0L) - return jjStartNfaWithStates_0(12, 16, 35); - else if ((active0 & 0x2000000L) != 0L) - return jjStartNfaWithStates_0(12, 25, 35); - return jjMoveStringLiteralDfa13_0(active0, 0x200000L); - case 112: - return jjMoveStringLiteralDfa13_0(active0, 0x8000L); - case 116: - return jjMoveStringLiteralDfa13_0(active0, 0x100000L); - default : - break; - } - return jjStartNfa_0(11, active0, 0L); -} -private final int jjMoveStringLiteralDfa13_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(11, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(12, active0, 0L); - return 13; - } - switch(curChar) - { - case 97: - return jjMoveStringLiteralDfa14_0(active0, 0x8000L); - case 101: - if ((active0 & 0x80000L) != 0L) - return jjStartNfaWithStates_0(13, 19, 35); - return jjMoveStringLiteralDfa14_0(active0, 0x100000L); - case 102: - return jjMoveStringLiteralDfa14_0(active0, 0x200000L); - default : - break; - } - return jjStartNfa_0(12, active0, 0L); -} -private final int jjMoveStringLiteralDfa14_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(12, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(13, active0, 0L); - return 14; - } - switch(curChar) - { - case 99: - return jjMoveStringLiteralDfa15_0(active0, 0x8000L); - case 103: - return jjMoveStringLiteralDfa15_0(active0, 0x100000L); - case 105: - return jjMoveStringLiteralDfa15_0(active0, 0x200000L); - default : - break; - } - return jjStartNfa_0(13, active0, 0L); -} -private final int jjMoveStringLiteralDfa15_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(13, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(14, active0, 0L); - return 15; - } - switch(curChar) - { - case 101: - if ((active0 & 0x8000L) != 0L) - return jjStartNfaWithStates_0(15, 15, 35); - break; - case 111: - return jjMoveStringLiteralDfa16_0(active0, 0x100000L); - case 120: - if ((active0 & 0x200000L) != 0L) - return jjStartNfaWithStates_0(15, 21, 35); - break; - default : - break; - } - return jjStartNfa_0(14, active0, 0L); -} -private final int jjMoveStringLiteralDfa16_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(14, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(15, active0, 0L); - return 16; - } - switch(curChar) - { - case 114: - return 
jjMoveStringLiteralDfa17_0(active0, 0x100000L); - default : - break; - } - return jjStartNfa_0(15, active0, 0L); -} -private final int jjMoveStringLiteralDfa17_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(15, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(16, active0, 0L); - return 17; - } - switch(curChar) - { - case 121: - if ((active0 & 0x100000L) != 0L) - return jjStartNfaWithStates_0(17, 20, 35); - break; - default : - break; - } - return jjStartNfa_0(16, active0, 0L); -} -private final void jjCheckNAdd(int state) -{ - if (jjrounds[state] != jjround) - { - jjstateSet[jjnewStateCnt++] = state; - jjrounds[state] = jjround; - } -} -private final void jjAddStates(int start, int end) -{ - do { - jjstateSet[jjnewStateCnt++] = jjnextStates[start]; - } while (start++ != end); -} -private final void jjCheckNAddTwoStates(int state1, int state2) -{ - jjCheckNAdd(state1); - jjCheckNAdd(state2); -} -private final void jjCheckNAddStates(int start, int end) -{ - do { - jjCheckNAdd(jjnextStates[start]); - } while (start++ != end); -} -private final void jjCheckNAddStates(int start) -{ - jjCheckNAdd(jjnextStates[start]); - jjCheckNAdd(jjnextStates[start + 1]); -} -static final long[] jjbitVec0 = { - 0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL -}; -private final int jjMoveNfa_0(int startState, int curPos) -{ - int[] nextStates; - int startsAt = 0; - jjnewStateCnt = 35; - int i = 1; - jjstateSet[0] = startState; - int j, kind = 0x7fffffff; - for (;;) - { - if (++jjround == 0x7fffffff) - ReInitRounds(); - if (curChar < 64) - { - long l = 1L << curChar; - MatchLoop: do - { - switch(jjstateSet[--i]) - { - case 35: - if ((0x3ff600000000000L & l) != 0L) - { - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - } - if ((0x3ff400000000000L & l) != 0L) - { - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - } - break; - case 0: - if ((0x3ff000000000000L & l) != 0L) - { - if (kind > 51) - kind = 51; - jjCheckNAdd(5); - } - else if ((0x280000000000L & l) != 0L) - jjCheckNAddStates(0, 2); - else if (curChar == 47) - jjAddStates(3, 4); - else if (curChar == 39) - jjCheckNAddTwoStates(12, 13); - else if (curChar == 34) - jjCheckNAddTwoStates(9, 10); - else if (curChar == 35) - jjCheckNAddStates(5, 7); - if (curChar == 45) - { - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - } - break; - case 1: - if ((0xfffffffffffffbffL & l) != 0L) - jjCheckNAddStates(5, 7); - break; - case 2: - if ((0x2400L & l) != 0L && kind > 5) - kind = 5; - break; - case 3: - if (curChar == 10 && kind > 5) - kind = 5; - break; - case 4: - if (curChar == 13) - jjstateSet[jjnewStateCnt++] = 3; - break; - case 5: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 51) - kind = 51; - jjCheckNAdd(5); - break; - case 7: - if ((0x3ff400000000000L & l) == 0L) - break; - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - break; - case 8: - if (curChar == 34) - jjCheckNAddTwoStates(9, 10); - break; - case 9: - if ((0xfffffffbffffffffL & l) != 0L) - jjCheckNAddTwoStates(9, 10); - break; - case 10: - if (curChar == 34 && kind > 56) - kind = 56; - break; - case 11: - if (curChar == 39) - jjCheckNAddTwoStates(12, 13); - break; - case 12: - if ((0xffffff7fffffffffL & l) != 0L) - jjCheckNAddTwoStates(12, 13); - break; - case 13: - if (curChar == 39 && kind > 56) - kind = 56; - break; - case 14: - if (curChar != 45) - break; - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - break; - case 15: - if ((0x3ff600000000000L & l) == 0L) - break; - 
if (kind > 57) - kind = 57; - jjCheckNAdd(15); - break; - case 16: - if (curChar == 47) - jjAddStates(3, 4); - break; - case 17: - if (curChar == 47) - jjCheckNAddStates(8, 10); - break; - case 18: - if ((0xffffffffffffdbffL & l) != 0L) - jjCheckNAddStates(8, 10); - break; - case 19: - if ((0x2400L & l) != 0L && kind > 6) - kind = 6; - break; - case 20: - if (curChar == 10 && kind > 6) - kind = 6; - break; - case 21: - if (curChar == 13) - jjstateSet[jjnewStateCnt++] = 20; - break; - case 22: - if (curChar == 42) - jjCheckNAddTwoStates(23, 24); - break; - case 23: - if ((0xfffffbffffffffffL & l) != 0L) - jjCheckNAddTwoStates(23, 24); - break; - case 24: - if (curChar == 42) - jjAddStates(11, 12); - break; - case 25: - if ((0xffff7fffffffffffL & l) != 0L) - jjCheckNAddTwoStates(26, 24); - break; - case 26: - if ((0xfffffbffffffffffL & l) != 0L) - jjCheckNAddTwoStates(26, 24); - break; - case 27: - if (curChar == 47 && kind > 7) - kind = 7; - break; - case 28: - if ((0x280000000000L & l) != 0L) - jjCheckNAddStates(0, 2); - break; - case 29: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(29, 30); - break; - case 30: - if (curChar == 46) - jjCheckNAdd(31); - break; - case 31: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 52) - kind = 52; - jjCheckNAddTwoStates(31, 32); - break; - case 33: - if ((0x280000000000L & l) != 0L) - jjCheckNAdd(34); - break; - case 34: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 52) - kind = 52; - jjCheckNAdd(34); - break; - default : break; - } - } while(i != startsAt); - } - else if (curChar < 128) - { - long l = 1L << (curChar & 077); - MatchLoop: do - { - switch(jjstateSet[--i]) - { - case 35: - if ((0x7fffffe87fffffeL & l) != 0L) - { - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - } - if ((0x7fffffe87fffffeL & l) != 0L) - { - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - } - break; - case 0: - if ((0x7fffffe07fffffeL & l) != 0L) - { - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - } - if ((0x7fffffe07fffffeL & l) != 0L) - { - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - } - break; - case 1: - jjAddStates(5, 7); - break; - case 6: - if ((0x7fffffe07fffffeL & l) == 0L) - break; - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - break; - case 7: - if ((0x7fffffe87fffffeL & l) == 0L) - break; - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - break; - case 9: - jjAddStates(13, 14); - break; - case 12: - jjAddStates(15, 16); - break; - case 14: - if ((0x7fffffe07fffffeL & l) == 0L) - break; - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - break; - case 15: - if ((0x7fffffe87fffffeL & l) == 0L) - break; - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - break; - case 18: - jjAddStates(8, 10); - break; - case 23: - jjCheckNAddTwoStates(23, 24); - break; - case 25: - case 26: - jjCheckNAddTwoStates(26, 24); - break; - case 32: - if ((0x2000000020L & l) != 0L) - jjAddStates(17, 18); - break; - default : break; - } - } while(i != startsAt); - } - else - { - int i2 = (curChar & 0xff) >> 6; - long l2 = 1L << (curChar & 077); - MatchLoop: do - { - switch(jjstateSet[--i]) - { - case 1: - if ((jjbitVec0[i2] & l2) != 0L) - jjAddStates(5, 7); - break; - case 9: - if ((jjbitVec0[i2] & l2) != 0L) - jjAddStates(13, 14); - break; - case 12: - if ((jjbitVec0[i2] & l2) != 0L) - jjAddStates(15, 16); - break; - case 18: - if ((jjbitVec0[i2] & l2) != 0L) - jjAddStates(8, 10); - break; - case 23: - if ((jjbitVec0[i2] & l2) != 0L) - jjCheckNAddTwoStates(23, 24); - break; - case 25: - case 26: - if ((jjbitVec0[i2] & l2) != 0L) - 
jjCheckNAddTwoStates(26, 24); - break; - default : break; - } - } while(i != startsAt); - } - if (kind != 0x7fffffff) - { - jjmatchedKind = kind; - jjmatchedPos = curPos; - kind = 0x7fffffff; - } - ++curPos; - if ((i = jjnewStateCnt) == (startsAt = 35 - (jjnewStateCnt = startsAt))) - return curPos; - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { return curPos; } - } -} -static final int[] jjnextStates = { - 5, 29, 30, 17, 22, 1, 2, 4, 18, 19, 21, 25, 27, 9, 10, 12, - 13, 33, 34, -}; -public static final String[] jjstrLiteralImages = { -"", null, null, null, null, null, null, null, "\143\157\156\163\164", -"\156\141\155\145\163\160\141\143\145", "\143\160\160\137\156\141\155\145\163\160\141\143\145", -"\143\160\160\137\151\156\143\154\165\144\145", "\143\160\160\137\164\171\160\145", -"\152\141\166\141\137\160\141\143\153\141\147\145", "\143\157\143\157\141\137\160\162\145\146\151\170", -"\143\163\150\141\162\160\137\156\141\155\145\163\160\141\143\145", "\160\150\160\137\156\141\155\145\163\160\141\143\145", -"\160\171\137\155\157\144\165\154\145", "\160\145\162\154\137\160\141\143\153\141\147\145", -"\162\165\142\171\137\156\141\155\145\163\160\141\143\145", "\163\155\141\154\154\164\141\154\153\137\143\141\164\145\147\157\162\171", -"\163\155\141\154\154\164\141\154\153\137\160\162\145\146\151\170", "\170\163\144\137\141\154\154", -"\170\163\144\137\157\160\164\151\157\156\141\154", "\170\163\144\137\156\151\154\154\141\142\154\145", -"\170\163\144\137\156\141\155\145\163\160\141\143\145", "\170\163\144\137\141\164\164\162\163", "\151\156\143\154\165\144\145", -"\166\157\151\144", "\142\157\157\154", "\142\171\164\145", "\151\61\66", "\151\63\62", -"\151\66\64", "\144\157\165\142\154\145", "\163\164\162\151\156\147", -"\163\154\151\163\164", "\163\145\156\165\155", "\155\141\160", "\154\151\163\164", "\163\145\164", -"\141\163\171\156\143", "\164\171\160\145\144\145\146", "\163\164\162\165\143\164", -"\145\170\143\145\160\164\151\157\156", "\145\170\164\145\156\144\163", "\164\150\162\157\167\163", -"\163\145\162\166\151\143\145", "\145\156\165\155", "\162\145\161\165\151\162\145\144", -"\157\160\164\151\157\156\141\154", null, null, null, null, null, null, null, "\54", "\73", "\173", "\175", "\75", -"\133", "\135", "\72", "\50", "\51", "\74", "\76", }; -public static final String[] lexStateNames = { - "DEFAULT", -}; -static final long[] jjtoToken = { - 0xff3fffffffffff01L, 0x3fL, -}; -static final long[] jjtoSkip = { - 0xfeL, 0x0L, -}; -protected SimpleCharStream input_stream; -private final int[] jjrounds = new int[35]; -private final int[] jjstateSet = new int[70]; -protected char curChar; -public thrift_grammarTokenManager(SimpleCharStream stream){ - if (SimpleCharStream.staticFlag) - throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer."); - input_stream = stream; -} -public thrift_grammarTokenManager(SimpleCharStream stream, int lexState){ - this(stream); - SwitchTo(lexState); -} -public void ReInit(SimpleCharStream stream) -{ - jjmatchedPos = jjnewStateCnt = 0; - curLexState = defaultLexState; - input_stream = stream; - ReInitRounds(); -} -private final void ReInitRounds() -{ - int i; - jjround = 0x80000001; - for (i = 35; i-- > 0;) - jjrounds[i] = 0x80000000; -} -public void ReInit(SimpleCharStream stream, int lexState) -{ - ReInit(stream); - SwitchTo(lexState); -} -public void SwitchTo(int lexState) -{ - if (lexState >= 1 || lexState < 0) - throw new TokenMgrError("Error: Ignoring invalid lexical state : " 
+ lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE); - else - curLexState = lexState; -} - -protected Token jjFillToken() -{ - Token t = Token.newToken(jjmatchedKind); - t.kind = jjmatchedKind; - String im = jjstrLiteralImages[jjmatchedKind]; - t.image = (im == null) ? input_stream.GetImage() : im; - t.beginLine = input_stream.getBeginLine(); - t.beginColumn = input_stream.getBeginColumn(); - t.endLine = input_stream.getEndLine(); - t.endColumn = input_stream.getEndColumn(); - return t; -} - -int curLexState = 0; -int defaultLexState = 0; -int jjnewStateCnt; -int jjround; -int jjmatchedPos; -int jjmatchedKind; - -public Token getNextToken() -{ - int kind; - Token specialToken = null; - Token matchedToken; - int curPos = 0; - - EOFLoop : - for (;;) - { - try - { - curChar = input_stream.BeginToken(); - } - catch(java.io.IOException e) - { - jjmatchedKind = 0; - matchedToken = jjFillToken(); - return matchedToken; - } - - try { input_stream.backup(0); - while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L) - curChar = input_stream.BeginToken(); - } - catch (java.io.IOException e1) { continue EOFLoop; } - jjmatchedKind = 0x7fffffff; - jjmatchedPos = 0; - curPos = jjMoveStringLiteralDfa0_0(); - if (jjmatchedKind != 0x7fffffff) - { - if (jjmatchedPos + 1 < curPos) - input_stream.backup(curPos - jjmatchedPos - 1); - if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) - { - matchedToken = jjFillToken(); - return matchedToken; - } - else - { - continue EOFLoop; - } - } - int error_line = input_stream.getEndLine(); - int error_column = input_stream.getEndColumn(); - String error_after = null; - boolean EOFSeen = false; - try { input_stream.readChar(); input_stream.backup(1); } - catch (java.io.IOException e1) { - EOFSeen = true; - error_after = curPos <= 1 ? "" : input_stream.GetImage(); - if (curChar == '\n' || curChar == '\r') { - error_line++; - error_column = 0; - } - else - error_column++; - } - if (!EOFSeen) { - input_stream.backup(1); - error_after = curPos <= 1 ? "" : input_stream.GetImage(); - } - throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR); - } -} - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeExtends.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeExtends.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeExtends.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeExtends.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeExtends extends SimpleNode { - public DynamicSerDeExtends(int id) { - super(id); - } - - public DynamicSerDeExtends(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/SimpleCharStream.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/SimpleCharStream.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/SimpleCharStream.java (working copy) @@ -1,457 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 4.0 */ -package org.apache.hadoop.hive.serde.dynamic_type; - -/** - * An implementation of interface CharStream, where the stream is assumed to - * contain only ASCII characters (without unicode processing). 
- */ - -public class SimpleCharStream -{ - public static final boolean staticFlag = false; - int bufsize; - int available; - int tokenBegin; - public int bufpos = -1; - protected int bufline[]; - protected int bufcolumn[]; - - protected int column = 0; - protected int line = 1; - - protected boolean prevCharIsCR = false; - protected boolean prevCharIsLF = false; - - protected java.io.Reader inputStream; - - protected char[] buffer; - protected int maxNextCharInd = 0; - protected int inBuf = 0; - protected int tabSize = 8; - - protected void setTabSize(int i) { tabSize = i; } - protected int getTabSize(int i) { return tabSize; } - - - protected void ExpandBuff(boolean wrapAround) - { - char[] newbuffer = new char[bufsize + 2048]; - int newbufline[] = new int[bufsize + 2048]; - int newbufcolumn[] = new int[bufsize + 2048]; - - try - { - if (wrapAround) - { - System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); - System.arraycopy(buffer, 0, newbuffer, - bufsize - tokenBegin, bufpos); - buffer = newbuffer; - - System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); - System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); - bufline = newbufline; - - System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); - System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); - bufcolumn = newbufcolumn; - - maxNextCharInd = (bufpos += (bufsize - tokenBegin)); - } - else - { - System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); - buffer = newbuffer; - - System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); - bufline = newbufline; - - System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); - bufcolumn = newbufcolumn; - - maxNextCharInd = (bufpos -= tokenBegin); - } - } - catch (Throwable t) - { - throw new Error(t.getMessage()); - } - - - bufsize += 2048; - available = bufsize; - tokenBegin = 0; - } - - protected void FillBuff() throws java.io.IOException - { - if (maxNextCharInd == available) - { - if (available == bufsize) - { - if (tokenBegin > 2048) - { - bufpos = maxNextCharInd = 0; - available = tokenBegin; - } - else if (tokenBegin < 0) - bufpos = maxNextCharInd = 0; - else - ExpandBuff(false); - } - else if (available > tokenBegin) - available = bufsize; - else if ((tokenBegin - available) < 2048) - ExpandBuff(true); - else - available = tokenBegin; - } - - int i; - try { - if ((i = inputStream.read(buffer, maxNextCharInd, - available - maxNextCharInd)) == -1) - { - inputStream.close(); - throw new java.io.IOException(); - } - else - maxNextCharInd += i; - return; - } - catch(java.io.IOException e) { - --bufpos; - backup(0); - if (tokenBegin == -1) - tokenBegin = bufpos; - throw e; - } - } - - public char BeginToken() throws java.io.IOException - { - tokenBegin = -1; - char c = readChar(); - tokenBegin = bufpos; - - return c; - } - - protected void UpdateLineColumn(char c) - { - column++; - - if (prevCharIsLF) - { - prevCharIsLF = false; - line += (column = 1); - } - else if (prevCharIsCR) - { - prevCharIsCR = false; - if (c == '\n') - { - prevCharIsLF = true; - } - else - line += (column = 1); - } - - switch (c) - { - case '\r' : - prevCharIsCR = true; - break; - case '\n' : - prevCharIsLF = true; - break; - case '\t' : - column--; - column += (tabSize - (column % tabSize)); - break; - default : - break; - } - - bufline[bufpos] = line; - bufcolumn[bufpos] = column; - } - - public char readChar() throws java.io.IOException - { - if 
(inBuf > 0) - { - --inBuf; - - if (++bufpos == bufsize) - bufpos = 0; - - return buffer[bufpos]; - } - - if (++bufpos >= maxNextCharInd) - FillBuff(); - - char c = buffer[bufpos]; - - UpdateLineColumn(c); - return (c); - } - - /** - * @deprecated - * @see #getEndColumn - */ - - public int getColumn() { - return bufcolumn[bufpos]; - } - - /** - * @deprecated - * @see #getEndLine - */ - - public int getLine() { - return bufline[bufpos]; - } - - public int getEndColumn() { - return bufcolumn[bufpos]; - } - - public int getEndLine() { - return bufline[bufpos]; - } - - public int getBeginColumn() { - return bufcolumn[tokenBegin]; - } - - public int getBeginLine() { - return bufline[tokenBegin]; - } - - public void backup(int amount) { - - inBuf += amount; - if ((bufpos -= amount) < 0) - bufpos += bufsize; - } - - public SimpleCharStream(java.io.Reader dstream, int startline, - int startcolumn, int buffersize) - { - inputStream = dstream; - line = startline; - column = startcolumn - 1; - - available = bufsize = buffersize; - buffer = new char[buffersize]; - bufline = new int[buffersize]; - bufcolumn = new int[buffersize]; - } - - public SimpleCharStream(java.io.Reader dstream, int startline, - int startcolumn) - { - this(dstream, startline, startcolumn, 4096); - } - - public SimpleCharStream(java.io.Reader dstream) - { - this(dstream, 1, 1, 4096); - } - public void ReInit(java.io.Reader dstream, int startline, - int startcolumn, int buffersize) - { - inputStream = dstream; - line = startline; - column = startcolumn - 1; - - if (buffer == null || buffersize != buffer.length) - { - available = bufsize = buffersize; - buffer = new char[buffersize]; - bufline = new int[buffersize]; - bufcolumn = new int[buffersize]; - } - prevCharIsLF = prevCharIsCR = false; - tokenBegin = inBuf = maxNextCharInd = 0; - bufpos = -1; - } - - public void ReInit(java.io.Reader dstream, int startline, - int startcolumn) - { - ReInit(dstream, startline, startcolumn, 4096); - } - - public void ReInit(java.io.Reader dstream) - { - ReInit(dstream, 1, 1, 4096); - } - public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, - int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException - { - this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); - } - - public SimpleCharStream(java.io.InputStream dstream, int startline, - int startcolumn, int buffersize) - { - this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); - } - - public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, - int startcolumn) throws java.io.UnsupportedEncodingException - { - this(dstream, encoding, startline, startcolumn, 4096); - } - - public SimpleCharStream(java.io.InputStream dstream, int startline, - int startcolumn) - { - this(dstream, startline, startcolumn, 4096); - } - - public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException - { - this(dstream, encoding, 1, 1, 4096); - } - - public SimpleCharStream(java.io.InputStream dstream) - { - this(dstream, 1, 1, 4096); - } - - public void ReInit(java.io.InputStream dstream, String encoding, int startline, - int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException - { - ReInit(encoding == null ? 
new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); - } - - public void ReInit(java.io.InputStream dstream, int startline, - int startcolumn, int buffersize) - { - ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); - } - - public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException - { - ReInit(dstream, encoding, 1, 1, 4096); - } - - public void ReInit(java.io.InputStream dstream) - { - ReInit(dstream, 1, 1, 4096); - } - public void ReInit(java.io.InputStream dstream, String encoding, int startline, - int startcolumn) throws java.io.UnsupportedEncodingException - { - ReInit(dstream, encoding, startline, startcolumn, 4096); - } - public void ReInit(java.io.InputStream dstream, int startline, - int startcolumn) - { - ReInit(dstream, startline, startcolumn, 4096); - } - public String GetImage() - { - if (bufpos >= tokenBegin) - return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); - else - return new String(buffer, tokenBegin, bufsize - tokenBegin) + - new String(buffer, 0, bufpos + 1); - } - - public char[] GetSuffix(int len) - { - char[] ret = new char[len]; - - if ((bufpos + 1) >= len) - System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); - else - { - System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, - len - bufpos - 1); - System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); - } - - return ret; - } - - public void Done() - { - buffer = null; - bufline = null; - bufcolumn = null; - } - - /** - * Method to adjust line and column numbers for the start of a token. - */ - public void adjustBeginLineColumn(int newLine, int newCol) - { - int start = tokenBegin; - int len; - - if (bufpos >= tokenBegin) - { - len = bufpos - tokenBegin + inBuf + 1; - } - else - { - len = bufsize - tokenBegin + bufpos + 1 + inBuf; - } - - int i = 0, j = 0, k = 0; - int nextColDiff = 0, columnDiff = 0; - - while (i < len && - bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) - { - bufline[j] = newLine; - nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; - bufcolumn[j] = newCol + columnDiff; - columnDiff = nextColDiff; - i++; - } - - if (i < len) - { - bufline[j] = newLine++; - bufcolumn[j] = newCol + columnDiff; - - while (i++ < len) - { - if (bufline[j = start % bufsize] != bufline[++start % bufsize]) - bufline[j] = newLine++; - else - bufline[j] = newLine; - } - } - - line = bufline[j]; - column = bufcolumn[j]; - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeAsync.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeAsync.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeAsync.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeAsync.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeAsync extends SimpleNode { - public DynamicSerDeAsync(int id) { - super(id); - } - - public DynamicSerDeAsync(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammar.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammar.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammar.java (working copy) @@ -1,2309 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree&JavaCC: Do not edit this line. thrift_grammar.java */ -package org.apache.hadoop.hive.serde.dynamic_type; - -import java.util.*; -import java.io.*; -import java.net.*; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.transport.*; -import org.apache.hadoop.hive.serde.thrift.*; -import org.apache.hadoop.hive.serde.*; - -public class thrift_grammar/*@bgen(jjtree)*/implements thrift_grammarTreeConstants, thrift_grammarConstants {/*@bgen(jjtree)*/ - protected JJTthrift_grammarState jjtree = new JJTthrift_grammarState(); - private List include_path = null; - - // for computing the autogenerated field ids in thrift - private int field_val; - - // store types and tables - // separately because one cannot use a table (ie service.method) as a Struct like type. - protected Map types; - protected Map tables; - - // system include path - final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" }; - - // need three params to differentiate between this and 2 param method auto generated since - // some calls in the autogenerated code use null param for 2nd param and thus ambiguous. 
- protected thrift_grammar(InputStream is, List include_path, boolean junk) { - this(is,null); - this.types = new HashMap () ; - this.tables = new HashMap () ; - this.include_path = include_path; - this.field_val = -1; - } - - // find the file on the include path - private static File findFile(String fname, List include_path) { - for(String path: include_path) { - final String full = path + "/" + fname; - File f = new File(full); - if(f.exists()) { - return f; - } - } - return null; - } - - public static void main(String args[]) { - String filename = null; - List include_path = new ArrayList(); - - for(String path: default_include_path) { - include_path.add(path); - } - for(int i = 0; i < args.length; i++) { - String arg = args[i]; - if(arg.equals("--include") && i + 1 < args.length) { - include_path.add(args[++i]); - } - if(arg.equals("--file") && i + 1 < args.length) { - filename = args[++i]; - } - } - - InputStream is = System.in; - if(filename != null) { - try { - is = new FileInputStream(findFile(filename, include_path)); - } catch(IOException e) { - } - } - thrift_grammar t = new thrift_grammar(is,include_path,false); - - try { - t.Start(); - } catch (Exception e) { - System.out.println("Parse error."); - System.out.println(e.getMessage()); - e.printStackTrace(); - } - } - - final public SimpleNode Start() throws ParseException { - /*@bgen(jjtree) Start */ - DynamicSerDeStart jjtn000 = new DynamicSerDeStart(JJTSTART); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - HeaderList(); - label_1: - while (true) { - Definition(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_const: - case tok_senum: - case tok_typedef: - case tok_struct: - case tok_exception: - case tok_service: - case tok_enum: - ; - break; - default: - jj_la1[0] = jj_gen; - break label_1; - } - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode HeaderList() throws ParseException { - /*@bgen(jjtree) HeaderList */ - DynamicSerDeHeaderList jjtn000 = new DynamicSerDeHeaderList(JJTHEADERLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - label_2: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_namespace: - case tok_cpp_namespace: - case tok_cpp_include: - case tok_java_package: - case tok_cocoa_prefix: - case tok_csharp_namespace: - case tok_php_namespace: - case tok_py_module: - case tok_perl_package: - case tok_ruby_namespace: - case tok_smalltalk_category: - case tok_smalltalk_prefix: - case tok_xsd_namespace: - case tok_include: - ; - break; - default: - jj_la1[1] = jj_gen; - break label_2; - } - Header(); - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw 
(ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Header() throws ParseException { - /*@bgen(jjtree) Header */ - DynamicSerDeHeader jjtn000 = new DynamicSerDeHeader(JJTHEADER); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_include: - Include(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_namespace: - case tok_cpp_namespace: - case tok_cpp_include: - case tok_java_package: - case tok_cocoa_prefix: - case tok_csharp_namespace: - case tok_php_namespace: - case tok_py_module: - case tok_perl_package: - case tok_ruby_namespace: - case tok_smalltalk_category: - case tok_smalltalk_prefix: - case tok_xsd_namespace: - Namespace(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[2] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Namespace() throws ParseException { - /*@bgen(jjtree) Namespace */ - DynamicSerDeNamespace jjtn000 = new DynamicSerDeNamespace(JJTNAMESPACE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_namespace: - jj_consume_token(tok_namespace); - jj_consume_token(IDENTIFIER); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_cpp_namespace: - jj_consume_token(tok_cpp_namespace); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_cpp_include: - jj_consume_token(tok_cpp_include); - jj_consume_token(tok_literal); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_php_namespace: - jj_consume_token(tok_php_namespace); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_py_module: - jj_consume_token(tok_py_module); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_perl_package: - jj_consume_token(tok_perl_package); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_ruby_namespace: - jj_consume_token(tok_ruby_namespace); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_smalltalk_category: - jj_consume_token(tok_smalltalk_category); - jj_consume_token(tok_st_identifier); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_smalltalk_prefix: - 
jj_consume_token(tok_smalltalk_prefix); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_java_package: - jj_consume_token(tok_java_package); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_cocoa_prefix: - jj_consume_token(tok_cocoa_prefix); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_xsd_namespace: - jj_consume_token(tok_xsd_namespace); - jj_consume_token(tok_literal); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_csharp_namespace: - jj_consume_token(tok_csharp_namespace); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[3] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Include() throws ParseException { - /*@bgen(jjtree) Include */ - DynamicSerDeInclude jjtn000 = new DynamicSerDeInclude(JJTINCLUDE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000);String fname; - boolean found = false; - try { - jj_consume_token(tok_include); - fname = jj_consume_token(tok_literal).image; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - // bugbug somewhat fragile below substring expression - fname = fname.substring(1,fname.length() - 1); - - // try to find the file on the include path - File f = thrift_grammar.findFile(fname, this.include_path); - if(f != null) { - found = true; - try { - FileInputStream fis = new FileInputStream(f); - thrift_grammar t = new thrift_grammar(fis,this.include_path, false); - t.Start(); - fis.close(); - found = true; - // add in what we found to our type and table tables. 
- this.tables.putAll(t.tables); - this.types.putAll(t.types); - } catch (Exception e) { - System.out.println("File: " + fname + " - Oops."); - System.out.println(e.getMessage()); - e.printStackTrace(); - } - } - if(!found) { - {if (true) throw new RuntimeException("include file not found: " + fname);} - } - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Definition() throws ParseException { - /*@bgen(jjtree) Definition */ - DynamicSerDeDefinition jjtn000 = new DynamicSerDeDefinition(JJTDEFINITION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_const: - Const(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_service: - Service(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_senum: - case tok_typedef: - case tok_struct: - case tok_exception: - case tok_enum: - TypeDefinition(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[4] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode TypeDefinition() throws ParseException { - /*@bgen(jjtree) TypeDefinition */ - DynamicSerDeTypeDefinition jjtn000 = new DynamicSerDeTypeDefinition(JJTTYPEDEFINITION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_typedef: - Typedef(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_enum: - Enum(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_senum: - Senum(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_struct: - Struct(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_exception: - Xception(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[5] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypedef Typedef() throws ParseException { - /*@bgen(jjtree) Typedef */ - DynamicSerDeTypedef jjtn000 = new DynamicSerDeTypedef(JJTTYPEDEF); - boolean 
jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_typedef); - DefinitionType(); - jjtn000.name = jj_consume_token(IDENTIFIER).image; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - // store the type for later retrieval - this.types.put(jjtn000.name, jjtn000); - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - -// returning void because we ignore this production. - final public void CommaOrSemicolon() throws ParseException { - /*@bgen(jjtree) CommaOrSemicolon */ - DynamicSerDeCommaOrSemicolon jjtn000 = new DynamicSerDeCommaOrSemicolon(JJTCOMMAORSEMICOLON); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - jj_consume_token(58); - break; - case 59: - jj_consume_token(59); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - default: - jj_la1[6] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - } - - final public SimpleNode Enum() throws ParseException { - /*@bgen(jjtree) Enum */ - DynamicSerDeEnum jjtn000 = new DynamicSerDeEnum(JJTENUM); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_enum); - jj_consume_token(IDENTIFIER); - jj_consume_token(60); - EnumDefList(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode EnumDefList() throws ParseException { - /*@bgen(jjtree) EnumDefList */ - DynamicSerDeEnumDefList jjtn000 = new DynamicSerDeEnumDefList(JJTENUMDEFLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - label_3: - while (true) { - EnumDef(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case IDENTIFIER: - ; - break; - default: - jj_la1[7] = jj_gen; - break label_3; - } - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode EnumDef() throws ParseException { - /*@bgen(jjtree) EnumDef */ 
- DynamicSerDeEnumDef jjtn000 = new DynamicSerDeEnumDef(JJTENUMDEF); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(IDENTIFIER); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 62: - jj_consume_token(62); - jj_consume_token(tok_int_constant); - break; - default: - jj_la1[8] = jj_gen; - ; - } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[9] = jj_gen; - ; - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Senum() throws ParseException { - /*@bgen(jjtree) Senum */ - DynamicSerDeSenum jjtn000 = new DynamicSerDeSenum(JJTSENUM); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_senum); - jj_consume_token(IDENTIFIER); - jj_consume_token(60); - SenumDefList(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode SenumDefList() throws ParseException { - /*@bgen(jjtree) SenumDefList */ - DynamicSerDeSenumDefList jjtn000 = new DynamicSerDeSenumDefList(JJTSENUMDEFLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - label_4: - while (true) { - SenumDef(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_literal: - ; - break; - default: - jj_la1[10] = jj_gen; - break label_4; - } - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode SenumDef() throws ParseException { - /*@bgen(jjtree) SenumDef */ - DynamicSerDeSenumDef jjtn000 = new DynamicSerDeSenumDef(JJTSENUMDEF); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_literal); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[11] = jj_gen; - ; - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if 
(jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Const() throws ParseException { - /*@bgen(jjtree) Const */ - DynamicSerDeConst jjtn000 = new DynamicSerDeConst(JJTCONST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_const); - FieldType(); - jj_consume_token(IDENTIFIER); - jj_consume_token(62); - ConstValue(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[12] = jj_gen; - ; - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode ConstValue() throws ParseException { - /*@bgen(jjtree) ConstValue */ - DynamicSerDeConstValue jjtn000 = new DynamicSerDeConstValue(JJTCONSTVALUE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_int_constant: - jj_consume_token(tok_int_constant); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - case tok_double_constant: - jj_consume_token(tok_double_constant); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - case tok_literal: - jj_consume_token(tok_literal); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - case IDENTIFIER: - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - case 63: - ConstList(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - case 60: - ConstMap(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[13] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode ConstList() throws ParseException { - /*@bgen(jjtree) ConstList */ - DynamicSerDeConstList jjtn000 = new DynamicSerDeConstList(JJTCONSTLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(63); - ConstListContents(); - jj_consume_token(64); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch 
(Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode ConstListContents() throws ParseException { - /*@bgen(jjtree) ConstListContents */ - DynamicSerDeConstListContents jjtn000 = new DynamicSerDeConstListContents(JJTCONSTLISTCONTENTS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - label_5: - while (true) { - ConstValue(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[14] = jj_gen; - ; - } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_int_constant: - case tok_double_constant: - case IDENTIFIER: - case tok_literal: - case 60: - case 63: - ; - break; - default: - jj_la1[15] = jj_gen; - break label_5; - } - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode ConstMap() throws ParseException { - /*@bgen(jjtree) ConstMap */ - DynamicSerDeConstMap jjtn000 = new DynamicSerDeConstMap(JJTCONSTMAP); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(60); - ConstMapContents(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode ConstMapContents() throws ParseException { - /*@bgen(jjtree) ConstMapContents */ - DynamicSerDeConstMapContents jjtn000 = new DynamicSerDeConstMapContents(JJTCONSTMAPCONTENTS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_int_constant: - case tok_double_constant: - case IDENTIFIER: - case tok_literal: - case 60: - case 63: - label_6: - while (true) { - ConstValue(); - jj_consume_token(65); - ConstValue(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[16] = jj_gen; - ; - } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_int_constant: - case tok_double_constant: - case IDENTIFIER: - case tok_literal: - case 60: - case 63: - ; - break; - default: - jj_la1[17] = jj_gen; - break label_6; - } - } - 
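// ConstMapContents above loops over "ConstValue : ConstValue" pairs, each
// optionally followed by a comma or semicolon (token 65 is ":"), so an
// illustrative Thrift constant like
//
//   const map<string,i32> SIZES = { "small": 1, "large": 10 }
//
// parses as two key/value pairs.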
jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - default: - jj_la1[18] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeStruct Struct() throws ParseException { - /*@bgen(jjtree) Struct */ - DynamicSerDeStruct jjtn000 = new DynamicSerDeStruct(JJTSTRUCT); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_struct); - jjtn000.name = jj_consume_token(IDENTIFIER).image; - jj_consume_token(60); - FieldList(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - this.types.put(jjtn000.name,jjtn000); - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Xception() throws ParseException { - /*@bgen(jjtree) Xception */ - DynamicSerDeXception jjtn000 = new DynamicSerDeXception(JJTXCEPTION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_exception); - jj_consume_token(IDENTIFIER); - jj_consume_token(60); - FieldList(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Service() throws ParseException { - /*@bgen(jjtree) Service */ - DynamicSerDeService jjtn000 = new DynamicSerDeService(JJTSERVICE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_service); - jj_consume_token(IDENTIFIER); - Extends(); - jj_consume_token(60); - FlagArgs(); - label_7: - while (true) { - Function(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_void: - case tok_bool: - case tok_i16: - case tok_i32: - case tok_i64: - case tok_double: - case tok_string: - case tok_map: - case tok_list: - case tok_set: - case tok_async: - case IDENTIFIER: - ; - break; - default: - jj_la1[19] = jj_gen; - break label_7; - } - } - UnflagArgs(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - // at some point, these should be inserted as a "db" - {if (true) return jjtn000;} - } 
catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode FlagArgs() throws ParseException { - /*@bgen(jjtree) FlagArgs */ - DynamicSerDeFlagArgs jjtn000 = new DynamicSerDeFlagArgs(JJTFLAGARGS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode UnflagArgs() throws ParseException { - /*@bgen(jjtree) UnflagArgs */ - DynamicSerDeUnflagArgs jjtn000 = new DynamicSerDeUnflagArgs(JJTUNFLAGARGS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Extends() throws ParseException { - /*@bgen(jjtree) Extends */ - DynamicSerDeExtends jjtn000 = new DynamicSerDeExtends(JJTEXTENDS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_extends: - jj_consume_token(tok_extends); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[20] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeFunction Function() throws ParseException { - /*@bgen(jjtree) Function */ - DynamicSerDeFunction jjtn000 = new DynamicSerDeFunction(JJTFUNCTION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - Async(); - FunctionType(); - // the name of the function/table - jjtn000.name = jj_consume_token(IDENTIFIER).image; - jj_consume_token(66); - FieldList(); - jj_consume_token(67); - Throws(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[21] = jj_gen; - ; - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - this.tables.put(jjtn000.name, jjtn000); - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public void Async() throws ParseException { - /*@bgen(jjtree) Async */ - DynamicSerDeAsync jjtn000 = new DynamicSerDeAsync(JJTASYNC); - boolean jjtc000 = true; - 
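// Function above registers each parsed function in this.tables under its
// name; for an illustrative Thrift service such as
//
//   service ThriftHive { list<string> fetchAll() }
//
// tables.get("fetchAll") afterwards returns the DynamicSerDeFunction node,
// which is why the comment in Function() calls it "the name of the function/table".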
jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_async: - jj_consume_token(tok_async); - break; - default: - jj_la1[22] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - } - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - } - - final public void Throws() throws ParseException { - /*@bgen(jjtree) Throws */ - DynamicSerDeThrows jjtn000 = new DynamicSerDeThrows(JJTTHROWS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_throws: - jj_consume_token(tok_throws); - jj_consume_token(66); - FieldList(); - jj_consume_token(67); - break; - default: - jj_la1[23] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - } - -// nothing special - just use the DynamicSerDeFieldList's children methods to access the fields - final public DynamicSerDeFieldList FieldList() throws ParseException { - /*@bgen(jjtree) FieldList */ - DynamicSerDeFieldList jjtn000 = new DynamicSerDeFieldList(JJTFIELDLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000);this.field_val = -1; - try { - label_8: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_bool: - case tok_i16: - case tok_i32: - case tok_i64: - case tok_double: - case tok_string: - case tok_map: - case tok_list: - case tok_set: - case tok_required: - case tok_optional: - case tok_int_constant: - case IDENTIFIER: - ; - break; - default: - jj_la1[24] = jj_gen; - break label_8; - } - Field(); - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeField Field() throws ParseException { - /*@bgen(jjtree) Field */ - DynamicSerDeField jjtn000 = new DynamicSerDeField(JJTFIELD); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000);String fidnum = ""; - String fid; - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_int_constant: - fidnum = jj_consume_token(tok_int_constant).image; - jj_consume_token(65); - break; - default: - jj_la1[25] = jj_gen; - ; - } - FieldRequiredness(); - FieldType(); - // the name of the field - not optional - jjtn000.name = jj_consume_token(IDENTIFIER).image; - FieldValue(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[26] = jj_gen; - ; - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - if(fidnum.length() > 0) { - int fidInt = Integer.valueOf(fidnum); - jjtn000.fieldid = fidInt; - } else { - jjtn000.fieldid = this.field_val--; - } - {if (true) 
return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode FieldRequiredness() throws ParseException { - /*@bgen(jjtree) FieldRequiredness */ - DynamicSerDeFieldRequiredness jjtn000 = new DynamicSerDeFieldRequiredness(JJTFIELDREQUIREDNESS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_required: - jj_consume_token(tok_required); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_optional: - jj_consume_token(tok_optional); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[27] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode FieldValue() throws ParseException { - /*@bgen(jjtree) FieldValue */ - DynamicSerDeFieldValue jjtn000 = new DynamicSerDeFieldValue(JJTFIELDVALUE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 62: - jj_consume_token(62); - ConstValue(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[28] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode DefinitionType() throws ParseException { - /*@bgen(jjtree) DefinitionType */ - DynamicSerDeDefinitionType jjtn000 = new DynamicSerDeDefinitionType(JJTDEFINITIONTYPE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_string: - TypeString(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_bool: - TypeBool(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i16: - Typei16(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i32: - Typei32(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i64: - Typei64(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_double: - TypeDouble(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - 
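// Field id assignment (see Field() above): an explicit "<n> :" prefix becomes
// the fieldid, while fields without one draw from this.field_val, which
// FieldList resets to -1 and Field decrements, so in an illustrative
//
//   struct S { 1: i32 a, string b, string c }
//
// a gets fieldid 1 while b and c get -1 and -2.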
break; - case tok_map: - TypeMap(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_set: - TypeSet(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_list: - TypeList(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[29] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public void FunctionType() throws ParseException { - /*@bgen(jjtree) FunctionType */ - DynamicSerDeFunctionType jjtn000 = new DynamicSerDeFunctionType(JJTFUNCTIONTYPE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_bool: - case tok_i16: - case tok_i32: - case tok_i64: - case tok_double: - case tok_string: - case tok_map: - case tok_list: - case tok_set: - case IDENTIFIER: - FieldType(); - break; - case tok_void: - jj_consume_token(tok_void); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - default: - jj_la1[30] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - } - - final public DynamicSerDeFieldType FieldType() throws ParseException { - /*@bgen(jjtree) FieldType */ - DynamicSerDeFieldType jjtn000 = new DynamicSerDeFieldType(JJTFIELDTYPE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_string: - TypeString(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_bool: - TypeBool(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i16: - Typei16(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i32: - Typei32(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i64: - Typei64(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_double: - TypeDouble(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_map: - TypeMap(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_set: - TypeSet(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_list: - TypeList(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return 
jjtn000;} - break; - case IDENTIFIER: - jjtn000.name = jj_consume_token(IDENTIFIER).image; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[31] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeString TypeString() throws ParseException { - /*@bgen(jjtree) TypeString */ - DynamicSerDeTypeString jjtn000 = new DynamicSerDeTypeString(JJTTYPESTRING); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_string); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeByte TypeByte() throws ParseException { - /*@bgen(jjtree) TypeByte */ - DynamicSerDeTypeByte jjtn000 = new DynamicSerDeTypeByte(JJTTYPEBYTE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_byte); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypei16 Typei16() throws ParseException { - /*@bgen(jjtree) Typei16 */ - DynamicSerDeTypei16 jjtn000 = new DynamicSerDeTypei16(JJTTYPEI16); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_i16); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypei32 Typei32() throws ParseException { - /*@bgen(jjtree) Typei32 */ - DynamicSerDeTypei32 jjtn000 = new DynamicSerDeTypei32(JJTTYPEI32); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_i32); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypei64 Typei64() throws ParseException { - /*@bgen(jjtree) Typei64 */ - DynamicSerDeTypei64 jjtn000 = new DynamicSerDeTypei64(JJTTYPEI64); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_i64); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeDouble TypeDouble() throws ParseException { - /*@bgen(jjtree) TypeDouble */ - DynamicSerDeTypeDouble jjtn000 = new DynamicSerDeTypeDouble(JJTTYPEDOUBLE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - 
jj_consume_token(tok_double); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeBool TypeBool() throws ParseException { - /*@bgen(jjtree) TypeBool */ - DynamicSerDeTypeBool jjtn000 = new DynamicSerDeTypeBool(JJTTYPEBOOL); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_bool); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeMap TypeMap() throws ParseException { - /*@bgen(jjtree) TypeMap */ - DynamicSerDeTypeMap jjtn000 = new DynamicSerDeTypeMap(JJTTYPEMAP); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_map); - jj_consume_token(68); - FieldType(); - jj_consume_token(58); - FieldType(); - jj_consume_token(69); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeSet TypeSet() throws ParseException { - /*@bgen(jjtree) TypeSet */ - DynamicSerDeTypeSet jjtn000 = new DynamicSerDeTypeSet(JJTTYPESET); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_set); - jj_consume_token(68); - FieldType(); - jj_consume_token(69); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeList TypeList() throws ParseException { - /*@bgen(jjtree) TypeList */ - DynamicSerDeTypeList jjtn000 = new DynamicSerDeTypeList(JJTTYPELIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_list); - jj_consume_token(68); - FieldType(); - jj_consume_token(69); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in 
function"); - } - - public thrift_grammarTokenManager token_source; - SimpleCharStream jj_input_stream; - public Token token, jj_nt; - private int jj_ntk; - private int jj_gen; - final private int[] jj_la1 = new int[32]; - static private int[] jj_la1_0; - static private int[] jj_la1_1; - static private int[] jj_la1_2; - static { - jj_la1_0(); - jj_la1_1(); - jj_la1_2(); - } - private static void jj_la1_0() { - jj_la1_0 = new int[] {0x100,0xa3fee00,0xa3fee00,0x23fee00,0x100,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xb0000000,0x0,0x0,0x0,0x0,0xa0000000,0x0,0x0,0x0,0x0,0xa0000000,0xb0000000,0xa0000000,}; - } - private static void jj_la1_1() { - jj_la1_1 = new int[] {0x19c20,0x0,0x0,0x0,0x19c20,0x11c20,0xc000000,0x200000,0x40000000,0xc000000,0x1000000,0xc000000,0xc000000,0x91380000,0xc000000,0x91380000,0xc000000,0x91380000,0x91380000,0x2003cf,0x2000,0xc000000,0x200,0x4000,0x2e01cf,0x80000,0xc000000,0x60000,0x40000000,0x1cf,0x2001cf,0x2001cf,}; - } - private static void jj_la1_2() { - jj_la1_2 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; - } - - public thrift_grammar(java.io.InputStream stream) { - this(stream, null); - } - public thrift_grammar(java.io.InputStream stream, String encoding) { - try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } - token_source = new thrift_grammarTokenManager(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - public void ReInit(java.io.InputStream stream) { - ReInit(stream, null); - } - public void ReInit(java.io.InputStream stream, String encoding) { - try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } - token_source.ReInit(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jjtree.reset(); - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - public thrift_grammar(java.io.Reader stream) { - jj_input_stream = new SimpleCharStream(stream, 1, 1); - token_source = new thrift_grammarTokenManager(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - public void ReInit(java.io.Reader stream) { - jj_input_stream.ReInit(stream, 1, 1); - token_source.ReInit(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jjtree.reset(); - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - public thrift_grammar(thrift_grammarTokenManager tm) { - token_source = tm; - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - public void ReInit(thrift_grammarTokenManager tm) { - token_source = tm; - token = new Token(); - jj_ntk = -1; - jjtree.reset(); - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - final private Token jj_consume_token(int kind) throws ParseException { - Token oldToken; - if ((oldToken = token).next != null) token = token.next; - else token = token.next = token_source.getNextToken(); - jj_ntk = -1; - if (token.kind == kind) { - jj_gen++; - return token; - } - token = oldToken; - jj_kind = kind; - throw generateParseException(); - } - - final public Token getNextToken() { - if (token.next != null) token = token.next; - else token = token.next = token_source.getNextToken(); - jj_ntk = -1; - jj_gen++; - return token; - } - - 
-  final public Token getToken(int index) {
-    Token t = token;
-    for (int i = 0; i < index; i++) {
-      if (t.next != null) t = t.next;
-      else t = t.next = token_source.getNextToken();
-    }
-    return t;
-  }
-
-  final private int jj_ntk() {
-    if ((jj_nt=token.next) == null)
-      return (jj_ntk = (token.next=token_source.getNextToken()).kind);
-    else
-      return (jj_ntk = jj_nt.kind);
-  }
-
-  private java.util.Vector jj_expentries = new java.util.Vector();
-  private int[] jj_expentry;
-  private int jj_kind = -1;
-
-  public ParseException generateParseException() {
-    jj_expentries.removeAllElements();
-    boolean[] la1tokens = new boolean[70];
-    for (int i = 0; i < 70; i++) {
-      la1tokens[i] = false;
-    }
-    if (jj_kind >= 0) {
-      la1tokens[jj_kind] = true;
-      jj_kind = -1;
-    }
-    for (int i = 0; i < 32; i++) {
-      if (jj_la1[i] == jj_gen) {
-        for (int j = 0; j < 32; j++) {
-          if ((jj_la1_0[i] & (1< include_path = null;
-
-    // for computing the autogenerated field ids in thrift
-    private int field_val;
-
-    // store types and tables
-    // separately because one cannot use a table (ie service.method) as a Struct like type.
-    protected Map types;
-    protected Map tables;
-
-    // system include path
-    final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" };
-
-    // need three params to differentiate between this and 2 param method auto generated since
-    // some calls in the autogenerated code use null param for 2nd param and thus ambiguous.
-    protected thrift_grammar(InputStream is, List include_path, boolean junk) {
-        this(is,null);
-        this.types = new HashMap () ;
-        this.tables = new HashMap () ;
-        this.include_path = include_path;
-        this.field_val = -1;
-    }
-
-    // find the file on the include path
-    private static File findFile(String fname, List include_path) {
-        for(String path: include_path) {
-            final String full = path + "/" + fname;
-            File f = new File(full);
-            if(f.exists()) {
-                return f;
-            }
-        }
-        return null;
-    }
-
-    public static void main(String args[]) {
-        String filename = null;
-        List include_path = new ArrayList();
-
-        for(String path: default_include_path) {
-            include_path.add(path);
-        }
-        for(int i = 0; i < args.length; i++) {
-            String arg = args[i];
-            if(arg.equals("--include") && i + 1 < args.length) {
-                include_path.add(args[++i]);
-            }
-            if(arg.equals("--file") && i + 1 < args.length) {
-                filename = args[++i];
-            }
-        }
-
-        InputStream is = System.in;
-        if(filename != null) {
-            try {
-                is = new FileInputStream(findFile(filename, include_path));
-            } catch(IOException e) {
-            }
-        }
-        thrift_grammar t = new thrift_grammar(is,include_path,false);
-
-        try {
-            t.Start();
-        } catch (Exception e) {
-            System.out.println("Parse error.");
-            System.out.println(e.getMessage());
-            e.printStackTrace();
-        }
-    }
-}
-
-PARSER_END(thrift_grammar)
-
-
-
-SKIP :
-{
-  " "
-| "\t"
-| "\n"
-| "\r"
-| <"#"(~["\n"])* ("\n"|"\r"|"\r\n")>
-| <"//" (~["\n","\r"])* ("\n"|"\r"|"\r\n")>
-| <"/*" (~["*"])* "*" (~["/"] (~["*"])* "*")* "/">
-}
-
-
-/**
- * HELPER DEFINITIONS, COMMENTS, CONSTANTS, AND WHATNOT
- */
-
-TOKEN:
-{
-
-| 
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-|
-
-| 
-
-| 
-
-| 
-
-| 
-
-| 
-
-| 
-
-}
-
-TOKEN: {
-
-
-|
-)*"."()+(["e","E"](["+","-"])?()+)?>
-|
-(||"."|"_")*>
-|
-<#LETTER: (["a"-"z", "A"-"Z" ]) >
-|
-<#DIGIT: ["0"-"9"] >
-|
-
-|
-
-}
-
-
-SimpleNode Start() : {}
-{
-  HeaderList() (Definition())+
-  {
-    return jjtThis;
-  }
-}
-
-SimpleNode HeaderList() : {}
-{
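// From main() above: the grammar also runs standalone, reading stdin or a
// file resolved against the include path, e.g. (file name illustrative):
//
//   java org.apache.hadoop.hive.serde.dynamic_type.thrift_grammar \
//       --include /usr/local/include/thrift/if --file hive_metastore.thrift
//
// Without --file, input is taken from System.in.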
-  (Header())*
-  {
-    return jjtThis;
-  }
-
-}
-
-SimpleNode Header() : {}
-{
-  Include()
-  {
-    return jjtThis;
-  }
-| Namespace()
-  {
-    return jjtThis;
-  }
-}
-
-SimpleNode Namespace() : {}
-{
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-|
-
-{
-  return jjtThis;
-}
-}
-
-
-SimpleNode Include() : {
-    String fname;
-    boolean found = false;
-}
-{
-
-  fname=.image
-{
-    // bugbug somewhat fragile below substring expression
-    fname = fname.substring(1,fname.length() - 1);
-
-    // try to find the file on the include path
-    File f = thrift_grammar.findFile(fname, this.include_path);
-    if(f != null) {
-        found = true;
-        try {
-            FileInputStream fis = new FileInputStream(f);
-            thrift_grammar t = new thrift_grammar(fis,this.include_path, false);
-            t.Start();
-            fis.close();
-            found = true;
-            // add in what we found to our type and table tables.
-            this.tables.putAll(t.tables);
-            this.types.putAll(t.types);
-        } catch (Exception e) {
-            System.out.println("File: " + fname + " - Oops.");
-            System.out.println(e.getMessage());
-            e.printStackTrace();
-        }
-    }
-    if(!found) {
-        throw new RuntimeException("include file not found: " + fname);
-    }
-    return jjtThis;
-}
-}
-
-
-SimpleNode Definition() : {}
-{
-  Const()
-  {
-    return jjtThis;
-  }
-| Service()
-  {
-    return jjtThis;
-  }
-| TypeDefinition()
-  {
-    return jjtThis;
-  }
-}
-
-SimpleNode TypeDefinition() : {}
-{
-  Typedef()
-  {
-    return jjtThis;
-  }
-| Enum()
-  {
-    return jjtThis;
-  }
-| Senum()
-  {
-    return jjtThis;
-  }
-| Struct()
-  {
-    return jjtThis;
-  }
-| Xception()
-  {
-    return jjtThis;
-  }
-
-}
-
-DynamicSerDeTypedef Typedef() : {}
-{
-
-  DefinitionType()
-  jjtThis.name = .image
-  {
-    // store the type for later retrieval
-    this.types.put(jjtThis.name, jjtThis);
-    return jjtThis;
-  }
-}
-
-
-// returning void because we ignore this production.
-void CommaOrSemicolon() : {} -{ - "," -| - ";" -{ -} -} - -SimpleNode Enum() : {} -{ - "{" EnumDefList() "}" - { - return jjtThis; - } -} - -SimpleNode EnumDefList() : {} -{ - (EnumDef())+ - { - return jjtThis; - } -} - -SimpleNode EnumDef() : {} -{ - ["=" ] [CommaOrSemicolon()] - { - return jjtThis; - } -} - -SimpleNode Senum() : {} -{ - "{" SenumDefList() "}" - { - return jjtThis; - } -} - -SimpleNode SenumDefList() : {} -{ - (SenumDef())+ - { - return jjtThis; - } -} - -SimpleNode SenumDef() : {} -{ - [CommaOrSemicolon()] - { - return jjtThis; - } -} - - -SimpleNode Const() : {} -{ - FieldType() "=" ConstValue() [CommaOrSemicolon()] - { - return jjtThis; - } -} - -SimpleNode ConstValue() : {} -{ - - { - } -| - { - } -| - { - } -| - { - } -| ConstList() - { - } -| ConstMap() - { - return jjtThis; - } -} - -SimpleNode ConstList() : {} -{ - "[" ConstListContents() "]" - { - return jjtThis; - } -} - -SimpleNode ConstListContents() : {} -{ - (ConstValue() [CommaOrSemicolon()])+ - { - return jjtThis; - } -} - -SimpleNode ConstMap() : {} -{ - "{" ConstMapContents() "}" - { - return jjtThis; - } -} - -SimpleNode ConstMapContents() : {} -{ - (ConstValue() ":" ConstValue() [CommaOrSemicolon()])+ - { - } -| - { - return jjtThis; - } -} - -DynamicSerDeStruct Struct() : { - -} -{ - - jjtThis.name = .image - "{" - FieldList() - "}" - { - this.types.put(jjtThis.name,jjtThis); - return jjtThis; - } -} - - -SimpleNode Xception() : {} -{ - "{" FieldList() "}" - { - return jjtThis; - } -} - - -SimpleNode Service() : {} -{ - - - Extends() - "{" - FlagArgs() - (Function())+ - UnflagArgs() - "}" - { - // at some point, these should be inserted as a "db" - return jjtThis; - } -} - -SimpleNode FlagArgs() : {} -{ - { - return jjtThis; - } -} - -SimpleNode UnflagArgs() : {} -{ - { - return jjtThis; - } -} - -SimpleNode Extends() : {} -{ - - { - return jjtThis; - } -| - { - return jjtThis; - } -} - - -DynamicSerDeFunction Function() : {} -{ - // metastore ignores async and type - Async() - FunctionType() - - // the name of the function/table - jjtThis.name = .image - "(" - FieldList() - ")" - Throws() - [CommaOrSemicolon()] - - { - this.tables.put(jjtThis.name, jjtThis); - return jjtThis; - } -} - -void Async() : {} -{ - -| -{} -} - -void Throws() : {} -{ - "(" FieldList() ")" -| -{} -} - - -// nothing special - just use the DynamicSerDeFieldList's children methods to access the fields -DynamicSerDeFieldList FieldList() : { - this.field_val = -1; -} -{ - (Field())* { - return jjtThis; - } -} - - -DynamicSerDeField Field() : { - - String fidnum = ""; - String fid; -} -{ - - // parse the field id which is optional - [fidnum=.image ":"] - - // is this field required or optional? default is optional - FieldRequiredness() - - // field type - obviously not optional - FieldType() - - // the name of the field - not optional - jjtThis.name = .image - - // does it have = some value? 
- FieldValue() - - // take it or leave it - [CommaOrSemicolon()] - - { - if(fidnum.length() > 0) { - int fidInt = Integer.valueOf(fidnum); - jjtThis.fieldid = fidInt; - } else { - jjtThis.fieldid = this.field_val--; - } - return jjtThis; - } -} - - - -SimpleNode FieldRequiredness() : {} -{ - - { - return jjtThis; - } -| - { - return jjtThis; - } -| - { - return jjtThis; - } -} - -SimpleNode FieldValue() : {} -{ - "=" - ConstValue() - { - return jjtThis; - } -| -{ - return jjtThis; -} -} - -SimpleNode DefinitionType() : {} -{ -// BaseType() xxx - TypeString() - { - return jjtThis; - } -| TypeBool() - { - return jjtThis; - } -| Typei16() - { - return jjtThis; - } -| Typei32() - { - return jjtThis; - } -| Typei64() - { - return jjtThis; - } -| TypeDouble() - { - return jjtThis; - } -| TypeMap() - { - return jjtThis; - } -| TypeSet() - { - return jjtThis; - } -| TypeList() - { - return jjtThis; - } -} - -void FunctionType() : {} -{ - FieldType() -| -{} -} - -DynamicSerDeFieldType FieldType() : { -} - -{ - TypeString() - { - return jjtThis; - } -| TypeBool() - { - return jjtThis; - } -| Typei16() - { - return jjtThis; - } -| Typei32() - { - return jjtThis; - } -| Typei64() - { - return jjtThis; - } -| TypeDouble() - { - return jjtThis; - } -| - TypeMap() - { - return jjtThis; - } -| - TypeSet() - { - return jjtThis; - } -| - TypeList() - { - return jjtThis; - } -| - jjtThis.name = .image - { - return jjtThis; - } -} - -DynamicSerDeTypeString TypeString() : {} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypeByte TypeByte() : { -} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypei16 Typei16() : { -} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypei32 Typei32() : {} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypei64 Typei64() : {} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypeDouble TypeDouble() : {} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypeBool TypeBool() : {} -{ - - { - return jjtThis; - } -} - -DynamicSerDeTypeMap TypeMap() : {} -{ - - "<" - FieldType() - "," - FieldType() - ">" - { - return jjtThis; - } -} - -DynamicSerDeTypeSet TypeSet() : {} -{ - - "<" - - FieldType() - - ">" - { - return jjtThis; - } -} - -DynamicSerDeTypeList TypeList() : {} -{ - - "<" - - FieldType() - - ">" - { - return jjtThis; - } -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConst.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConst.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConst.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeConst.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeConst extends SimpleNode { - public DynamicSerDeConst(int id) { - super(id); - } - - public DynamicSerDeConst(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeHeader.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeHeader.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeHeader.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeHeader.java */ - -package org.apache.hadoop.hive.serde.dynamic_type; - -public class DynamicSerDeHeader extends SimpleNode { - public DynamicSerDeHeader(int id) { - super(id); - } - - public DynamicSerDeHeader(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/Token.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/Token.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/Token.java (working copy) @@ -1,99 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JavaCC: Do not edit this line. Token.java Version 3.0 */ -package org.apache.hadoop.hive.serde.dynamic_type; - -/** - * Describes the input token stream. - */ - -public class Token { - - /** - * An integer that describes the kind of this token. This numbering - * system is determined by JavaCCParser, and a table of these numbers is - * stored in the file ...Constants.java. 
- */ - public int kind; - - /** - * beginLine and beginColumn describe the position of the first character - * of this token; endLine and endColumn describe the position of the - * last character of this token. - */ - public int beginLine, beginColumn, endLine, endColumn; - - /** - * The string image of the token. - */ - public String image; - - /** - * A reference to the next regular (non-special) token from the input - * stream. If this is the last token from the input stream, or if the - * token manager has not read tokens beyond this one, this field is - * set to null. This is true only if this token is also a regular - * token. Otherwise, see below for a description of the contents of - * this field. - */ - public Token next; - - /** - * This field is used to access special tokens that occur prior to this - * token, but after the immediately preceding regular (non-special) token. - * If there are no such special tokens, this field is set to null. - * When there are more than one such special token, this field refers - * to the last of these special tokens, which in turn refers to the next - * previous special token through its specialToken field, and so on - * until the first special token (whose specialToken field is null). - * The next fields of special tokens refer to other special tokens that - * immediately follow it (without an intervening regular token). If there - * is no such token, this field is null. - */ - public Token specialToken; - - /** - * Returns the image. - */ - public String toString() - { - return image; - } - - /** - * Returns a new Token object, by default. However, if you want, you - * can create and return subclass objects based on the value of ofKind. - * Simply add the cases to the switch for all those special cases. - * For example, if you have a subclass of Token called IDToken that - * you want to create if ofKind is ID, simlpy add something like : - * - * case MyParserConstants.ID : return new IDToken(); - * - * to the following switch statement. Then you can cast matchedToken - * variable to the appropriate type and use it in your lexical actions. - */ - public static final Token newToken(int ofKind) - { - switch(ofKind) - { - default : return new Token(); - } - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeUnflagArgs.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeUnflagArgs.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeUnflagArgs.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeUnflagArgs.java */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-public class DynamicSerDeUnflagArgs extends SimpleNode {
-  public DynamicSerDeUnflagArgs(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeUnflagArgs(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeDefinition.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeDefinition.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeTypeDefinition.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeTypeDefinition.java */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-public class DynamicSerDeTypeDefinition extends SimpleNode {
-  public DynamicSerDeTypeDefinition(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeTypeDefinition(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeDefinitionType.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeDefinitionType.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeDefinitionType.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeDefinitionType.java */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-public class DynamicSerDeDefinitionType extends SimpleNode {
-  public DynamicSerDeDefinitionType(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeDefinitionType(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammarTreeConstants.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammarTreeConstants.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/thrift_grammarTreeConstants.java	(working copy)
@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. /home/pwyckoff/projects/hadoop/trunk/src/org.apache.hadoop.hive.serde/build/thrift_grammarTreeConstants.java */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-public interface thrift_grammarTreeConstants
-{
-  public int JJTSTART = 0;
-  public int JJTHEADERLIST = 1;
-  public int JJTHEADER = 2;
-  public int JJTNAMESPACE = 3;
-  public int JJTINCLUDE = 4;
-  public int JJTDEFINITION = 5;
-  public int JJTTYPEDEFINITION = 6;
-  public int JJTTYPEDEF = 7;
-  public int JJTCOMMAORSEMICOLON = 8;
-  public int JJTENUM = 9;
-  public int JJTENUMDEFLIST = 10;
-  public int JJTENUMDEF = 11;
-  public int JJTSENUM = 12;
-  public int JJTSENUMDEFLIST = 13;
-  public int JJTSENUMDEF = 14;
-  public int JJTCONST = 15;
-  public int JJTCONSTVALUE = 16;
-  public int JJTCONSTLIST = 17;
-  public int JJTCONSTLISTCONTENTS = 18;
-  public int JJTCONSTMAP = 19;
-  public int JJTCONSTMAPCONTENTS = 20;
-  public int JJTSTRUCT = 21;
-  public int JJTXCEPTION = 22;
-  public int JJTSERVICE = 23;
-  public int JJTFLAGARGS = 24;
-  public int JJTUNFLAGARGS = 25;
-  public int JJTEXTENDS = 26;
-  public int JJTFUNCTION = 27;
-  public int JJTASYNC = 28;
-  public int JJTTHROWS = 29;
-  public int JJTFIELDLIST = 30;
-  public int JJTFIELD = 31;
-  public int JJTFIELDREQUIREDNESS = 32;
-  public int JJTFIELDVALUE = 33;
-  public int JJTDEFINITIONTYPE = 34;
-  public int JJTFUNCTIONTYPE = 35;
-  public int JJTFIELDTYPE = 36;
-  public int JJTTYPESTRING = 37;
-  public int JJTTYPEBYTE = 38;
-  public int JJTTYPEI16 = 39;
-  public int JJTTYPEI32 = 40;
-  public int JJTTYPEI64 = 41;
-  public int JJTTYPEDOUBLE = 42;
-  public int JJTTYPEBOOL = 43;
-  public int JJTTYPEMAP = 44;
-  public int JJTTYPESET = 45;
-  public int JJTTYPELIST = 46;
-
-
-  public String[] jjtNodeName = {
-    "Start",
-    "HeaderList",
-    "Header",
-    "Namespace",
-    "Include",
-    "Definition",
-    "TypeDefinition",
-    "Typedef",
-    "CommaOrSemicolon",
-    "Enum",
-    "EnumDefList",
-    "EnumDef",
-    "Senum",
-    "SenumDefList",
-    "SenumDef",
-    "Const",
-    "ConstValue",
-    "ConstList",
-    "ConstListContents",
-    "ConstMap",
-    "ConstMapContents",
-    "Struct",
-    "Xception",
-    "Service",
-    "FlagArgs",
-    "UnflagArgs",
-    "Extends",
-    "Function",
-    "Async",
-    "Throws",
-    "FieldList",
-    "Field",
-    "FieldRequiredness",
-    "FieldValue",
-    "DefinitionType",
-    "FunctionType",
-    "FieldType",
-    "TypeString",
-    "TypeByte",
-    "Typei16",
-    "Typei32",
-    "Typei64",
-    "TypeDouble",
-    "TypeBool",
-    "TypeMap",
-    "TypeSet",
-    "TypeList",
-  };
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeService.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeService.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeService.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeService.java */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-public class DynamicSerDeService extends SimpleNode {
-  public DynamicSerDeService(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeService(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConstListContents.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConstListContents.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeConstListContents.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstListContents.java */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-public class DynamicSerDeConstListContents extends SimpleNode {
-  public DynamicSerDeConstListContents(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeConstListContents(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeEnum.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeEnum.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDeEnum.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeEnum.java */
-
-package org.apache.hadoop.hive.serde.dynamic_type;
-
-public class DynamicSerDeEnum extends SimpleNode {
-  public DynamicSerDeEnum(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeEnum(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java	(working copy)
@@ -25,6 +25,10 @@
 
   public static final String SERIALIZATION_NULL_FORMAT = "serialization.null.format";
 
+  public static final String SERIALIZATION_LAST_COLUMN_TAKES_REST = "serialization.last.column.takes.rest";
+
+  public static final String SERIALIZATION_SORT_ORDER = "serialization.sort.order";
+
   public static final String FIELD_DELIM = "field.delim";
 
   public static final String COLLECTION_DELIM = "colelction.delim";
@@ -33,8 +37,14 @@
 
   public static final String MAPKEY_DELIM = "mapkey.delim";
 
+  public static final String QUOTE_CHAR = "quote.delim";
+
+  public static final String BOOLEAN_TYPE_NAME = "boolean";
+
   public static final String TINYINT_TYPE_NAME = "tinyint";
 
+  public static final String SMALLINT_TYPE_NAME = "smallint";
+
   public static final String INT_TYPE_NAME = "int";
 
   public static final String BIGINT_TYPE_NAME = "bigint";
@@ -57,7 +67,9 @@
 
   public static final Set PrimitiveTypes = new HashSet();
   static {
+    PrimitiveTypes.add("boolean");
     PrimitiveTypes.add("tinyint");
+    PrimitiveTypes.add("smallint");
     PrimitiveTypes.add("int");
     PrimitiveTypes.add("bigint");
     PrimitiveTypes.add("float");
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleNode.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleNode.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleNode.java	(working copy)
@@ -1,72 +0,0 @@
-/* Generated By:JJTree: Do not edit this line. SimpleNode.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class SimpleNode implements Node {
-  protected Node parent;
-  protected Node[] children;
-  protected int id;
-  protected thrift_grammar parser;
-
-  public SimpleNode(int i) {
-    id = i;
-  }
-
-  public SimpleNode(thrift_grammar p, int i) {
-    this(i);
-    parser = p;
-  }
-
-  public void jjtOpen() {
-  }
-
-  public void jjtClose() {
-  }
-
-  public void jjtSetParent(Node n) { parent = n; }
-  public Node jjtGetParent() { return parent; }
-
-  public void jjtAddChild(Node n, int i) {
-    if (children == null) {
-      children = new Node[i + 1];
-    } else if (i >= children.length) {
-      Node c[] = new Node[i + 1];
-      System.arraycopy(children, 0, c, 0, children.length);
-      children = c;
-    }
-    children[i] = n;
-  }
-
-  public Node jjtGetChild(int i) {
-    return children[i];
-  }
-
-  public int jjtGetNumChildren() {
-    return (children == null) ? 0 : children.length;
-  }
-
-  /* You can override these two methods in subclasses of SimpleNode to
-     customize the way the node appears when the tree is dumped. If
-     your output uses more than one line you should override
-     toString(String), otherwise overriding toString() is probably all
-     you need to do. */
-
-  public String toString() { return thrift_grammarTreeConstants.jjtNodeName[id]; }
-  public String toString(String prefix) { return prefix + toString(); }
-
-  /* Override this method if you want to customize how the node dumps
-     out its children. */
-
-  public void dump(String prefix) {
-    System.out.println(toString(prefix));
-    if (children != null) {
-      for (int i = 0; i < children.length; ++i) {
-        SimpleNode n = (SimpleNode)children[i];
-        if (n != null) {
-          n.dump(prefix + " ");
-        }
-      }
-    }
-  }
-}
-
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarConstants.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarConstants.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarConstants.java	(working copy)
@@ -1,133 +0,0 @@
-/* Generated By:JJTree&JavaCC: Do not edit this line. thrift_grammarConstants.java */
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public interface thrift_grammarConstants {
-
-  int EOF = 0;
-  int tok_const = 8;
-  int tok_namespace = 9;
-  int tok_cpp_namespace = 10;
-  int tok_cpp_include = 11;
-  int tok_cpp_type = 12;
-  int tok_java_package = 13;
-  int tok_cocoa_prefix = 14;
-  int tok_csharp_namespace = 15;
-  int tok_php_namespace = 16;
-  int tok_py_module = 17;
-  int tok_perl_package = 18;
-  int tok_ruby_namespace = 19;
-  int tok_smalltalk_category = 20;
-  int tok_smalltalk_prefix = 21;
-  int tok_xsd_all = 22;
-  int tok_xsd_optional = 23;
-  int tok_xsd_nillable = 24;
-  int tok_xsd_namespace = 25;
-  int tok_xsd_attrs = 26;
-  int tok_include = 27;
-  int tok_void = 28;
-  int tok_bool = 29;
-  int tok_byte = 30;
-  int tok_i16 = 31;
-  int tok_i32 = 32;
-  int tok_i64 = 33;
-  int tok_double = 34;
-  int tok_string = 35;
-  int tok_slist = 36;
-  int tok_senum = 37;
-  int tok_map = 38;
-  int tok_list = 39;
-  int tok_set = 40;
-  int tok_async = 41;
-  int tok_typedef = 42;
-  int tok_struct = 43;
-  int tok_exception = 44;
-  int tok_extends = 45;
-  int tok_throws = 46;
-  int tok_service = 47;
-  int tok_enum = 48;
-  int tok_required = 49;
-  int tok_optional = 50;
-  int tok_int_constant = 51;
-  int tok_double_constant = 52;
-  int IDENTIFIER = 53;
-  int LETTER = 54;
-  int DIGIT = 55;
-  int tok_literal = 56;
-  int tok_st_identifier = 57;
-
-  int DEFAULT = 0;
-
-  String[] tokenImage = {
-    "",
-    "\" \"",
-    "\"\\t\"",
-    "\"\\n\"",
-    "\"\\r\"",
-    "",
-    "",
-    "",
-    "\"const\"",
-    "\"namespace\"",
-    "\"cpp_namespace\"",
-    "\"cpp_include\"",
-    "\"cpp_type\"",
-    "\"java_package\"",
-    "\"cocoa_prefix\"",
-    "\"csharp_namespace\"",
-    "\"php_namespace\"",
-    "\"py_module\"",
-    "\"perl_package\"",
-    "\"ruby_namespace\"",
-    "\"smalltalk_category\"",
-    "\"smalltalk_prefix\"",
-    "\"xsd_all\"",
-    "\"xsd_optional\"",
-    "\"xsd_nillable\"",
-    "\"xsd_namespace\"",
-    "\"xsd_attrs\"",
-    "\"include\"",
-    "\"void\"",
-    "\"bool\"",
-    "\"byte\"",
-    "\"i16\"",
-    "\"i32\"",
-    "\"i64\"",
-    "\"double\"",
-    "\"string\"",
-    "\"slist\"",
-    "\"senum\"",
-    "\"map\"",
-    "\"list\"",
-    "\"set\"",
-    "\"async\"",
-    "\"typedef\"",
-    "\"struct\"",
-    "\"exception\"",
-    "\"extends\"",
-    "\"throws\"",
-    "\"service\"",
-    "\"enum\"",
-    "\"required\"",
-    "\"optional\"",
-    "",
-    "",
-    "",
-    "",
-    "",
-    "",
-    "",
-    "\",\"",
-    "\";\"",
-    "\"{\"",
-    "\"}\"",
-    "\"=\"",
-    "\"[\"",
-    "\"]\"",
-    "\":\"",
-    "\"(\"",
-    "\")\"",
-    "\"<\"",
-    "\">\"",
-  };
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeFieldRequiredness.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeFieldRequiredness extends SimpleNode {
-  public DynamicSerDeFieldRequiredness(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeFieldRequiredness(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeInclude.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeInclude.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeInclude.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeInclude.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeInclude extends SimpleNode {
-  public DynamicSerDeInclude(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeInclude(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMap.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMap.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMap.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstMap.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeConstMap extends SimpleNode {
-  public DynamicSerDeConstMap(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeConstMap(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeThrows.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeThrows.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeThrows.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeThrows.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeThrows extends SimpleNode {
-  public DynamicSerDeThrows(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeThrows(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFlagArgs.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFlagArgs.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFlagArgs.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeFlagArgs.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeFlagArgs extends SimpleNode {
-  public DynamicSerDeFlagArgs(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeFlagArgs(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java	(working copy)
@@ -1,123 +0,0 @@
-/* Generated By:JJTree: Do not edit this line. /data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-class JJTthrift_grammarState {
-  private java.util.Stack nodes;
-  private java.util.Stack marks;
-
-  private int sp;        // number of nodes on stack
-  private int mk;        // current mark
-  private boolean node_created;
-
-  JJTthrift_grammarState() {
-    nodes = new java.util.Stack();
-    marks = new java.util.Stack();
-    sp = 0;
-    mk = 0;
-  }
-
-  /* Determines whether the current node was actually closed and
-     pushed. This should only be called in the final user action of a
-     node scope. */
-  boolean nodeCreated() {
-    return node_created;
-  }
-
-  /* Call this to reinitialize the node stack. It is called
-     automatically by the parser's ReInit() method. */
-  void reset() {
-    nodes.removeAllElements();
-    marks.removeAllElements();
-    sp = 0;
-    mk = 0;
-  }
-
-  /* Returns the root node of the AST. It only makes sense to call
-     this after a successful parse. */
-  Node rootNode() {
-    return (Node)nodes.elementAt(0);
-  }
-
-  /* Pushes a node on to the stack. */
-  void pushNode(Node n) {
-    nodes.push(n);
-    ++sp;
-  }
-
-  /* Returns the node on the top of the stack, and remove it from the
-     stack. */
-  Node popNode() {
-    if (--sp < mk) {
-      mk = ((Integer)marks.pop()).intValue();
-    }
-    return (Node)nodes.pop();
-  }
-
-  /* Returns the node currently on the top of the stack. */
-  Node peekNode() {
-    return (Node)nodes.peek();
-  }
-
-  /* Returns the number of children on the stack in the current node
-     scope. */
-  int nodeArity() {
-    return sp - mk;
-  }
-
-
-  void clearNodeScope(Node n) {
-    while (sp > mk) {
-      popNode();
-    }
-    mk = ((Integer)marks.pop()).intValue();
-  }
-
-
-  void openNodeScope(Node n) {
-    marks.push(new Integer(mk));
-    mk = sp;
-    n.jjtOpen();
-  }
-
-
-  /* A definite node is constructed from a specified number of
-     children. That number of nodes are popped from the stack and
-     made the children of the definite node. Then the definite node
-     is pushed on to the stack. */
-  void closeNodeScope(Node n, int num) {
-    mk = ((Integer)marks.pop()).intValue();
-    while (num-- > 0) {
-      Node c = popNode();
-      c.jjtSetParent(n);
-      n.jjtAddChild(c, num);
-    }
-    n.jjtClose();
-    pushNode(n);
-    node_created = true;
-  }
-
-
-  /* A conditional node is constructed if its condition is true. All
-     the nodes that have been pushed since the node was opened are
-     made children of the the conditional node, which is then pushed
-     on to the stack. If the condition is false the node is not
-     constructed and they are left on the stack. */
-  void closeNodeScope(Node n, boolean condition) {
-    if (condition) {
-      int a = nodeArity();
-      mk = ((Integer)marks.pop()).intValue();
-      while (a-- > 0) {
-        Node c = popNode();
-        c.jjtSetParent(n);
-        n.jjtAddChild(c, a);
-      }
-      n.jjtClose();
-      pushNode(n);
-      node_created = true;
-    } else {
-      mk = ((Integer)marks.pop()).intValue();
-      node_created = false;
-    }
-  }
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenum.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenum.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenum.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeSenum.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeSenum extends SimpleNode {
-  public DynamicSerDeSenum(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeSenum(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunctionType.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunctionType.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunctionType.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeFunctionType.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeFunctionType extends SimpleNode {
-  public DynamicSerDeFunctionType(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeFunctionType(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinition.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinition.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinition.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeDefinition.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeDefinition extends SimpleNode {
-  public DynamicSerDeDefinition(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeDefinition(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeXception.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeXception.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeXception.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeXception.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeXception extends SimpleNode {
-  public DynamicSerDeXception(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeXception(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeNamespace.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeNamespace.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeNamespace.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeNamespace.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeNamespace extends SimpleNode {
-  public DynamicSerDeNamespace(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeNamespace(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStart.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStart.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStart.java	(working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeStart.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeStart extends SimpleNode {
-  public DynamicSerDeStart(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeStart(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Node.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Node.java	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Node.java	(working copy)
@@ -1,34 +0,0 @@
-/* Generated By:JJTree: Do not edit this line. Node.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-/* All AST nodes must implement this interface. It provides basic
-   machinery for constructing the parent and child relationships
-   between nodes. */
-
-public interface Node {
-
-  /** This method is called after the node has been made the current
-      node. It indicates that child nodes can now be added to it. */
-  public void jjtOpen();
-
-  /** This method is called after all the child nodes have been
-      added. */
-  public void jjtClose();
-
-  /** This pair of methods are used to inform the node of its
-      parent. */
-  public void jjtSetParent(Node n);
-  public Node jjtGetParent();
-
-  /** This method tells the node to add its argument to the node's
-      list of children. */
-  public void jjtAddChild(Node n, int i);
-
-  /** This method returns a child node. The children are numbered
-      from zero, left to right. */
-  public Node jjtGetChild(int i);
-
-  /** Return the number of children the node has. */
-  public int jjtGetNumChildren();
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jj
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jj	(revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jj	(working copy)
@@ -1,2345 +0,0 @@
-/*@bgen(jjtree) Generated By:JJTree: Do not edit this line. /data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jj */
-/*@egen*/options {
-  STATIC = false;
-}
-
-
-PARSER_BEGIN(thrift_grammar)
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-import java.util.*;
-import java.io.*;
-import java.net.*;
-import com.facebook.thrift.protocol.*;
-import com.facebook.thrift.transport.*;
-import org.apache.hadoop.hive.serde2.dynamic_type.*;
-
-public class thrift_grammar/*@bgen(jjtree)*/implements thrift_grammarTreeConstants/*@egen*/ {/*@bgen(jjtree)*/
-  protected JJTthrift_grammarState jjtree = new JJTthrift_grammarState();
-
-/*@egen*/
-
-  private List include_path = null;
-
-  // for computing the autogenerated field ids in thrift
-  private int field_val;
-
-  // store types and tables
-  // separately because one cannot use a table (ie service.method) as a Struct like type.
-  protected Map types;
-  protected Map tables;
-
-  // system include path
-  final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" };
-
-  // need three params to differentiate between this and 2 param method auto generated since
-  // some calls in the autogenerated code use null param for 2nd param and thus ambiguous.
-  protected thrift_grammar(InputStream is, List include_path, boolean junk) {
-    this(is,null);
-    this.types = new HashMap () ;
-    this.tables = new HashMap () ;
-    this.include_path = include_path;
-    this.field_val = -1;
-  }
-
-  // find the file on the include path
-  private static File findFile(String fname, List include_path) {
-    for(String path: include_path) {
-      final String full = path + "/" + fname;
-      File f = new File(full);
-      if(f.exists()) {
-        return f;
-      }
-    }
-    return null;
-  }
-
-  public static void main(String args[]) {
-    String filename = null;
-    List include_path = new ArrayList();
-
-    for(String path: default_include_path) {
-      include_path.add(path);
-    }
-    for(int i = 0; i < args.length; i++) {
-      String arg = args[i];
-      if(arg.equals("--include") && i + 1 < args.length) {
-        include_path.add(args[++i]);
-      }
-      if(arg.equals("--file") && i + 1 < args.length) {
-        filename = args[++i];
-      }
-    }
-
-    InputStream is = System.in;
-    if(filename != null) {
-      try {
-        is = new FileInputStream(findFile(filename, include_path));
-      } catch(IOException e) {
-      }
-    }
-    thrift_grammar t = new thrift_grammar(is,include_path,false);
-
-    try {
-      t.Start();
-    } catch (Exception e) {
-      System.out.println("Parse error.");
-      System.out.println(e.getMessage());
-      e.printStackTrace();
-    }
-  }
-}
-
-PARSER_END(thrift_grammar)
-
-
-SKIP :
-{
-  " "
-| "\t"
-| "\n"
-| "\r"
-| <"#"(~["\n"])* ("\n"|"\r"|"\r\n")>
-| <"//" (~["\n","\r"])* ("\n"|"\r"|"\r\n")>
-| <"/*" (~["*"])* "*" (~["/"] (~["*"])* "*")* "/">
-}
-
-
-/**
- * HELPER DEFINITIONS, COMMENTS, CONSTANTS, AND WHATNOT
- */
-
-TOKEN:
-{
-|
- |
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
- |
- |
- |
- |
- |
- |
- |
- |
- |
- |
- |
- |
-
-}
-
-TOKEN: {
-
-
-|
-)*"."()+(["e","E"](["+","-"])?()+)?>
-|
-(||"."|"_")*>
-|
-<#LETTER: (["a"-"z", "A"-"Z" ]) >
-|
-<#DIGIT: ["0"-"9"] >
-|
-
-|
-
-}
-
-
-SimpleNode Start() : {/*@bgen(jjtree) Start */
-  DynamicSerDeStart jjtn000 = new DynamicSerDeStart(JJTSTART);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Start */
-  try {
-/*@egen*/
-    HeaderList() (Definition())+/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode HeaderList() : {/*@bgen(jjtree) HeaderList */
-  DynamicSerDeHeaderList jjtn000 = new DynamicSerDeHeaderList(JJTHEADERLIST);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) HeaderList */
-  try {
-/*@egen*/
-    (Header())*/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-
-}
-
-SimpleNode Header() : {/*@bgen(jjtree) Header */
-  DynamicSerDeHeader jjtn000 = new DynamicSerDeHeader(JJTHEADER);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Header */
-  try {
-/*@egen*/
-    Include()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Namespace()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode Namespace() : {/*@bgen(jjtree) Namespace */
-  DynamicSerDeNamespace jjtn000 = new DynamicSerDeNamespace(JJTNAMESPACE);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Namespace */
-  try {
-/*@egen*/
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}
-|
-  /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-
-SimpleNode Include() : {/*@bgen(jjtree) Include */
-  DynamicSerDeInclude jjtn000 = new DynamicSerDeInclude(JJTINCLUDE);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/
-  String fname;
-  boolean found = false;
-}
-{/*@bgen(jjtree) Include */
-  try {
-/*@egen*/
-
-  fname=.image/*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  // bugbug somewhat fragile below substring expression
-  fname = fname.substring(1,fname.length() - 1);
-
-  // try to find the file on the include path
-  File f = thrift_grammar.findFile(fname, this.include_path);
-  if(f != null) {
-    found = true;
-    try {
-      FileInputStream fis = new FileInputStream(f);
-      thrift_grammar t = new thrift_grammar(fis,this.include_path, false);
-      t.Start();
-      fis.close();
-      found = true;
-      // add in what we found to our type and table tables.
-      this.tables.putAll(t.tables);
-      this.types.putAll(t.types);
-    } catch (Exception e) {
-      System.out.println("File: " + fname + " - Oops.");
-      System.out.println(e.getMessage());
-      e.printStackTrace();
-    }
-  }
-  if(!found) {
-    throw new RuntimeException("include file not found: " + fname);
-  }
-  return jjtn000;
-}/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-
-SimpleNode Definition() : {/*@bgen(jjtree) Definition */
-  DynamicSerDeDefinition jjtn000 = new DynamicSerDeDefinition(JJTDEFINITION);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Definition */
-  try {
-/*@egen*/
-    Const()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Service()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| TypeDefinition()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode TypeDefinition() : {/*@bgen(jjtree) TypeDefinition */
-  DynamicSerDeTypeDefinition jjtn000 = new DynamicSerDeTypeDefinition(JJTTYPEDEFINITION);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) TypeDefinition */
-  try {
-/*@egen*/
-    Typedef()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Enum()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Senum()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Struct()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Xception()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-
-}
-
-DynamicSerDeTypedef Typedef() : {/*@bgen(jjtree) Typedef */
-  DynamicSerDeTypedef jjtn000 = new DynamicSerDeTypedef(JJTTYPEDEF);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Typedef */
-  try {
-/*@egen*/
-
-  DefinitionType()
-  jjtn000.name = .image/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      // store the type for later retrieval
-      this.types.put(jjtn000.name, jjtn000);
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-
-// returning void because we ignore this production.
-void CommaOrSemicolon() : {/*@bgen(jjtree) CommaOrSemicolon */
-  DynamicSerDeCommaOrSemicolon jjtn000 = new DynamicSerDeCommaOrSemicolon(JJTCOMMAORSEMICOLON);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) CommaOrSemicolon */
-  try {
-/*@egen*/
-  ","
-|
-  ";"/*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-}/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode Enum() : {/*@bgen(jjtree) Enum */
-  DynamicSerDeEnum jjtn000 = new DynamicSerDeEnum(JJTENUM);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Enum */
-  try {
-/*@egen*/
-    "{" EnumDefList() "}"/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode EnumDefList() : {/*@bgen(jjtree) EnumDefList */
-  DynamicSerDeEnumDefList jjtn000 = new DynamicSerDeEnumDefList(JJTENUMDEFLIST);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) EnumDefList */
-  try {
-/*@egen*/
-    (EnumDef())+/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode EnumDef() : {/*@bgen(jjtree) EnumDef */
-  DynamicSerDeEnumDef jjtn000 = new DynamicSerDeEnumDef(JJTENUMDEF);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) EnumDef */
-  try {
-/*@egen*/
-    ["=" ] [CommaOrSemicolon()]/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode Senum() : {/*@bgen(jjtree) Senum */
-  DynamicSerDeSenum jjtn000 = new DynamicSerDeSenum(JJTSENUM);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Senum */
-  try {
-/*@egen*/
-    "{" SenumDefList() "}"/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode SenumDefList() : {/*@bgen(jjtree) SenumDefList */
-  DynamicSerDeSenumDefList jjtn000 = new DynamicSerDeSenumDefList(JJTSENUMDEFLIST);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) SenumDefList */
-  try {
-/*@egen*/
-    (SenumDef())+/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode SenumDef() : {/*@bgen(jjtree) SenumDef */
-  DynamicSerDeSenumDef jjtn000 = new DynamicSerDeSenumDef(JJTSENUMDEF);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) SenumDef */
-  try {
-/*@egen*/
-    [CommaOrSemicolon()]/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-
-SimpleNode Const() : {/*@bgen(jjtree) Const */
-  DynamicSerDeConst jjtn000 = new DynamicSerDeConst(JJTCONST);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Const */
-  try {
-/*@egen*/
-    FieldType() "=" ConstValue() [CommaOrSemicolon()]/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode ConstValue() : {/*@bgen(jjtree) ConstValue */
-  DynamicSerDeConstValue jjtn000 = new DynamicSerDeConstValue(JJTCONSTVALUE);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) ConstValue */
-  try {
-/*@egen*/
-    /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-    }
-| /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-    }
-| /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-    }
-| /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-    }
-| ConstList()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-    }
-| ConstMap()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode ConstList() : {/*@bgen(jjtree) ConstList */
-  DynamicSerDeConstList jjtn000 = new DynamicSerDeConstList(JJTCONSTLIST);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) ConstList */
-  try {
-/*@egen*/
-    "[" ConstListContents() "]"/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode ConstListContents() : {/*@bgen(jjtree) ConstListContents */
-  DynamicSerDeConstListContents jjtn000 = new DynamicSerDeConstListContents(JJTCONSTLISTCONTENTS);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) ConstListContents */
-  try {
-/*@egen*/
-    (ConstValue() [CommaOrSemicolon()])+/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode ConstMap() : {/*@bgen(jjtree) ConstMap */
-  DynamicSerDeConstMap jjtn000 = new DynamicSerDeConstMap(JJTCONSTMAP);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) ConstMap */
-  try {
-/*@egen*/
-    "{" ConstMapContents() "}"/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode ConstMapContents() : {/*@bgen(jjtree) ConstMapContents */
-  DynamicSerDeConstMapContents jjtn000 = new DynamicSerDeConstMapContents(JJTCONSTMAPCONTENTS);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) ConstMapContents */
-  try {
-/*@egen*/
-    (ConstValue() ":" ConstValue() [CommaOrSemicolon()])+/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-    }
-|/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeStruct Struct() : {/*@bgen(jjtree) Struct */
-  DynamicSerDeStruct jjtn000 = new DynamicSerDeStruct(JJTSTRUCT);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/
-
-}
-{/*@bgen(jjtree) Struct */
-  try {
-/*@egen*/
-
-  jjtn000.name = .image
-  "{"
-  FieldList()
-  "}"/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      this.types.put(jjtn000.name,jjtn000);
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-
-SimpleNode Xception() : {/*@bgen(jjtree) Xception */
-  DynamicSerDeXception jjtn000 = new DynamicSerDeXception(JJTXCEPTION);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Xception */
-  try {
-/*@egen*/
-    "{" FieldList() "}"/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-
-SimpleNode Service() : {/*@bgen(jjtree) Service */
-  DynamicSerDeService jjtn000 = new DynamicSerDeService(JJTSERVICE);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Service */
-  try {
-/*@egen*/
-
-
-    Extends()
-    "{"
-    FlagArgs()
-    (Function())+
-    UnflagArgs()
-    "}"/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      // at some point, these should be inserted as a "db"
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode FlagArgs() : {/*@bgen(jjtree) FlagArgs */
-  DynamicSerDeFlagArgs jjtn000 = new DynamicSerDeFlagArgs(JJTFLAGARGS);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) FlagArgs */
-  try {
-/*@egen*//*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode UnflagArgs() : {/*@bgen(jjtree) UnflagArgs */
-  DynamicSerDeUnflagArgs jjtn000 = new DynamicSerDeUnflagArgs(JJTUNFLAGARGS);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) UnflagArgs */
-  try {
-/*@egen*//*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode Extends() : {/*@bgen(jjtree) Extends */
-  DynamicSerDeExtends jjtn000 = new DynamicSerDeExtends(JJTEXTENDS);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Extends */
-  try {
-/*@egen*/
-    /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-|/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-
-DynamicSerDeFunction Function() : {/*@bgen(jjtree) Function */
-  DynamicSerDeFunction jjtn000 = new DynamicSerDeFunction(JJTFUNCTION);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Function */
-  try {
-/*@egen*/
-    // metastore ignores async and type
-    Async()
-    FunctionType()
-
-    // the name of the function/table
-    jjtn000.name = .image
-    "("
-    FieldList()
-    ")"
-    Throws()
-    [CommaOrSemicolon()]/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-
-    {
-      this.tables.put(jjtn000.name, jjtn000);
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-void Async() : {/*@bgen(jjtree) Async */
-  DynamicSerDeAsync jjtn000 = new DynamicSerDeAsync(JJTASYNC);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Async */
-  try {
-/*@egen*/
-
-|/*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{}/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-void Throws() : {/*@bgen(jjtree) Throws */
-  DynamicSerDeThrows jjtn000 = new DynamicSerDeThrows(JJTTHROWS);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Throws */
-  try {
-/*@egen*/
-    "(" FieldList() ")"
-|/*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{}/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-
-// nothing special - just use the DynamicSerDeFieldList's children methods to access the fields
-DynamicSerDeFieldList FieldList() : {/*@bgen(jjtree) FieldList */
-  DynamicSerDeFieldList jjtn000 = new DynamicSerDeFieldList(JJTFIELDLIST);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/
-  this.field_val = -1;
-}
-{/*@bgen(jjtree) FieldList */
-  try {
-/*@egen*/
-    (Field())*/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/ {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-
-DynamicSerDeField Field() : {/*@bgen(jjtree) Field */
-  DynamicSerDeField jjtn000 = new DynamicSerDeField(JJTFIELD);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/
-
-  String fidnum = "";
-  String fid;
-}
-{/*@bgen(jjtree) Field */
-  try {
-/*@egen*/
-
-    // parse the field id which is optional
-    [fidnum=.image ":"]
-
-    // is this field required or optional? default is optional
-    FieldRequiredness()
-
-    // field type - obviously not optional
-    FieldType()
-
-    // the name of the field - not optional
-    jjtn000.name = .image
-
-    // does it have = some value?
-    FieldValue()
-
-    // take it or leave it
-    [CommaOrSemicolon()]/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-
-    {
-      if(fidnum.length() > 0) {
-        int fidInt = Integer.valueOf(fidnum);
-        jjtn000.fieldid = fidInt;
-      } else {
-        jjtn000.fieldid = this.field_val--;
-      }
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-
-
-SimpleNode FieldRequiredness() : {/*@bgen(jjtree) FieldRequiredness */
-  DynamicSerDeFieldRequiredness jjtn000 = new DynamicSerDeFieldRequiredness(JJTFIELDREQUIREDNESS);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) FieldRequiredness */
-  try {
-/*@egen*/
-    /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-|/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode FieldValue() : {/*@bgen(jjtree) FieldValue */
-  DynamicSerDeFieldValue jjtn000 = new DynamicSerDeFieldValue(JJTFIELDVALUE);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) FieldValue */
-  try {
-/*@egen*/
-    "="
-    ConstValue()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-|/*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{
-  return jjtn000;
-}/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-SimpleNode DefinitionType() : {/*@bgen(jjtree) DefinitionType */
-  DynamicSerDeDefinitionType jjtn000 = new DynamicSerDeDefinitionType(JJTDEFINITIONTYPE);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) DefinitionType */
-  try {
-/*@egen*/
-// BaseType() xxx
-    TypeString()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| TypeBool()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Typei16()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Typei32()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Typei64()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| TypeDouble()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| TypeMap()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| TypeSet()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| TypeList()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-void FunctionType() : {/*@bgen(jjtree) FunctionType */
-  DynamicSerDeFunctionType jjtn000 = new DynamicSerDeFunctionType(JJTFUNCTIONTYPE);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) FunctionType */
-  try {
-/*@egen*/
-    FieldType()
-| /*@bgen(jjtree)*/
-{
-  jjtree.closeNodeScope(jjtn000, true);
-  jjtc000 = false;
-}
-/*@egen*/
-{}/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeFieldType FieldType() : {/*@bgen(jjtree) FieldType */
-  DynamicSerDeFieldType jjtn000 = new DynamicSerDeFieldType(JJTFIELDTYPE);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/
-}
-
-{/*@bgen(jjtree) FieldType */
-  try {
-/*@egen*/
-    TypeString()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| TypeBool()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Typei16()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Typei32()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| Typei64()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-| TypeDouble()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-|
-    TypeMap()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-|
-    TypeSet()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-|
-    TypeList()/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }
-|
-    jjtn000.name = .image/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeTypeString TypeString() : {/*@bgen(jjtree) TypeString */
-  DynamicSerDeTypeString jjtn000 = new DynamicSerDeTypeString(JJTTYPESTRING);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) TypeString */
-  try {
-/*@egen*/
-    /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeTypeByte TypeByte() : {/*@bgen(jjtree) TypeByte */
-  DynamicSerDeTypeByte jjtn000 = new DynamicSerDeTypeByte(JJTTYPEBYTE);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/
-}
-{/*@bgen(jjtree) TypeByte */
-  try {
-/*@egen*/
-    /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeTypei16 Typei16() : {/*@bgen(jjtree) Typei16 */
-  DynamicSerDeTypei16 jjtn000 = new DynamicSerDeTypei16(JJTTYPEI16);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/
-}
-{/*@bgen(jjtree) Typei16 */
-  try {
-/*@egen*/
-    /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeTypei32 Typei32() : {/*@bgen(jjtree) Typei32 */
-  DynamicSerDeTypei32 jjtn000 = new DynamicSerDeTypei32(JJTTYPEI32);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Typei32 */
-  try {
-/*@egen*/
-    /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeTypei64 Typei64() : {/*@bgen(jjtree) Typei64 */
-  DynamicSerDeTypei64 jjtn000 = new DynamicSerDeTypei64(JJTTYPEI64);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) Typei64 */
-  try {
-/*@egen*/
-    /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeTypeDouble TypeDouble() : {/*@bgen(jjtree) TypeDouble */
-  DynamicSerDeTypeDouble jjtn000 = new DynamicSerDeTypeDouble(JJTTYPEDOUBLE);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) TypeDouble */
-  try {
-/*@egen*/
-    /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeTypeBool TypeBool() : {/*@bgen(jjtree) TypeBool */
-  DynamicSerDeTypeBool jjtn000 = new DynamicSerDeTypeBool(JJTTYPEBOOL);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) TypeBool */
-  try {
-/*@egen*/
-    /*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeTypeMap TypeMap() : {/*@bgen(jjtree) TypeMap */
-  DynamicSerDeTypeMap jjtn000 = new DynamicSerDeTypeMap(JJTTYPEMAP);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) TypeMap */
-  try {
-/*@egen*/
-
-    "<"
-    FieldType()
-    ","
-    FieldType()
-    ">"/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeTypeSet TypeSet() : {/*@bgen(jjtree) TypeSet */
-  DynamicSerDeTypeSet jjtn000 = new DynamicSerDeTypeSet(JJTTYPESET);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) TypeSet */
-  try {
-/*@egen*/
-
-    "<"
-
-    FieldType()
-
-    ">"/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
-
-DynamicSerDeTypeList TypeList() : {/*@bgen(jjtree) TypeList */
-  DynamicSerDeTypeList jjtn000 = new DynamicSerDeTypeList(JJTTYPELIST);
-  boolean jjtc000 = true;
-  jjtree.openNodeScope(jjtn000);
-/*@egen*/}
-{/*@bgen(jjtree) TypeList */
-  try {
-/*@egen*/
-
-    "<"
-
-    FieldType()
-
-    ">"/*@bgen(jjtree)*/
-    {
-      jjtree.closeNodeScope(jjtn000, true);
-      jjtc000 = false;
-    }
-/*@egen*/
-    {
-      return jjtn000;
-    }/*@bgen(jjtree)*/
-  } catch (Throwable jjte000) {
-    if (jjtc000) {
-      jjtree.clearNodeScope(jjtn000);
-      jjtc000 = false;
-    } else {
-      jjtree.popNode();
-    }
-    if (jjte000 instanceof RuntimeException) {
-      throw (RuntimeException)jjte000;
-    }
-    if (jjte000 instanceof ParseException) {
-      throw (ParseException)jjte000;
-    }
-    throw (Error)jjte000;
-  } finally {
-    if (jjtc000) {
-      jjtree.closeNodeScope(jjtn000, true);
-    }
-  }
-/*@egen*/
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java (revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java (working copy)
@@ -1,192 +0,0 @@
-/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-/**
- * This exception is thrown when parse errors are encountered.
- * You can explicitly create objects of this exception type by
- * calling the method generateParseException in the generated
- * parser.
- *
- * You can modify this class to customize your error reporting
- * mechanisms so long as you retain the public fields.
- */
-public class ParseException extends Exception {
-
-  /**
-   * This constructor is used by the method "generateParseException"
-   * in the generated parser.  Calling this constructor generates
-   * a new object of this type with the fields "currentToken",
-   * "expectedTokenSequences", and "tokenImage" set.  The boolean
-   * flag "specialConstructor" is also set to true to indicate that
-   * this constructor was used to create this object.
-   * This constructor calls its super class with the empty string
-   * to force the "toString" method of parent class "Throwable" to
-   * print the error message in the form:
-   *     ParseException:
-   */
-  public ParseException(Token currentTokenVal,
-                        int[][] expectedTokenSequencesVal,
-                        String[] tokenImageVal
-                       )
-  {
-    super("");
-    specialConstructor = true;
-    currentToken = currentTokenVal;
-    expectedTokenSequences = expectedTokenSequencesVal;
-    tokenImage = tokenImageVal;
-  }
-
-  /**
-   * The following constructors are for use by you for whatever
-   * purpose you can think of.  Constructing the exception in this
-   * manner makes the exception behave in the normal way - i.e., as
-   * documented in the class "Throwable".  The fields "errorToken",
-   * "expectedTokenSequences", and "tokenImage" do not contain
-   * relevant information.  The JavaCC generated code does not use
-   * these constructors.
-   */
-
-  public ParseException() {
-    super();
-    specialConstructor = false;
-  }
-
-  public ParseException(String message) {
-    super(message);
-    specialConstructor = false;
-  }
-
-  /**
-   * This variable determines which constructor was used to create
-   * this object and thereby affects the semantics of the
-   * "getMessage" method (see below).
-   */
-  protected boolean specialConstructor;
-
-  /**
-   * This is the last token that has been consumed successfully.  If
-   * this object has been created due to a parse error, the token
-   * followng this token will (therefore) be the first error token.
-   */
-  public Token currentToken;
-
-  /**
-   * Each entry in this array is an array of integers.  Each array
-   * of integers represents a sequence of tokens (by their ordinal
-   * values) that is expected at this point of the parse.
-   */
-  public int[][] expectedTokenSequences;
-
-  /**
-   * This is a reference to the "tokenImage" array of the generated
-   * parser within which the parse error occurred.  This array is
-   * defined in the generated ...Constants interface.
-   */
-  public String[] tokenImage;
-
-  /**
-   * This method has the standard behavior when this object has been
-   * created using the standard constructors.  Otherwise, it uses
-   * "currentToken" and "expectedTokenSequences" to generate a parse
-   * error message and returns it.  If this object has been created
-   * due to a parse error, and you do not catch it (it gets thrown
-   * from the parser), then this method is called during the printing
-   * of the final stack trace, and hence the correct error message
-   * gets displayed.
-   */
-  public String getMessage() {
-    if (!specialConstructor) {
-      return super.getMessage();
-    }
-    StringBuffer expected = new StringBuffer();
-    int maxSize = 0;
-    for (int i = 0; i < expectedTokenSequences.length; i++) {
-      if (maxSize < expectedTokenSequences[i].length) {
-        maxSize = expectedTokenSequences[i].length;
-      }
-      for (int j = 0; j < expectedTokenSequences[i].length; j++) {
-        expected.append(tokenImage[expectedTokenSequences[i][j]]).append(" ");
-      }
-      if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) {
-        expected.append("...");
-      }
-      expected.append(eol).append(" ");
-    }
-    String retval = "Encountered \"";
-    Token tok = currentToken.next;
-    for (int i = 0; i < maxSize; i++) {
-      if (i != 0) retval += " ";
-      if (tok.kind == 0) {
-        retval += tokenImage[0];
-        break;
-      }
-      retval += add_escapes(tok.image);
-      tok = tok.next;
-    }
-    retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
-    retval += "." + eol;
-    if (expectedTokenSequences.length == 1) {
-      retval += "Was expecting:" + eol + " ";
-    } else {
-      retval += "Was expecting one of:" + eol + " ";
-    }
-    retval += expected.toString();
-    return retval;
-  }
-
-  /**
-   * The end of line string for this machine.
-   */
-  protected String eol = System.getProperty("line.separator", "\n");
-
-  /**
-   * Used to convert raw characters to their escaped version
-   * when these raw version cannot be used as part of an ASCII
-   * string literal.
-   */
-  protected String add_escapes(String str) {
-    StringBuffer retval = new StringBuffer();
-    char ch;
-    for (int i = 0; i < str.length(); i++) {
-      switch (str.charAt(i))
-      {
-        case 0 :
-          continue;
-        case '\b':
-          retval.append("\\b");
-          continue;
-        case '\t':
-          retval.append("\\t");
-          continue;
-        case '\n':
-          retval.append("\\n");
-          continue;
-        case '\f':
-          retval.append("\\f");
-          continue;
-        case '\r':
-          retval.append("\\r");
-          continue;
-        case '\"':
-          retval.append("\\\"");
-          continue;
-        case '\'':
-          retval.append("\\\'");
-          continue;
-        case '\\':
-          retval.append("\\\\");
-          continue;
-        default:
-          if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
-            String s = "0000" + Integer.toString(ch, 16);
-            retval.append("\\u" + s.substring(s.length() - 4, s.length()));
-          } else {
-            retval.append(ch);
-          }
-          continue;
-      }
-    }
-    return retval.toString();
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDefList.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDefList.java (revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDefList.java (working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeSenumDefList.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeSenumDefList extends SimpleNode {
-  public DynamicSerDeSenumDefList(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeSenumDefList(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMapContents.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMapContents.java (revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMapContents.java (working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstMapContents.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeConstMapContents extends SimpleNode {
-  public DynamicSerDeConstMapContents(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeConstMapContents(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDef.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDef.java (revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDef.java (working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeSenumDef.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeSenumDef extends SimpleNode {
-  public DynamicSerDeSenumDef(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeSenumDef(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeCommaOrSemicolon.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeCommaOrSemicolon.java (revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeCommaOrSemicolon.java (working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeCommaOrSemicolon.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeCommaOrSemicolon extends SimpleNode {
-  public DynamicSerDeCommaOrSemicolon(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeCommaOrSemicolon(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java (revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java (working copy)
@@ -1,133 +0,0 @@
-/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 3.0 */
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class TokenMgrError extends Error
-{
-  /*
-   * Ordinals for various reasons why an Error of this type can be thrown.
-   */
-
-  /**
-   * Lexical error occured.
-   */
-  static final int LEXICAL_ERROR = 0;
-
-  /**
-   * An attempt wass made to create a second instance of a static token manager.
-   */
-  static final int STATIC_LEXER_ERROR = 1;
-
-  /**
-   * Tried to change to an invalid lexical state.
-   */
-  static final int INVALID_LEXICAL_STATE = 2;
-
-  /**
-   * Detected (and bailed out of) an infinite loop in the token manager.
-   */
-  static final int LOOP_DETECTED = 3;
-
-  /**
-   * Indicates the reason why the exception is thrown. It will have
-   * one of the above 4 values.
-   */
-  int errorCode;
-
-  /**
-   * Replaces unprintable characters by their espaced (or unicode escaped)
-   * equivalents in the given string
-   */
-  protected static final String addEscapes(String str) {
-    StringBuffer retval = new StringBuffer();
-    char ch;
-    for (int i = 0; i < str.length(); i++) {
-      switch (str.charAt(i))
-      {
-        case 0 :
-          continue;
-        case '\b':
-          retval.append("\\b");
-          continue;
-        case '\t':
-          retval.append("\\t");
-          continue;
-        case '\n':
-          retval.append("\\n");
-          continue;
-        case '\f':
-          retval.append("\\f");
-          continue;
-        case '\r':
-          retval.append("\\r");
-          continue;
-        case '\"':
-          retval.append("\\\"");
-          continue;
-        case '\'':
-          retval.append("\\\'");
-          continue;
-        case '\\':
-          retval.append("\\\\");
-          continue;
-        default:
-          if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
-            String s = "0000" + Integer.toString(ch, 16);
-            retval.append("\\u" + s.substring(s.length() - 4, s.length()));
-          } else {
-            retval.append(ch);
-          }
-          continue;
-      }
-    }
-    return retval.toString();
-  }
-
-  /**
-   * Returns a detailed message for the Error when it is thrown by the
-   * token manager to indicate a lexical error.
-   * Parameters :
-   *    EOFSeen     : indicates if EOF caused the lexicl error
-   *    curLexState : lexical state in which this error occured
-   *    errorLine   : line number when the error occured
-   *    errorColumn : column number when the error occured
-   *    errorAfter  : prefix that was seen before this error occured
-   *    curchar     : the offending character
-   * Note: You can customize the lexical error message by modifying this method.
-   */
-  protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
-    return("Lexical error at line " +
-          errorLine + ", column " +
-          errorColumn + ". Encountered: " +
-          (EOFSeen ? " " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") +
-          "after : \"" + addEscapes(errorAfter) + "\"");
-  }
-
-  /**
-   * You can also modify the body of this method to customize your error messages.
-   * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
-   * of end-users concern, so you can return something like :
-   *
-   *     "Internal Error : Please file a bug report .... "
-   *
-   * from this method for such cases in the release version of your parser.
-   */
-  public String getMessage() {
-    return super.getMessage();
-  }
-
-  /*
-   * Constructors of various flavors follow.
-   */
-
-  public TokenMgrError() {
-  }
-
-  public TokenMgrError(String message, int reason) {
-    super(message);
-    errorCode = reason;
-  }
-
-  public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
-    this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
-  }
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeExtends.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeExtends.java (revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeExtends.java (working copy)
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Generated By:JJTree: Do not edit this line. DynamicSerDeExtends.java */
-
-package org.apache.hadoop.hive.serde2.dynamic_type;
-
-public class DynamicSerDeExtends extends SimpleNode {
-  public DynamicSerDeExtends(int id) {
-    super(id);
-  }
-
-  public DynamicSerDeExtends(thrift_grammar p, int id) {
-    super(p, id);
-  }
-
-}
Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTokenManager.java
===================================================================
--- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTokenManager.java (revision 712243)
+++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTokenManager.java (working copy)
@@ -1,1455 +0,0 @@
-/* Generated By:JJTree&JavaCC: Do not edit this line. thrift_grammarTokenManager.java */
-package org.apache.hadoop.hive.serde2.dynamic_type;
-import java.util.*;
-import java.io.*;
-import java.net.*;
-import com.facebook.thrift.protocol.*;
-import com.facebook.thrift.transport.*;
-import org.apache.hadoop.hive.serde2.dynamic_type.*;
-
-public class thrift_grammarTokenManager implements thrift_grammarConstants
-{
-  public java.io.PrintStream debugStream = System.out;
-  public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
-private final int jjStopStringLiteralDfa_0(int pos, long active0, long active1)
-{
-   switch (pos)
-   {
-      case 0:
-         if ((active0 & 0x7ffffffffff00L) != 0L)
-         {
-            jjmatchedKind = 53;
-            return 35;
-         }
-         return -1;
-      case 1:
-         if ((active0 & 0x7ffffffffff00L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 1;
-            return 35;
-         }
-         return -1;
-      case 2:
-         if ((active0 & 0x14380000000L) != 0L)
-            return 35;
-         if ((active0 & 0x7febc7fffff00L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 2;
-            return 35;
-         }
-         return -1;
-      case 3:
-         if ((active0 & 0x1008070000000L) != 0L)
-            return 35;
-         if ((active0 & 0x6fe3c0fffff00L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 3;
-            return 35;
-         }
-         return -1;
-      case 4:
-         if ((active0 & 0x23000000100L) != 0L)
-            return 35;
-         if ((active0 & 0x6fc0c0ffffe00L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 4;
-            return 35;
-         }
-         return -1;
-      case 5:
-         if ((active0 & 0x480c00000000L) != 0L)
-            return 35;
-         if ((active0 & 0x6b4000ffffe00L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 5;
-            return 35;
-         }
-         return -1;
-      case 6:
-         if ((active0 & 0xa40008400000L) != 0L)
-            return 35;
-         if ((active0 & 0x6100007bffe00L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 6;
-            return 35;
-         }
-         return -1;
-      case 7:
-         if ((active0 & 0x6000000001000L) != 0L)
-            return 35;
-         if ((active0 & 0x100007bfee00L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 7;
-            return 35;
-         }
-         return -1;
-      case 8:
-         if ((active0 & 0x3bdec00L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 8;
-            return 35;
-         }
-         if ((active0 & 0x100004020200L) != 0L)
-            return 35;
-         return -1;
-      case 9:
-         if ((active0 & 0x3bdec00L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 9;
-            return 35;
-         }
-         return -1;
-      case 10:
-         if ((active0 & 0x800L) != 0L)
-            return 35;
-         if ((active0 & 0x3bde400L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 10;
-            return 35;
-         }
-         return -1;
-      case 11:
-         if ((active0 & 0x1846000L) != 0L)
-            return 35;
-         if ((active0 & 0x2398400L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 11;
-            return 35;
-         }
-         return -1;
-      case 12:
-         if ((active0 & 0x2010400L) != 0L)
-            return 35;
-         if ((active0 & 0x388000L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 12;
-            return 35;
-         }
-         return -1;
-      case 13:
-         if ((active0 & 0x80000L) != 0L)
-            return 35;
-         if ((active0 & 0x308000L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 13;
-            return 35;
-         }
-         return -1;
-      case 14:
-         if ((active0 & 0x308000L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 14;
-            return 35;
-         }
-         return -1;
-      case 15:
-         if ((active0 & 0x208000L) != 0L)
-            return 35;
-         if ((active0 & 0x100000L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 15;
-            return 35;
-         }
-         return -1;
-      case 16:
-         if ((active0 & 0x100000L) != 0L)
-         {
-            jjmatchedKind = 53;
-            jjmatchedPos = 16;
-            return 35;
-         }
-         return -1;
-      default :
-         return -1;
-   }
-}
-private final int jjStartNfa_0(int pos, long active0, long active1)
-{
-   return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0, active1), pos + 1);
-}
-private final int jjStopAtPos(int pos, int kind)
-{
-   jjmatchedKind = kind;
-   jjmatchedPos = pos;
-   return pos + 1;
-}
-private final int jjStartNfaWithStates_0(int pos, int kind, int state)
-{
-   jjmatchedKind = kind;
-   jjmatchedPos = pos;
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) { return pos + 1; }
-   return jjMoveNfa_0(state, pos + 1);
-}
-private final int jjMoveStringLiteralDfa0_0()
-{
-   switch(curChar)
-   {
-      case 40:
-         return jjStopAtPos(0, 66);
-      case 41:
-         return jjStopAtPos(0, 67);
-      case 44:
-         return jjStopAtPos(0, 58);
-      case 58:
-         return jjStopAtPos(0, 65);
-      case 59:
-         return jjStopAtPos(0, 59);
-      case 60:
-         return jjStopAtPos(0, 68);
-      case 61:
-         return jjStopAtPos(0, 62);
-      case 62:
-         return jjStopAtPos(0, 69);
-      case 91:
-         return jjStopAtPos(0, 63);
-      case 93:
-         return jjStopAtPos(0, 64);
-      case 97:
-         return jjMoveStringLiteralDfa1_0(0x20000000000L);
-      case 98:
-         return jjMoveStringLiteralDfa1_0(0x60000000L);
-      case 99:
-         return jjMoveStringLiteralDfa1_0(0xdd00L);
-      case 100:
-         return jjMoveStringLiteralDfa1_0(0x400000000L);
-      case 101:
-         return jjMoveStringLiteralDfa1_0(0x1300000000000L);
-      case 105:
-         return jjMoveStringLiteralDfa1_0(0x388000000L);
-      case 106:
-         return jjMoveStringLiteralDfa1_0(0x2000L);
-      case 108:
-         return jjMoveStringLiteralDfa1_0(0x8000000000L);
-      case 109:
-         return jjMoveStringLiteralDfa1_0(0x4000000000L);
-      case 110:
-         return jjMoveStringLiteralDfa1_0(0x200L);
-      case 111:
-         return jjMoveStringLiteralDfa1_0(0x4000000000000L);
-      case 112:
-         return jjMoveStringLiteralDfa1_0(0x70000L);
-      case 114:
-         return jjMoveStringLiteralDfa1_0(0x2000000080000L);
-      case 115:
-         return jjMoveStringLiteralDfa1_0(0x893800300000L);
-      case 116:
-         return jjMoveStringLiteralDfa1_0(0x440000000000L);
-      case 118:
-         return jjMoveStringLiteralDfa1_0(0x10000000L);
-      case 120:
-         return jjMoveStringLiteralDfa1_0(0x7c00000L);
-      case 123:
-         return jjStopAtPos(0, 60);
-      case 125:
-         return jjStopAtPos(0, 61);
-      default :
-         return jjMoveNfa_0(0, 0);
-   }
-}
-private final int jjMoveStringLiteralDfa1_0(long active0)
-{
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(0, active0, 0L);
-      return 1;
-   }
-   switch(curChar)
-   {
-      case 49:
-         return jjMoveStringLiteralDfa2_0(active0, 0x80000000L);
-      case 51:
-         return jjMoveStringLiteralDfa2_0(active0, 0x100000000L);
-      case 54:
-         return jjMoveStringLiteralDfa2_0(active0, 0x200000000L);
-      case 97:
-         return jjMoveStringLiteralDfa2_0(active0, 0x4000002200L);
-      case 101:
-         return jjMoveStringLiteralDfa2_0(active0, 0x2812000040000L);
-      case 104:
-         return jjMoveStringLiteralDfa2_0(active0, 0x400000010000L);
-      case 105:
-         return jjMoveStringLiteralDfa2_0(active0, 0x8000000000L);
-      case 108:
-         return jjMoveStringLiteralDfa2_0(active0, 0x1000000000L);
-      case 109:
-         return jjMoveStringLiteralDfa2_0(active0, 0x300000L);
-      case 110:
-         return jjMoveStringLiteralDfa2_0(active0, 0x1000008000000L);
-      case 111:
-         return jjMoveStringLiteralDfa2_0(active0, 0x430004100L);
-      case 112:
-         return jjMoveStringLiteralDfa2_0(active0, 0x4000000001c00L);
-      case 115:
-         return jjMoveStringLiteralDfa2_0(active0, 0x20007c08000L);
-      case 116:
-         return jjMoveStringLiteralDfa2_0(active0, 0x80800000000L);
-      case 117:
-         return jjMoveStringLiteralDfa2_0(active0, 0x80000L);
-      case 120:
-         return jjMoveStringLiteralDfa2_0(active0, 0x300000000000L);
-      case 121:
-         return jjMoveStringLiteralDfa2_0(active0, 0x40040020000L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(0, active0, 0L);
-}
-private final int jjMoveStringLiteralDfa2_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(0, old0, 0L);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(1, active0, 0L);
-      return 2;
-   }
-   switch(curChar)
-   {
-      case 50:
-         if ((active0 & 0x100000000L) != 0L)
-            return jjStartNfaWithStates_0(2, 32, 35);
-         break;
-      case 52:
-         if ((active0 & 0x200000000L) != 0L)
-            return jjStartNfaWithStates_0(2, 33, 35);
-         break;
-      case 54:
-         if ((active0 & 0x80000000L) != 0L)
-            return jjStartNfaWithStates_0(2, 31, 35);
-         break;
-      case 95:
-         return jjMoveStringLiteralDfa3_0(active0, 0x20000L);
-      case 97:
-         return jjMoveStringLiteralDfa3_0(active0, 0x300000L);
-      case 98:
-         return jjMoveStringLiteralDfa3_0(active0, 0x80000L);
-      case 99:
-         return jjMoveStringLiteralDfa3_0(active0, 0x100008004000L);
-      case 100:
-         return jjMoveStringLiteralDfa3_0(active0, 0x7c00000L);
-      case 104:
-         return jjMoveStringLiteralDfa3_0(active0, 0x8000L);
-      case 105:
-         return jjMoveStringLiteralDfa3_0(active0, 0x1010000000L);
-      case 109:
-         return jjMoveStringLiteralDfa3_0(active0, 0x200L);
-      case 110:
-         return jjMoveStringLiteralDfa3_0(active0, 0x2000000100L);
-      case 111:
-         return jjMoveStringLiteralDfa3_0(active0, 0x20000000L);
-      case 112:
-         if ((active0 & 0x4000000000L) != 0L)
-            return jjStartNfaWithStates_0(2, 38, 35);
-         return jjMoveStringLiteralDfa3_0(active0, 0x40000011c00L);
-      case 113:
-         return jjMoveStringLiteralDfa3_0(active0, 0x2000000000000L);
-      case 114:
-         return jjMoveStringLiteralDfa3_0(active0, 0xc80800040000L);
-      case 115:
-         return jjMoveStringLiteralDfa3_0(active0, 0x8000000000L);
-      case 116:
-         if ((active0 & 0x10000000000L) != 0L)
-            return jjStartNfaWithStates_0(2, 40, 35);
-         return jjMoveStringLiteralDfa3_0(active0, 0x4200040000000L);
-      case 117:
-         return jjMoveStringLiteralDfa3_0(active0, 0x1000400000000L);
-      case 118:
-         return jjMoveStringLiteralDfa3_0(active0, 0x2000L);
-      case 121:
-         return jjMoveStringLiteralDfa3_0(active0, 0x20000000000L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(1, active0, 0L);
-}
-private final int jjMoveStringLiteralDfa3_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(1, old0, 0L);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(2, active0, 0L);
-      return 3;
-   }
-   switch(curChar)
-   {
-      case 95:
-         return jjMoveStringLiteralDfa4_0(active0, 0x7c11c00L);
-      case 97:
-         return jjMoveStringLiteralDfa4_0(active0, 0xa000L);
-      case 98:
-         return jjMoveStringLiteralDfa4_0(active0, 0x400000000L);
-      case 100:
-         if ((active0 & 0x10000000L) != 0L)
-            return jjStartNfaWithStates_0(3, 28, 35);
-         break;
-      case 101:
-         if ((active0 & 0x40000000L) != 0L)
-            return jjStartNfaWithStates_0(3, 30, 35);
-         return jjMoveStringLiteralDfa4_0(active0, 0x340000000200L);
-      case 105:
-         return jjMoveStringLiteralDfa4_0(active0, 0x4000800000000L);
-      case 108:
-         if ((active0 & 0x20000000L) != 0L)
-            return jjStartNfaWithStates_0(3, 29, 35);
-         return jjMoveStringLiteralDfa4_0(active0, 0x8340000L);
-      case 109:
-         if ((active0 & 0x1000000000000L) != 0L)
-            return jjStartNfaWithStates_0(3, 48, 35);
-         return jjMoveStringLiteralDfa4_0(active0, 0x20000L);
-      case 110:
-         return jjMoveStringLiteralDfa4_0(active0, 0x20000000000L);
-      case 111:
-         return jjMoveStringLiteralDfa4_0(active0, 0x400000004000L);
-      case 115:
-         return jjMoveStringLiteralDfa4_0(active0, 0x1000000100L);
-      case 116:
-         if ((active0 & 0x8000000000L) != 0L)
-            return jjStartNfaWithStates_0(3, 39, 35);
-         break;
-      case 117:
-         return jjMoveStringLiteralDfa4_0(active0, 0x2082000000000L);
-      case 118:
-         return jjMoveStringLiteralDfa4_0(active0, 0x800000000000L);
-      case 121:
-         return jjMoveStringLiteralDfa4_0(active0, 0x80000L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(2, active0, 0L);
-}
-private final int jjMoveStringLiteralDfa4_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(2, old0, 0L);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(3, active0, 0L);
-      return 4;
-   }
-   switch(curChar)
-   {
-      case 95:
-         return jjMoveStringLiteralDfa5_0(active0, 0xc2000L);
-      case 97:
-         return jjMoveStringLiteralDfa5_0(active0, 0x4404000L);
-      case 99:
-         if ((active0 & 0x20000000000L) != 0L)
-            return jjStartNfaWithStates_0(4, 41, 35);
-         return jjMoveStringLiteralDfa5_0(active0, 0x80000000000L);
-      case 100:
-         return jjMoveStringLiteralDfa5_0(active0, 0x40000000000L);
-      case 105:
-         return jjMoveStringLiteralDfa5_0(active0, 0x2800000000800L);
-      case 108:
-         return jjMoveStringLiteralDfa5_0(active0, 0x400300000L);
-      case 109:
-         if ((active0 & 0x2000000000L) != 0L)
-            return jjStartNfaWithStates_0(4, 37, 35);
-         break;
-      case 110:
-         return jjMoveStringLiteralDfa5_0(active0, 0x200803010400L);
-      case 111:
-         return jjMoveStringLiteralDfa5_0(active0, 0x4000000820000L);
-      case 112:
-         return jjMoveStringLiteralDfa5_0(active0, 0x100000000000L);
-      case 114:
-         return jjMoveStringLiteralDfa5_0(active0, 0x8000L);
-      case 115:
-         return jjMoveStringLiteralDfa5_0(active0, 0x200L);
-      case 116:
-         if ((active0 & 0x100L) != 0L)
-            return jjStartNfaWithStates_0(4, 8, 35);
-         else if ((active0 & 0x1000000000L) != 0L)
-            return jjStartNfaWithStates_0(4, 36, 35);
-         return jjMoveStringLiteralDfa5_0(active0, 0x1000L);
-      case 117:
-         return jjMoveStringLiteralDfa5_0(active0, 0x8000000L);
-      case 119:
-         return jjMoveStringLiteralDfa5_0(active0, 0x400000000000L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(3, active0, 0L);
-}
-private final int jjMoveStringLiteralDfa5_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(3, old0, 0L);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(4, active0, 0L);
-      return 5;
-   }
-   switch(curChar)
-   {
-      case 95:
-         return jjMoveStringLiteralDfa6_0(active0, 0x4000L);
-      case 97:
-         return jjMoveStringLiteralDfa6_0(active0, 0x2010400L);
-      case 99:
-         return jjMoveStringLiteralDfa6_0(active0, 0x800000000000L);
-      case 100:
-         return jjMoveStringLiteralDfa6_0(active0, 0x200008020000L);
-      case 101:
-         if ((active0 & 0x400000000L) != 0L)
-            return jjStartNfaWithStates_0(5, 34, 35);
-         return jjMoveStringLiteralDfa6_0(active0, 0x40000000000L);
-      case 103:
-         if ((active0 & 0x800000000L) != 0L)
-            return jjStartNfaWithStates_0(5, 35, 35);
-         break;
-      case 105:
-         return jjMoveStringLiteralDfa6_0(active0, 0x1000000L);
-      case 108:
-         return jjMoveStringLiteralDfa6_0(active0, 0x400000L);
-      case 110:
-         return jjMoveStringLiteralDfa6_0(active0, 0x4000000080800L);
-      case 112:
-         return jjMoveStringLiteralDfa6_0(active0, 0x84a200L);
-      case 114:
-         return jjMoveStringLiteralDfa6_0(active0, 0x2000000000000L);
-      case 115:
-         if ((active0 & 0x400000000000L) != 0L)
-            return jjStartNfaWithStates_0(5, 46, 35);
-         break;
-      case 116:
-         if ((active0 & 0x80000000000L) != 0L)
-            return jjStartNfaWithStates_0(5, 43, 35);
-         return jjMoveStringLiteralDfa6_0(active0, 0x100004300000L);
-      case 121:
-         return jjMoveStringLiteralDfa6_0(active0, 0x1000L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(4, active0, 0L);
-}
-private final int jjMoveStringLiteralDfa6_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(4, old0, 0L);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(5, active0, 0L);
-      return 6;
-   }
-   switch(curChar)
-   {
-      case 95:
-         return jjMoveStringLiteralDfa7_0(active0, 0x8000L);
-      case 97:
-         return jjMoveStringLiteralDfa7_0(active0, 0x40000003c2200L);
-      case 99:
-         return jjMoveStringLiteralDfa7_0(active0, 0x800L);
-      case 101:
-         if ((active0 & 0x8000000L) != 0L)
-            return jjStartNfaWithStates_0(6, 27, 35);
-         else if ((active0 & 0x800000000000L) != 0L)
-            return jjStartNfaWithStates_0(6, 47, 35);
-         return jjMoveStringLiteralDfa7_0(active0, 0x2000000000000L);
-      case 102:
-         if ((active0 & 0x40000000000L) != 0L)
-            return jjStartNfaWithStates_0(6, 42, 35);
-         break;
-      case 105:
-         return jjMoveStringLiteralDfa7_0(active0, 0x100000000000L);
-      case 108:
-         if ((active0 & 0x400000L) != 0L)
-            return jjStartNfaWithStates_0(6, 22, 35);
-         return jjMoveStringLiteralDfa7_0(active0, 0x1000000L);
-      case 109:
-         return jjMoveStringLiteralDfa7_0(active0, 0x2010400L);
-      case 112:
-         return jjMoveStringLiteralDfa7_0(active0, 0x5000L);
-      case 115:
-         if ((active0 & 0x200000000000L) != 0L)
-            return jjStartNfaWithStates_0(6, 45, 35);
-         break;
-      case 116:
-         return jjMoveStringLiteralDfa7_0(active0, 0x4800000L);
-      case 117:
-         return jjMoveStringLiteralDfa7_0(active0, 0x20000L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(5, active0, 0L);
-}
-private final int jjMoveStringLiteralDfa7_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(5, old0, 0L);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(6, active0, 0L);
-      return 7;
-   }
-   switch(curChar)
-   {
-      case 99:
-         return jjMoveStringLiteralDfa8_0(active0, 0x42200L);
-      case 100:
-         if ((active0 & 0x2000000000000L) != 0L)
-            return jjStartNfaWithStates_0(7, 49, 35);
-         break;
-      case 101:
-         if ((active0 & 0x1000L) != 0L)
-            return jjStartNfaWithStates_0(7, 12, 35);
-         return jjMoveStringLiteralDfa8_0(active0, 0x2010400L);
-      case 105:
-         return jjMoveStringLiteralDfa8_0(active0, 0x800000L);
-      case 108:
-         if ((active0 & 0x4000000000000L) != 0L)
-            return jjStartNfaWithStates_0(7, 50, 35);
-         return jjMoveStringLiteralDfa8_0(active0, 0x1320800L);
-      case 109:
-         return jjMoveStringLiteralDfa8_0(active0, 0x80000L);
-      case 110:
-         return jjMoveStringLiteralDfa8_0(active0, 0x8000L);
-      case 111:
-         return jjMoveStringLiteralDfa8_0(active0, 0x100000000000L);
-      case 114:
-         return jjMoveStringLiteralDfa8_0(active0, 0x4004000L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(6, active0, 0L);
-}
-private final int jjMoveStringLiteralDfa8_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(6, old0, 0L);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(7, active0, 0L);
-      return 8;
-   }
-   switch(curChar)
-   {
-      case 97:
-         return jjMoveStringLiteralDfa9_0(active0, 0x1008000L);
-      case 101:
-         if ((active0 & 0x200L) != 0L)
-            return jjStartNfaWithStates_0(8, 9, 35);
-         else if ((active0 & 0x20000L) != 0L)
-            return jjStartNfaWithStates_0(8, 17, 35);
-         return jjMoveStringLiteralDfa9_0(active0, 0x84000L);
-      case 107:
-         return jjMoveStringLiteralDfa9_0(active0, 0x342000L);
-      case 110:
-         if ((active0 & 0x100000000000L) != 0L)
-            return jjStartNfaWithStates_0(8, 44, 35);
-         break;
-      case 111:
-         return jjMoveStringLiteralDfa9_0(active0, 0x800000L);
-      case 115:
-         if ((active0 & 0x4000000L) != 0L)
-            return jjStartNfaWithStates_0(8, 26, 35);
-         return jjMoveStringLiteralDfa9_0(active0, 0x2010400L);
-      case 117:
-         return jjMoveStringLiteralDfa9_0(active0, 0x800L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(7, active0, 0L);
-}
-private final int jjMoveStringLiteralDfa9_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(7, old0, 0L);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(8, active0, 0L);
-      return 9;
-   }
-   switch(curChar)
-   {
-      case 95:
-         return jjMoveStringLiteralDfa10_0(active0, 0x300000L);
-      case 97:
-         return jjMoveStringLiteralDfa10_0(active0, 0x42000L);
-      case 98:
-         return jjMoveStringLiteralDfa10_0(active0, 0x1000000L);
-      case 100:
-         return jjMoveStringLiteralDfa10_0(active0, 0x800L);
-      case 102:
-         return jjMoveStringLiteralDfa10_0(active0, 0x4000L);
-      case 109:
-         return jjMoveStringLiteralDfa10_0(active0, 0x8000L);
-      case 110:
-         return jjMoveStringLiteralDfa10_0(active0, 0x800000L);
-      case 112:
-         return jjMoveStringLiteralDfa10_0(active0, 0x2010400L);
-      case 115:
-         return jjMoveStringLiteralDfa10_0(active0, 0x80000L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(8, active0, 0L);
-}
-private final int jjMoveStringLiteralDfa10_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(8, old0, 0L);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(9, active0, 0L);
-      return 10;
-   }
-   switch(curChar)
-   {
-      case 97:
-         return jjMoveStringLiteralDfa11_0(active0, 0x2810400L);
-      case 99:
-         return jjMoveStringLiteralDfa11_0(active0, 0x100000L);
-      case 101:
-         if ((active0 & 0x800L) != 0L)
-            return jjStartNfaWithStates_0(10, 11, 35);
-         return jjMoveStringLiteralDfa11_0(active0, 0x8000L);
-      case 103:
-         return jjMoveStringLiteralDfa11_0(active0, 0x42000L);
-      case 105:
-         return jjMoveStringLiteralDfa11_0(active0, 0x4000L);
-      case 108:
-         return jjMoveStringLiteralDfa11_0(active0, 0x1000000L);
-      case 112:
-         return
jjMoveStringLiteralDfa11_0(active0, 0x280000L); - default : - break; - } - return jjStartNfa_0(9, active0, 0L); -} -private final int jjMoveStringLiteralDfa11_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(9, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(10, active0, 0L); - return 11; - } - switch(curChar) - { - case 97: - return jjMoveStringLiteralDfa12_0(active0, 0x180000L); - case 99: - return jjMoveStringLiteralDfa12_0(active0, 0x2010400L); - case 101: - if ((active0 & 0x2000L) != 0L) - return jjStartNfaWithStates_0(11, 13, 35); - else if ((active0 & 0x40000L) != 0L) - return jjStartNfaWithStates_0(11, 18, 35); - else if ((active0 & 0x1000000L) != 0L) - return jjStartNfaWithStates_0(11, 24, 35); - break; - case 108: - if ((active0 & 0x800000L) != 0L) - return jjStartNfaWithStates_0(11, 23, 35); - break; - case 114: - return jjMoveStringLiteralDfa12_0(active0, 0x200000L); - case 115: - return jjMoveStringLiteralDfa12_0(active0, 0x8000L); - case 120: - if ((active0 & 0x4000L) != 0L) - return jjStartNfaWithStates_0(11, 14, 35); - break; - default : - break; - } - return jjStartNfa_0(10, active0, 0L); -} -private final int jjMoveStringLiteralDfa12_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(10, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(11, active0, 0L); - return 12; - } - switch(curChar) - { - case 99: - return jjMoveStringLiteralDfa13_0(active0, 0x80000L); - case 101: - if ((active0 & 0x400L) != 0L) - return jjStartNfaWithStates_0(12, 10, 35); - else if ((active0 & 0x10000L) != 0L) - return jjStartNfaWithStates_0(12, 16, 35); - else if ((active0 & 0x2000000L) != 0L) - return jjStartNfaWithStates_0(12, 25, 35); - return jjMoveStringLiteralDfa13_0(active0, 0x200000L); - case 112: - return jjMoveStringLiteralDfa13_0(active0, 0x8000L); - case 116: - return jjMoveStringLiteralDfa13_0(active0, 0x100000L); - default : - break; - } - return jjStartNfa_0(11, active0, 0L); -} -private final int jjMoveStringLiteralDfa13_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(11, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(12, active0, 0L); - return 13; - } - switch(curChar) - { - case 97: - return jjMoveStringLiteralDfa14_0(active0, 0x8000L); - case 101: - if ((active0 & 0x80000L) != 0L) - return jjStartNfaWithStates_0(13, 19, 35); - return jjMoveStringLiteralDfa14_0(active0, 0x100000L); - case 102: - return jjMoveStringLiteralDfa14_0(active0, 0x200000L); - default : - break; - } - return jjStartNfa_0(12, active0, 0L); -} -private final int jjMoveStringLiteralDfa14_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(12, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(13, active0, 0L); - return 14; - } - switch(curChar) - { - case 99: - return jjMoveStringLiteralDfa15_0(active0, 0x8000L); - case 103: - return jjMoveStringLiteralDfa15_0(active0, 0x100000L); - case 105: - return jjMoveStringLiteralDfa15_0(active0, 0x200000L); - default : - break; - } - return jjStartNfa_0(13, active0, 0L); -} -private final int jjMoveStringLiteralDfa15_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(13, old0, 0L); - try { curChar = input_stream.readChar(); } - 
catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(14, active0, 0L); - return 15; - } - switch(curChar) - { - case 101: - if ((active0 & 0x8000L) != 0L) - return jjStartNfaWithStates_0(15, 15, 35); - break; - case 111: - return jjMoveStringLiteralDfa16_0(active0, 0x100000L); - case 120: - if ((active0 & 0x200000L) != 0L) - return jjStartNfaWithStates_0(15, 21, 35); - break; - default : - break; - } - return jjStartNfa_0(14, active0, 0L); -} -private final int jjMoveStringLiteralDfa16_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(14, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(15, active0, 0L); - return 16; - } - switch(curChar) - { - case 114: - return jjMoveStringLiteralDfa17_0(active0, 0x100000L); - default : - break; - } - return jjStartNfa_0(15, active0, 0L); -} -private final int jjMoveStringLiteralDfa17_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjStartNfa_0(15, old0, 0L); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(16, active0, 0L); - return 17; - } - switch(curChar) - { - case 121: - if ((active0 & 0x100000L) != 0L) - return jjStartNfaWithStates_0(17, 20, 35); - break; - default : - break; - } - return jjStartNfa_0(16, active0, 0L); -} -private final void jjCheckNAdd(int state) -{ - if (jjrounds[state] != jjround) - { - jjstateSet[jjnewStateCnt++] = state; - jjrounds[state] = jjround; - } -} -private final void jjAddStates(int start, int end) -{ - do { - jjstateSet[jjnewStateCnt++] = jjnextStates[start]; - } while (start++ != end); -} -private final void jjCheckNAddTwoStates(int state1, int state2) -{ - jjCheckNAdd(state1); - jjCheckNAdd(state2); -} -private final void jjCheckNAddStates(int start, int end) -{ - do { - jjCheckNAdd(jjnextStates[start]); - } while (start++ != end); -} -private final void jjCheckNAddStates(int start) -{ - jjCheckNAdd(jjnextStates[start]); - jjCheckNAdd(jjnextStates[start + 1]); -} -static final long[] jjbitVec0 = { - 0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL -}; -private final int jjMoveNfa_0(int startState, int curPos) -{ - int[] nextStates; - int startsAt = 0; - jjnewStateCnt = 35; - int i = 1; - jjstateSet[0] = startState; - int j, kind = 0x7fffffff; - for (;;) - { - if (++jjround == 0x7fffffff) - ReInitRounds(); - if (curChar < 64) - { - long l = 1L << curChar; - MatchLoop: do - { - switch(jjstateSet[--i]) - { - case 35: - if ((0x3ff600000000000L & l) != 0L) - { - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - } - if ((0x3ff400000000000L & l) != 0L) - { - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - } - break; - case 0: - if ((0x3ff000000000000L & l) != 0L) - { - if (kind > 51) - kind = 51; - jjCheckNAdd(5); - } - else if ((0x280000000000L & l) != 0L) - jjCheckNAddStates(0, 2); - else if (curChar == 47) - jjAddStates(3, 4); - else if (curChar == 39) - jjCheckNAddTwoStates(12, 13); - else if (curChar == 34) - jjCheckNAddTwoStates(9, 10); - else if (curChar == 35) - jjCheckNAddStates(5, 7); - if (curChar == 45) - { - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - } - break; - case 1: - if ((0xfffffffffffffbffL & l) != 0L) - jjCheckNAddStates(5, 7); - break; - case 2: - if ((0x2400L & l) != 0L && kind > 5) - kind = 5; - break; - case 3: - if (curChar == 10 && kind > 5) - kind = 5; - break; - case 4: - if (curChar == 13) - jjstateSet[jjnewStateCnt++] = 3; - break; - case 5: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind 
> 51) - kind = 51; - jjCheckNAdd(5); - break; - case 7: - if ((0x3ff400000000000L & l) == 0L) - break; - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - break; - case 8: - if (curChar == 34) - jjCheckNAddTwoStates(9, 10); - break; - case 9: - if ((0xfffffffbffffffffL & l) != 0L) - jjCheckNAddTwoStates(9, 10); - break; - case 10: - if (curChar == 34 && kind > 56) - kind = 56; - break; - case 11: - if (curChar == 39) - jjCheckNAddTwoStates(12, 13); - break; - case 12: - if ((0xffffff7fffffffffL & l) != 0L) - jjCheckNAddTwoStates(12, 13); - break; - case 13: - if (curChar == 39 && kind > 56) - kind = 56; - break; - case 14: - if (curChar != 45) - break; - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - break; - case 15: - if ((0x3ff600000000000L & l) == 0L) - break; - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - break; - case 16: - if (curChar == 47) - jjAddStates(3, 4); - break; - case 17: - if (curChar == 47) - jjCheckNAddStates(8, 10); - break; - case 18: - if ((0xffffffffffffdbffL & l) != 0L) - jjCheckNAddStates(8, 10); - break; - case 19: - if ((0x2400L & l) != 0L && kind > 6) - kind = 6; - break; - case 20: - if (curChar == 10 && kind > 6) - kind = 6; - break; - case 21: - if (curChar == 13) - jjstateSet[jjnewStateCnt++] = 20; - break; - case 22: - if (curChar == 42) - jjCheckNAddTwoStates(23, 24); - break; - case 23: - if ((0xfffffbffffffffffL & l) != 0L) - jjCheckNAddTwoStates(23, 24); - break; - case 24: - if (curChar == 42) - jjAddStates(11, 12); - break; - case 25: - if ((0xffff7fffffffffffL & l) != 0L) - jjCheckNAddTwoStates(26, 24); - break; - case 26: - if ((0xfffffbffffffffffL & l) != 0L) - jjCheckNAddTwoStates(26, 24); - break; - case 27: - if (curChar == 47 && kind > 7) - kind = 7; - break; - case 28: - if ((0x280000000000L & l) != 0L) - jjCheckNAddStates(0, 2); - break; - case 29: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(29, 30); - break; - case 30: - if (curChar == 46) - jjCheckNAdd(31); - break; - case 31: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 52) - kind = 52; - jjCheckNAddTwoStates(31, 32); - break; - case 33: - if ((0x280000000000L & l) != 0L) - jjCheckNAdd(34); - break; - case 34: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 52) - kind = 52; - jjCheckNAdd(34); - break; - default : break; - } - } while(i != startsAt); - } - else if (curChar < 128) - { - long l = 1L << (curChar & 077); - MatchLoop: do - { - switch(jjstateSet[--i]) - { - case 35: - if ((0x7fffffe87fffffeL & l) != 0L) - { - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - } - if ((0x7fffffe87fffffeL & l) != 0L) - { - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - } - break; - case 0: - if ((0x7fffffe07fffffeL & l) != 0L) - { - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - } - if ((0x7fffffe07fffffeL & l) != 0L) - { - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - } - break; - case 1: - jjAddStates(5, 7); - break; - case 6: - if ((0x7fffffe07fffffeL & l) == 0L) - break; - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - break; - case 7: - if ((0x7fffffe87fffffeL & l) == 0L) - break; - if (kind > 53) - kind = 53; - jjCheckNAdd(7); - break; - case 9: - jjAddStates(13, 14); - break; - case 12: - jjAddStates(15, 16); - break; - case 14: - if ((0x7fffffe07fffffeL & l) == 0L) - break; - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - break; - case 15: - if ((0x7fffffe87fffffeL & l) == 0L) - break; - if (kind > 57) - kind = 57; - jjCheckNAdd(15); - break; - case 18: - jjAddStates(8, 10); - break; - case 23: - jjCheckNAddTwoStates(23, 24); - break; - 
case 25: - case 26: - jjCheckNAddTwoStates(26, 24); - break; - case 32: - if ((0x2000000020L & l) != 0L) - jjAddStates(17, 18); - break; - default : break; - } - } while(i != startsAt); - } - else - { - int i2 = (curChar & 0xff) >> 6; - long l2 = 1L << (curChar & 077); - MatchLoop: do - { - switch(jjstateSet[--i]) - { - case 1: - if ((jjbitVec0[i2] & l2) != 0L) - jjAddStates(5, 7); - break; - case 9: - if ((jjbitVec0[i2] & l2) != 0L) - jjAddStates(13, 14); - break; - case 12: - if ((jjbitVec0[i2] & l2) != 0L) - jjAddStates(15, 16); - break; - case 18: - if ((jjbitVec0[i2] & l2) != 0L) - jjAddStates(8, 10); - break; - case 23: - if ((jjbitVec0[i2] & l2) != 0L) - jjCheckNAddTwoStates(23, 24); - break; - case 25: - case 26: - if ((jjbitVec0[i2] & l2) != 0L) - jjCheckNAddTwoStates(26, 24); - break; - default : break; - } - } while(i != startsAt); - } - if (kind != 0x7fffffff) - { - jjmatchedKind = kind; - jjmatchedPos = curPos; - kind = 0x7fffffff; - } - ++curPos; - if ((i = jjnewStateCnt) == (startsAt = 35 - (jjnewStateCnt = startsAt))) - return curPos; - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { return curPos; } - } -} -static final int[] jjnextStates = { - 5, 29, 30, 17, 22, 1, 2, 4, 18, 19, 21, 25, 27, 9, 10, 12, - 13, 33, 34, -}; -public static final String[] jjstrLiteralImages = { -"", null, null, null, null, null, null, null, "\143\157\156\163\164", -"\156\141\155\145\163\160\141\143\145", "\143\160\160\137\156\141\155\145\163\160\141\143\145", -"\143\160\160\137\151\156\143\154\165\144\145", "\143\160\160\137\164\171\160\145", -"\152\141\166\141\137\160\141\143\153\141\147\145", "\143\157\143\157\141\137\160\162\145\146\151\170", -"\143\163\150\141\162\160\137\156\141\155\145\163\160\141\143\145", "\160\150\160\137\156\141\155\145\163\160\141\143\145", -"\160\171\137\155\157\144\165\154\145", "\160\145\162\154\137\160\141\143\153\141\147\145", -"\162\165\142\171\137\156\141\155\145\163\160\141\143\145", "\163\155\141\154\154\164\141\154\153\137\143\141\164\145\147\157\162\171", -"\163\155\141\154\154\164\141\154\153\137\160\162\145\146\151\170", "\170\163\144\137\141\154\154", -"\170\163\144\137\157\160\164\151\157\156\141\154", "\170\163\144\137\156\151\154\154\141\142\154\145", -"\170\163\144\137\156\141\155\145\163\160\141\143\145", "\170\163\144\137\141\164\164\162\163", "\151\156\143\154\165\144\145", -"\166\157\151\144", "\142\157\157\154", "\142\171\164\145", "\151\61\66", "\151\63\62", -"\151\66\64", "\144\157\165\142\154\145", "\163\164\162\151\156\147", -"\163\154\151\163\164", "\163\145\156\165\155", "\155\141\160", "\154\151\163\164", "\163\145\164", -"\141\163\171\156\143", "\164\171\160\145\144\145\146", "\163\164\162\165\143\164", -"\145\170\143\145\160\164\151\157\156", "\145\170\164\145\156\144\163", "\164\150\162\157\167\163", -"\163\145\162\166\151\143\145", "\145\156\165\155", "\162\145\161\165\151\162\145\144", -"\157\160\164\151\157\156\141\154", null, null, null, null, null, null, null, "\54", "\73", "\173", "\175", "\75", -"\133", "\135", "\72", "\50", "\51", "\74", "\76", }; -public static final String[] lexStateNames = { - "DEFAULT", -}; -static final long[] jjtoToken = { - 0xff3fffffffffff01L, 0x3fL, -}; -static final long[] jjtoSkip = { - 0xfeL, 0x0L, -}; -protected SimpleCharStream input_stream; -private final int[] jjrounds = new int[35]; -private final int[] jjstateSet = new int[70]; -protected char curChar; -public thrift_grammarTokenManager(SimpleCharStream stream){ - if (SimpleCharStream.staticFlag) - throw new 
Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer."); - input_stream = stream; -} -public thrift_grammarTokenManager(SimpleCharStream stream, int lexState){ - this(stream); - SwitchTo(lexState); -} -public void ReInit(SimpleCharStream stream) -{ - jjmatchedPos = jjnewStateCnt = 0; - curLexState = defaultLexState; - input_stream = stream; - ReInitRounds(); -} -private final void ReInitRounds() -{ - int i; - jjround = 0x80000001; - for (i = 35; i-- > 0;) - jjrounds[i] = 0x80000000; -} -public void ReInit(SimpleCharStream stream, int lexState) -{ - ReInit(stream); - SwitchTo(lexState); -} -public void SwitchTo(int lexState) -{ - if (lexState >= 1 || lexState < 0) - throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE); - else - curLexState = lexState; -} - -protected Token jjFillToken() -{ - Token t = Token.newToken(jjmatchedKind); - t.kind = jjmatchedKind; - String im = jjstrLiteralImages[jjmatchedKind]; - t.image = (im == null) ? input_stream.GetImage() : im; - t.beginLine = input_stream.getBeginLine(); - t.beginColumn = input_stream.getBeginColumn(); - t.endLine = input_stream.getEndLine(); - t.endColumn = input_stream.getEndColumn(); - return t; -} - -int curLexState = 0; -int defaultLexState = 0; -int jjnewStateCnt; -int jjround; -int jjmatchedPos; -int jjmatchedKind; - -public Token getNextToken() -{ - int kind; - Token specialToken = null; - Token matchedToken; - int curPos = 0; - - EOFLoop : - for (;;) - { - try - { - curChar = input_stream.BeginToken(); - } - catch(java.io.IOException e) - { - jjmatchedKind = 0; - matchedToken = jjFillToken(); - return matchedToken; - } - - try { input_stream.backup(0); - while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L) - curChar = input_stream.BeginToken(); - } - catch (java.io.IOException e1) { continue EOFLoop; } - jjmatchedKind = 0x7fffffff; - jjmatchedPos = 0; - curPos = jjMoveStringLiteralDfa0_0(); - if (jjmatchedKind != 0x7fffffff) - { - if (jjmatchedPos + 1 < curPos) - input_stream.backup(curPos - jjmatchedPos - 1); - if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) - { - matchedToken = jjFillToken(); - return matchedToken; - } - else - { - continue EOFLoop; - } - } - int error_line = input_stream.getEndLine(); - int error_column = input_stream.getEndColumn(); - String error_after = null; - boolean EOFSeen = false; - try { input_stream.readChar(); input_stream.backup(1); } - catch (java.io.IOException e1) { - EOFSeen = true; - error_after = curPos <= 1 ? "" : input_stream.GetImage(); - if (curChar == '\n' || curChar == '\r') { - error_line++; - error_column = 0; - } - else - error_column++; - } - if (!EOFSeen) { - input_stream.backup(1); - error_after = curPos <= 1 ? "" : input_stream.GetImage(); - } - throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR); - } -} - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java (working copy) @@ -1,439 +0,0 @@ -/* Generated By:JavaCC: Do not edit this line. 
SimpleCharStream.java Version 4.0 */ -package org.apache.hadoop.hive.serde2.dynamic_type; - -/** - * An implementation of interface CharStream, where the stream is assumed to - * contain only ASCII characters (without unicode processing). - */ - -public class SimpleCharStream -{ - public static final boolean staticFlag = false; - int bufsize; - int available; - int tokenBegin; - public int bufpos = -1; - protected int bufline[]; - protected int bufcolumn[]; - - protected int column = 0; - protected int line = 1; - - protected boolean prevCharIsCR = false; - protected boolean prevCharIsLF = false; - - protected java.io.Reader inputStream; - - protected char[] buffer; - protected int maxNextCharInd = 0; - protected int inBuf = 0; - protected int tabSize = 8; - - protected void setTabSize(int i) { tabSize = i; } - protected int getTabSize(int i) { return tabSize; } - - - protected void ExpandBuff(boolean wrapAround) - { - char[] newbuffer = new char[bufsize + 2048]; - int newbufline[] = new int[bufsize + 2048]; - int newbufcolumn[] = new int[bufsize + 2048]; - - try - { - if (wrapAround) - { - System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); - System.arraycopy(buffer, 0, newbuffer, - bufsize - tokenBegin, bufpos); - buffer = newbuffer; - - System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); - System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); - bufline = newbufline; - - System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); - System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); - bufcolumn = newbufcolumn; - - maxNextCharInd = (bufpos += (bufsize - tokenBegin)); - } - else - { - System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); - buffer = newbuffer; - - System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); - bufline = newbufline; - - System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); - bufcolumn = newbufcolumn; - - maxNextCharInd = (bufpos -= tokenBegin); - } - } - catch (Throwable t) - { - throw new Error(t.getMessage()); - } - - - bufsize += 2048; - available = bufsize; - tokenBegin = 0; - } - - protected void FillBuff() throws java.io.IOException - { - if (maxNextCharInd == available) - { - if (available == bufsize) - { - if (tokenBegin > 2048) - { - bufpos = maxNextCharInd = 0; - available = tokenBegin; - } - else if (tokenBegin < 0) - bufpos = maxNextCharInd = 0; - else - ExpandBuff(false); - } - else if (available > tokenBegin) - available = bufsize; - else if ((tokenBegin - available) < 2048) - ExpandBuff(true); - else - available = tokenBegin; - } - - int i; - try { - if ((i = inputStream.read(buffer, maxNextCharInd, - available - maxNextCharInd)) == -1) - { - inputStream.close(); - throw new java.io.IOException(); - } - else - maxNextCharInd += i; - return; - } - catch(java.io.IOException e) { - --bufpos; - backup(0); - if (tokenBegin == -1) - tokenBegin = bufpos; - throw e; - } - } - - public char BeginToken() throws java.io.IOException - { - tokenBegin = -1; - char c = readChar(); - tokenBegin = bufpos; - - return c; - } - - protected void UpdateLineColumn(char c) - { - column++; - - if (prevCharIsLF) - { - prevCharIsLF = false; - line += (column = 1); - } - else if (prevCharIsCR) - { - prevCharIsCR = false; - if (c == '\n') - { - prevCharIsLF = true; - } - else - line += (column = 1); - } - - switch (c) - { - case '\r' : - prevCharIsCR = true; - break; - case '\n' : - prevCharIsLF = true; - 
break; - case '\t' : - column--; - column += (tabSize - (column % tabSize)); - break; - default : - break; - } - - bufline[bufpos] = line; - bufcolumn[bufpos] = column; - } - - public char readChar() throws java.io.IOException - { - if (inBuf > 0) - { - --inBuf; - - if (++bufpos == bufsize) - bufpos = 0; - - return buffer[bufpos]; - } - - if (++bufpos >= maxNextCharInd) - FillBuff(); - - char c = buffer[bufpos]; - - UpdateLineColumn(c); - return (c); - } - - /** - * @deprecated - * @see #getEndColumn - */ - - public int getColumn() { - return bufcolumn[bufpos]; - } - - /** - * @deprecated - * @see #getEndLine - */ - - public int getLine() { - return bufline[bufpos]; - } - - public int getEndColumn() { - return bufcolumn[bufpos]; - } - - public int getEndLine() { - return bufline[bufpos]; - } - - public int getBeginColumn() { - return bufcolumn[tokenBegin]; - } - - public int getBeginLine() { - return bufline[tokenBegin]; - } - - public void backup(int amount) { - - inBuf += amount; - if ((bufpos -= amount) < 0) - bufpos += bufsize; - } - - public SimpleCharStream(java.io.Reader dstream, int startline, - int startcolumn, int buffersize) - { - inputStream = dstream; - line = startline; - column = startcolumn - 1; - - available = bufsize = buffersize; - buffer = new char[buffersize]; - bufline = new int[buffersize]; - bufcolumn = new int[buffersize]; - } - - public SimpleCharStream(java.io.Reader dstream, int startline, - int startcolumn) - { - this(dstream, startline, startcolumn, 4096); - } - - public SimpleCharStream(java.io.Reader dstream) - { - this(dstream, 1, 1, 4096); - } - public void ReInit(java.io.Reader dstream, int startline, - int startcolumn, int buffersize) - { - inputStream = dstream; - line = startline; - column = startcolumn - 1; - - if (buffer == null || buffersize != buffer.length) - { - available = bufsize = buffersize; - buffer = new char[buffersize]; - bufline = new int[buffersize]; - bufcolumn = new int[buffersize]; - } - prevCharIsLF = prevCharIsCR = false; - tokenBegin = inBuf = maxNextCharInd = 0; - bufpos = -1; - } - - public void ReInit(java.io.Reader dstream, int startline, - int startcolumn) - { - ReInit(dstream, startline, startcolumn, 4096); - } - - public void ReInit(java.io.Reader dstream) - { - ReInit(dstream, 1, 1, 4096); - } - public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, - int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException - { - this(encoding == null ? 
new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); - } - - public SimpleCharStream(java.io.InputStream dstream, int startline, - int startcolumn, int buffersize) - { - this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); - } - - public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, - int startcolumn) throws java.io.UnsupportedEncodingException - { - this(dstream, encoding, startline, startcolumn, 4096); - } - - public SimpleCharStream(java.io.InputStream dstream, int startline, - int startcolumn) - { - this(dstream, startline, startcolumn, 4096); - } - - public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException - { - this(dstream, encoding, 1, 1, 4096); - } - - public SimpleCharStream(java.io.InputStream dstream) - { - this(dstream, 1, 1, 4096); - } - - public void ReInit(java.io.InputStream dstream, String encoding, int startline, - int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException - { - ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); - } - - public void ReInit(java.io.InputStream dstream, int startline, - int startcolumn, int buffersize) - { - ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); - } - - public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException - { - ReInit(dstream, encoding, 1, 1, 4096); - } - - public void ReInit(java.io.InputStream dstream) - { - ReInit(dstream, 1, 1, 4096); - } - public void ReInit(java.io.InputStream dstream, String encoding, int startline, - int startcolumn) throws java.io.UnsupportedEncodingException - { - ReInit(dstream, encoding, startline, startcolumn, 4096); - } - public void ReInit(java.io.InputStream dstream, int startline, - int startcolumn) - { - ReInit(dstream, startline, startcolumn, 4096); - } - public String GetImage() - { - if (bufpos >= tokenBegin) - return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); - else - return new String(buffer, tokenBegin, bufsize - tokenBegin) + - new String(buffer, 0, bufpos + 1); - } - - public char[] GetSuffix(int len) - { - char[] ret = new char[len]; - - if ((bufpos + 1) >= len) - System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); - else - { - System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, - len - bufpos - 1); - System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); - } - - return ret; - } - - public void Done() - { - buffer = null; - bufline = null; - bufcolumn = null; - } - - /** - * Method to adjust line and column numbers for the start of a token. 
- */ - public void adjustBeginLineColumn(int newLine, int newCol) - { - int start = tokenBegin; - int len; - - if (bufpos >= tokenBegin) - { - len = bufpos - tokenBegin + inBuf + 1; - } - else - { - len = bufsize - tokenBegin + bufpos + 1 + inBuf; - } - - int i = 0, j = 0, k = 0; - int nextColDiff = 0, columnDiff = 0; - - while (i < len && - bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) - { - bufline[j] = newLine; - nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; - bufcolumn[j] = newCol + columnDiff; - columnDiff = nextColDiff; - i++; - } - - if (i < len) - { - bufline[j] = newLine++; - bufcolumn[j] = newCol + columnDiff; - - while (i++ < len) - { - if (bufline[j = start % bufsize] != bufline[++start % bufsize]) - bufline[j] = newLine++; - else - bufline[j] = newLine; - } - } - - line = bufline[j]; - column = bufcolumn[j]; - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeAsync.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeAsync.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeAsync.java (working copy) @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Generated By:JJTree: Do not edit this line. DynamicSerDeAsync.java */ - -package org.apache.hadoop.hive.serde2.dynamic_type; - -public class DynamicSerDeAsync extends SimpleNode { - public DynamicSerDeAsync(int id) { - super(id); - } - - public DynamicSerDeAsync(thrift_grammar p, int id) { - super(p, id); - } - -} Index: src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java =================================================================== --- src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java (revision 712243) +++ src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java (working copy) @@ -1,2290 +0,0 @@ -/* Generated By:JJTree&JavaCC: Do not edit this line. 
thrift_grammar.java */ -package org.apache.hadoop.hive.serde2.dynamic_type; - -import java.util.*; -import java.io.*; -import java.net.*; -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.transport.*; -import org.apache.hadoop.hive.serde2.dynamic_type.*; - -public class thrift_grammar/*@bgen(jjtree)*/implements thrift_grammarTreeConstants, thrift_grammarConstants {/*@bgen(jjtree)*/ - protected JJTthrift_grammarState jjtree = new JJTthrift_grammarState(); - private List include_path = null; - - // for computing the autogenerated field ids in thrift - private int field_val; - - // store types and tables - // separately because one cannot use a table (ie service.method) as a Struct like type. - protected Map types; - protected Map tables; - - // system include path - final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" }; - - // need three params to differentiate between this and 2 param method auto generated since - // some calls in the autogenerated code use null param for 2nd param and thus ambiguous. - protected thrift_grammar(InputStream is, List include_path, boolean junk) { - this(is,null); - this.types = new HashMap () ; - this.tables = new HashMap () ; - this.include_path = include_path; - this.field_val = -1; - } - - // find the file on the include path - private static File findFile(String fname, List include_path) { - for(String path: include_path) { - final String full = path + "/" + fname; - File f = new File(full); - if(f.exists()) { - return f; - } - } - return null; - } - - public static void main(String args[]) { - String filename = null; - List include_path = new ArrayList(); - - for(String path: default_include_path) { - include_path.add(path); - } - for(int i = 0; i < args.length; i++) { - String arg = args[i]; - if(arg.equals("--include") && i + 1 < args.length) { - include_path.add(args[++i]); - } - if(arg.equals("--file") && i + 1 < args.length) { - filename = args[++i]; - } - } - - InputStream is = System.in; - if(filename != null) { - try { - is = new FileInputStream(findFile(filename, include_path)); - } catch(IOException e) { - } - } - thrift_grammar t = new thrift_grammar(is,include_path,false); - - try { - t.Start(); - } catch (Exception e) { - System.out.println("Parse error."); - System.out.println(e.getMessage()); - e.printStackTrace(); - } - } - - final public SimpleNode Start() throws ParseException { - /*@bgen(jjtree) Start */ - DynamicSerDeStart jjtn000 = new DynamicSerDeStart(JJTSTART); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - HeaderList(); - label_1: - while (true) { - Definition(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_const: - case tok_senum: - case tok_typedef: - case tok_struct: - case tok_exception: - case tok_service: - case tok_enum: - ; - break; - default: - jj_la1[0] = jj_gen; - break label_1; - } - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - 
final public SimpleNode HeaderList() throws ParseException { - /*@bgen(jjtree) HeaderList */ - DynamicSerDeHeaderList jjtn000 = new DynamicSerDeHeaderList(JJTHEADERLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - label_2: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_namespace: - case tok_cpp_namespace: - case tok_cpp_include: - case tok_java_package: - case tok_cocoa_prefix: - case tok_csharp_namespace: - case tok_php_namespace: - case tok_py_module: - case tok_perl_package: - case tok_ruby_namespace: - case tok_smalltalk_category: - case tok_smalltalk_prefix: - case tok_xsd_namespace: - case tok_include: - ; - break; - default: - jj_la1[1] = jj_gen; - break label_2; - } - Header(); - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Header() throws ParseException { - /*@bgen(jjtree) Header */ - DynamicSerDeHeader jjtn000 = new DynamicSerDeHeader(JJTHEADER); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_include: - Include(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_namespace: - case tok_cpp_namespace: - case tok_cpp_include: - case tok_java_package: - case tok_cocoa_prefix: - case tok_csharp_namespace: - case tok_php_namespace: - case tok_py_module: - case tok_perl_package: - case tok_ruby_namespace: - case tok_smalltalk_category: - case tok_smalltalk_prefix: - case tok_xsd_namespace: - Namespace(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[2] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Namespace() throws ParseException { - /*@bgen(jjtree) Namespace */ - DynamicSerDeNamespace jjtn000 = new DynamicSerDeNamespace(JJTNAMESPACE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_namespace: - jj_consume_token(tok_namespace); - jj_consume_token(IDENTIFIER); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_cpp_namespace: - jj_consume_token(tok_cpp_namespace); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_cpp_include: - jj_consume_token(tok_cpp_include); - 
jj_consume_token(tok_literal); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_php_namespace: - jj_consume_token(tok_php_namespace); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_py_module: - jj_consume_token(tok_py_module); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_perl_package: - jj_consume_token(tok_perl_package); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_ruby_namespace: - jj_consume_token(tok_ruby_namespace); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_smalltalk_category: - jj_consume_token(tok_smalltalk_category); - jj_consume_token(tok_st_identifier); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_smalltalk_prefix: - jj_consume_token(tok_smalltalk_prefix); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_java_package: - jj_consume_token(tok_java_package); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_cocoa_prefix: - jj_consume_token(tok_cocoa_prefix); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_xsd_namespace: - jj_consume_token(tok_xsd_namespace); - jj_consume_token(tok_literal); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_csharp_namespace: - jj_consume_token(tok_csharp_namespace); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[3] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Include() throws ParseException { - /*@bgen(jjtree) Include */ - DynamicSerDeInclude jjtn000 = new DynamicSerDeInclude(JJTINCLUDE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000);String fname; - boolean found = false; - try { - jj_consume_token(tok_include); - fname = jj_consume_token(tok_literal).image; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - // bugbug somewhat fragile below substring expression - fname = fname.substring(1,fname.length() - 1); - - // try to find the file on the include path - File f = thrift_grammar.findFile(fname, this.include_path); - if(f != null) { - found = true; - try { - FileInputStream fis = new FileInputStream(f); - thrift_grammar t = new thrift_grammar(fis,this.include_path, false); - t.Start(); - fis.close(); - found = true; - // add in what we found to our type and table tables. 
- this.tables.putAll(t.tables); - this.types.putAll(t.types); - } catch (Exception e) { - System.out.println("File: " + fname + " - Oops."); - System.out.println(e.getMessage()); - e.printStackTrace(); - } - } - if(!found) { - {if (true) throw new RuntimeException("include file not found: " + fname);} - } - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Definition() throws ParseException { - /*@bgen(jjtree) Definition */ - DynamicSerDeDefinition jjtn000 = new DynamicSerDeDefinition(JJTDEFINITION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_const: - Const(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_service: - Service(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_senum: - case tok_typedef: - case tok_struct: - case tok_exception: - case tok_enum: - TypeDefinition(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[4] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode TypeDefinition() throws ParseException { - /*@bgen(jjtree) TypeDefinition */ - DynamicSerDeTypeDefinition jjtn000 = new DynamicSerDeTypeDefinition(JJTTYPEDEFINITION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_typedef: - Typedef(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_enum: - Enum(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_senum: - Senum(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_struct: - Struct(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_exception: - Xception(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[5] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypedef Typedef() throws ParseException { - /*@bgen(jjtree) Typedef */ - DynamicSerDeTypedef jjtn000 = new DynamicSerDeTypedef(JJTTYPEDEF); - boolean 
jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_typedef); - DefinitionType(); - jjtn000.name = jj_consume_token(IDENTIFIER).image; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - // store the type for later retrieval - this.types.put(jjtn000.name, jjtn000); - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - -// returning void because we ignore this production. - final public void CommaOrSemicolon() throws ParseException { - /*@bgen(jjtree) CommaOrSemicolon */ - DynamicSerDeCommaOrSemicolon jjtn000 = new DynamicSerDeCommaOrSemicolon(JJTCOMMAORSEMICOLON); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - jj_consume_token(58); - break; - case 59: - jj_consume_token(59); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - default: - jj_la1[6] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - } - - final public SimpleNode Enum() throws ParseException { - /*@bgen(jjtree) Enum */ - DynamicSerDeEnum jjtn000 = new DynamicSerDeEnum(JJTENUM); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_enum); - jj_consume_token(IDENTIFIER); - jj_consume_token(60); - EnumDefList(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode EnumDefList() throws ParseException { - /*@bgen(jjtree) EnumDefList */ - DynamicSerDeEnumDefList jjtn000 = new DynamicSerDeEnumDefList(JJTENUMDEFLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - label_3: - while (true) { - EnumDef(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case IDENTIFIER: - ; - break; - default: - jj_la1[7] = jj_gen; - break label_3; - } - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode EnumDef() throws ParseException { - /*@bgen(jjtree) EnumDef */ 
- DynamicSerDeEnumDef jjtn000 = new DynamicSerDeEnumDef(JJTENUMDEF); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(IDENTIFIER); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 62: - jj_consume_token(62); - jj_consume_token(tok_int_constant); - break; - default: - jj_la1[8] = jj_gen; - ; - } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[9] = jj_gen; - ; - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Senum() throws ParseException { - /*@bgen(jjtree) Senum */ - DynamicSerDeSenum jjtn000 = new DynamicSerDeSenum(JJTSENUM); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_senum); - jj_consume_token(IDENTIFIER); - jj_consume_token(60); - SenumDefList(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode SenumDefList() throws ParseException { - /*@bgen(jjtree) SenumDefList */ - DynamicSerDeSenumDefList jjtn000 = new DynamicSerDeSenumDefList(JJTSENUMDEFLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - label_4: - while (true) { - SenumDef(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_literal: - ; - break; - default: - jj_la1[10] = jj_gen; - break label_4; - } - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode SenumDef() throws ParseException { - /*@bgen(jjtree) SenumDef */ - DynamicSerDeSenumDef jjtn000 = new DynamicSerDeSenumDef(JJTSENUMDEF); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_literal); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[11] = jj_gen; - ; - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if 
(jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Const() throws ParseException { - /*@bgen(jjtree) Const */ - DynamicSerDeConst jjtn000 = new DynamicSerDeConst(JJTCONST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_const); - FieldType(); - jj_consume_token(IDENTIFIER); - jj_consume_token(62); - ConstValue(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[12] = jj_gen; - ; - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode ConstValue() throws ParseException { - /*@bgen(jjtree) ConstValue */ - DynamicSerDeConstValue jjtn000 = new DynamicSerDeConstValue(JJTCONSTVALUE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_int_constant: - jj_consume_token(tok_int_constant); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - case tok_double_constant: - jj_consume_token(tok_double_constant); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - case tok_literal: - jj_consume_token(tok_literal); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - case IDENTIFIER: - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - case 63: - ConstList(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - case 60: - ConstMap(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[13] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode ConstList() throws ParseException { - /*@bgen(jjtree) ConstList */ - DynamicSerDeConstList jjtn000 = new DynamicSerDeConstList(JJTCONSTLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(63); - ConstListContents(); - jj_consume_token(64); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch 
(Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode ConstListContents() throws ParseException { - /*@bgen(jjtree) ConstListContents */ - DynamicSerDeConstListContents jjtn000 = new DynamicSerDeConstListContents(JJTCONSTLISTCONTENTS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - label_5: - while (true) { - ConstValue(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[14] = jj_gen; - ; - } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_int_constant: - case tok_double_constant: - case IDENTIFIER: - case tok_literal: - case 60: - case 63: - ; - break; - default: - jj_la1[15] = jj_gen; - break label_5; - } - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode ConstMap() throws ParseException { - /*@bgen(jjtree) ConstMap */ - DynamicSerDeConstMap jjtn000 = new DynamicSerDeConstMap(JJTCONSTMAP); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(60); - ConstMapContents(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode ConstMapContents() throws ParseException { - /*@bgen(jjtree) ConstMapContents */ - DynamicSerDeConstMapContents jjtn000 = new DynamicSerDeConstMapContents(JJTCONSTMAPCONTENTS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_int_constant: - case tok_double_constant: - case IDENTIFIER: - case tok_literal: - case 60: - case 63: - label_6: - while (true) { - ConstValue(); - jj_consume_token(65); - ConstValue(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[16] = jj_gen; - ; - } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_int_constant: - case tok_double_constant: - case IDENTIFIER: - case tok_literal: - case 60: - case 63: - ; - break; - default: - jj_la1[17] = jj_gen; - break label_6; - } - } - 
jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - default: - jj_la1[18] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeStruct Struct() throws ParseException { - /*@bgen(jjtree) Struct */ - DynamicSerDeStruct jjtn000 = new DynamicSerDeStruct(JJTSTRUCT); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_struct); - jjtn000.name = jj_consume_token(IDENTIFIER).image; - jj_consume_token(60); - FieldList(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - this.types.put(jjtn000.name,jjtn000); - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Xception() throws ParseException { - /*@bgen(jjtree) Xception */ - DynamicSerDeXception jjtn000 = new DynamicSerDeXception(JJTXCEPTION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_exception); - jj_consume_token(IDENTIFIER); - jj_consume_token(60); - FieldList(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Service() throws ParseException { - /*@bgen(jjtree) Service */ - DynamicSerDeService jjtn000 = new DynamicSerDeService(JJTSERVICE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_service); - jj_consume_token(IDENTIFIER); - Extends(); - jj_consume_token(60); - FlagArgs(); - label_7: - while (true) { - Function(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_void: - case tok_bool: - case tok_i16: - case tok_i32: - case tok_i64: - case tok_double: - case tok_string: - case tok_map: - case tok_list: - case tok_set: - case tok_async: - case IDENTIFIER: - ; - break; - default: - jj_la1[19] = jj_gen; - break label_7; - } - } - UnflagArgs(); - jj_consume_token(61); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - // at some point, these should be inserted as a "db" - {if (true) return jjtn000;} - } 
catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode FlagArgs() throws ParseException { - /*@bgen(jjtree) FlagArgs */ - DynamicSerDeFlagArgs jjtn000 = new DynamicSerDeFlagArgs(JJTFLAGARGS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode UnflagArgs() throws ParseException { - /*@bgen(jjtree) UnflagArgs */ - DynamicSerDeUnflagArgs jjtn000 = new DynamicSerDeUnflagArgs(JJTUNFLAGARGS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode Extends() throws ParseException { - /*@bgen(jjtree) Extends */ - DynamicSerDeExtends jjtn000 = new DynamicSerDeExtends(JJTEXTENDS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_extends: - jj_consume_token(tok_extends); - jj_consume_token(IDENTIFIER); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[20] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeFunction Function() throws ParseException { - /*@bgen(jjtree) Function */ - DynamicSerDeFunction jjtn000 = new DynamicSerDeFunction(JJTFUNCTION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - Async(); - FunctionType(); - // the name of the function/table - jjtn000.name = jj_consume_token(IDENTIFIER).image; - jj_consume_token(66); - FieldList(); - jj_consume_token(67); - Throws(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[21] = jj_gen; - ; - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - this.tables.put(jjtn000.name, jjtn000); - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public void Async() throws ParseException { - /*@bgen(jjtree) Async */ - DynamicSerDeAsync jjtn000 = new DynamicSerDeAsync(JJTASYNC); - boolean jjtc000 = true; - 
jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_async: - jj_consume_token(tok_async); - break; - default: - jj_la1[22] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - } - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - } - - final public void Throws() throws ParseException { - /*@bgen(jjtree) Throws */ - DynamicSerDeThrows jjtn000 = new DynamicSerDeThrows(JJTTHROWS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_throws: - jj_consume_token(tok_throws); - jj_consume_token(66); - FieldList(); - jj_consume_token(67); - break; - default: - jj_la1[23] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - } - -// nothing special - just use the DynamicSerDeFieldList's children methods to access the fields - final public DynamicSerDeFieldList FieldList() throws ParseException { - /*@bgen(jjtree) FieldList */ - DynamicSerDeFieldList jjtn000 = new DynamicSerDeFieldList(JJTFIELDLIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000);this.field_val = -1; - try { - label_8: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_bool: - case tok_i16: - case tok_i32: - case tok_i64: - case tok_double: - case tok_string: - case tok_map: - case tok_list: - case tok_set: - case tok_required: - case tok_optional: - case tok_int_constant: - case IDENTIFIER: - ; - break; - default: - jj_la1[24] = jj_gen; - break label_8; - } - Field(); - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeField Field() throws ParseException { - /*@bgen(jjtree) Field */ - DynamicSerDeField jjtn000 = new DynamicSerDeField(JJTFIELD); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000);String fidnum = ""; - String fid; - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_int_constant: - fidnum = jj_consume_token(tok_int_constant).image; - jj_consume_token(65); - break; - default: - jj_la1[25] = jj_gen; - ; - } - FieldRequiredness(); - FieldType(); - // the name of the field - not optional - jjtn000.name = jj_consume_token(IDENTIFIER).image; - FieldValue(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 58: - case 59: - CommaOrSemicolon(); - break; - default: - jj_la1[26] = jj_gen; - ; - } - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - if(fidnum.length() > 0) { - int fidInt = Integer.valueOf(fidnum); - jjtn000.fieldid = fidInt; - } else { - jjtn000.fieldid = this.field_val--; - } - {if (true) 
return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode FieldRequiredness() throws ParseException { - /*@bgen(jjtree) FieldRequiredness */ - DynamicSerDeFieldRequiredness jjtn000 = new DynamicSerDeFieldRequiredness(JJTFIELDREQUIREDNESS); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_required: - jj_consume_token(tok_required); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_optional: - jj_consume_token(tok_optional); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[27] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode FieldValue() throws ParseException { - /*@bgen(jjtree) FieldValue */ - DynamicSerDeFieldValue jjtn000 = new DynamicSerDeFieldValue(JJTFIELDVALUE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case 62: - jj_consume_token(62); - ConstValue(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[28] = jj_gen; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public SimpleNode DefinitionType() throws ParseException { - /*@bgen(jjtree) DefinitionType */ - DynamicSerDeDefinitionType jjtn000 = new DynamicSerDeDefinitionType(JJTDEFINITIONTYPE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_string: - TypeString(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_bool: - TypeBool(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i16: - Typei16(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i32: - Typei32(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i64: - Typei64(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_double: - TypeDouble(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - 
break; - case tok_map: - TypeMap(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_set: - TypeSet(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_list: - TypeList(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[29] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public void FunctionType() throws ParseException { - /*@bgen(jjtree) FunctionType */ - DynamicSerDeFunctionType jjtn000 = new DynamicSerDeFunctionType(JJTFUNCTIONTYPE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_bool: - case tok_i16: - case tok_i32: - case tok_i64: - case tok_double: - case tok_string: - case tok_map: - case tok_list: - case tok_set: - case IDENTIFIER: - FieldType(); - break; - case tok_void: - jj_consume_token(tok_void); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - - break; - default: - jj_la1[30] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - } - - final public DynamicSerDeFieldType FieldType() throws ParseException { - /*@bgen(jjtree) FieldType */ - DynamicSerDeFieldType jjtn000 = new DynamicSerDeFieldType(JJTFIELDTYPE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case tok_string: - TypeString(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_bool: - TypeBool(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i16: - Typei16(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i32: - Typei32(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_i64: - Typei64(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_double: - TypeDouble(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_map: - TypeMap(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_set: - TypeSet(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - case tok_list: - TypeList(); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return 
jjtn000;} - break; - case IDENTIFIER: - jjtn000.name = jj_consume_token(IDENTIFIER).image; - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - break; - default: - jj_la1[31] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeString TypeString() throws ParseException { - /*@bgen(jjtree) TypeString */ - DynamicSerDeTypeString jjtn000 = new DynamicSerDeTypeString(JJTTYPESTRING); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_string); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeByte TypeByte() throws ParseException { - /*@bgen(jjtree) TypeByte */ - DynamicSerDeTypeByte jjtn000 = new DynamicSerDeTypeByte(JJTTYPEBYTE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_byte); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypei16 Typei16() throws ParseException { - /*@bgen(jjtree) Typei16 */ - DynamicSerDeTypei16 jjtn000 = new DynamicSerDeTypei16(JJTTYPEI16); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_i16); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypei32 Typei32() throws ParseException { - /*@bgen(jjtree) Typei32 */ - DynamicSerDeTypei32 jjtn000 = new DynamicSerDeTypei32(JJTTYPEI32); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_i32); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypei64 Typei64() throws ParseException { - /*@bgen(jjtree) Typei64 */ - DynamicSerDeTypei64 jjtn000 = new DynamicSerDeTypei64(JJTTYPEI64); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_i64); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeDouble TypeDouble() throws ParseException { - /*@bgen(jjtree) TypeDouble */ - DynamicSerDeTypeDouble jjtn000 = new DynamicSerDeTypeDouble(JJTTYPEDOUBLE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - 
jj_consume_token(tok_double); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeBool TypeBool() throws ParseException { - /*@bgen(jjtree) TypeBool */ - DynamicSerDeTypeBool jjtn000 = new DynamicSerDeTypeBool(JJTTYPEBOOL); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_bool); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeMap TypeMap() throws ParseException { - /*@bgen(jjtree) TypeMap */ - DynamicSerDeTypeMap jjtn000 = new DynamicSerDeTypeMap(JJTTYPEMAP); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_map); - jj_consume_token(68); - FieldType(); - jj_consume_token(58); - FieldType(); - jj_consume_token(69); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeSet TypeSet() throws ParseException { - /*@bgen(jjtree) TypeSet */ - DynamicSerDeTypeSet jjtn000 = new DynamicSerDeTypeSet(JJTTYPESET); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_set); - jj_consume_token(68); - FieldType(); - jj_consume_token(69); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public DynamicSerDeTypeList TypeList() throws ParseException { - /*@bgen(jjtree) TypeList */ - DynamicSerDeTypeList jjtn000 = new DynamicSerDeTypeList(JJTTYPELIST); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - jj_consume_token(tok_list); - jj_consume_token(68); - FieldType(); - jj_consume_token(69); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in 
function"); - } - - public thrift_grammarTokenManager token_source; - SimpleCharStream jj_input_stream; - public Token token, jj_nt; - private int jj_ntk; - private int jj_gen; - final private int[] jj_la1 = new int[32]; - static private int[] jj_la1_0; - static private int[] jj_la1_1; - static private int[] jj_la1_2; - static { - jj_la1_0(); - jj_la1_1(); - jj_la1_2(); - } - private static void jj_la1_0() { - jj_la1_0 = new int[] {0x100,0xa3fee00,0xa3fee00,0x23fee00,0x100,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xb0000000,0x0,0x0,0x0,0x0,0xa0000000,0x0,0x0,0x0,0x0,0xa0000000,0xb0000000,0xa0000000,}; - } - private static void jj_la1_1() { - jj_la1_1 = new int[] {0x19c20,0x0,0x0,0x0,0x19c20,0x11c20,0xc000000,0x200000,0x40000000,0xc000000,0x1000000,0xc000000,0xc000000,0x91380000,0xc000000,0x91380000,0xc000000,0x91380000,0x91380000,0x2003cf,0x2000,0xc000000,0x200,0x4000,0x2e01cf,0x80000,0xc000000,0x60000,0x40000000,0x1cf,0x2001cf,0x2001cf,}; - } - private static void jj_la1_2() { - jj_la1_2 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; - } - - public thrift_grammar(java.io.InputStream stream) { - this(stream, null); - } - public thrift_grammar(java.io.InputStream stream, String encoding) { - try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } - token_source = new thrift_grammarTokenManager(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - public void ReInit(java.io.InputStream stream) { - ReInit(stream, null); - } - public void ReInit(java.io.InputStream stream, String encoding) { - try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } - token_source.ReInit(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jjtree.reset(); - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - public thrift_grammar(java.io.Reader stream) { - jj_input_stream = new SimpleCharStream(stream, 1, 1); - token_source = new thrift_grammarTokenManager(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - public void ReInit(java.io.Reader stream) { - jj_input_stream.ReInit(stream, 1, 1); - token_source.ReInit(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jjtree.reset(); - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - public thrift_grammar(thrift_grammarTokenManager tm) { - token_source = tm; - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - public void ReInit(thrift_grammarTokenManager tm) { - token_source = tm; - token = new Token(); - jj_ntk = -1; - jjtree.reset(); - jj_gen = 0; - for (int i = 0; i < 32; i++) jj_la1[i] = -1; - } - - final private Token jj_consume_token(int kind) throws ParseException { - Token oldToken; - if ((oldToken = token).next != null) token = token.next; - else token = token.next = token_source.getNextToken(); - jj_ntk = -1; - if (token.kind == kind) { - jj_gen++; - return token; - } - token = oldToken; - jj_kind = kind; - throw generateParseException(); - } - - final public Token getNextToken() { - if (token.next != null) token = token.next; - else token = token.next = token_source.getNextToken(); - jj_ntk = -1; - jj_gen++; - return token; - } - - 
final public Token getToken(int index) { - Token t = token; - for (int i = 0; i < index; i++) { - if (t.next != null) t = t.next; - else t = t.next = token_source.getNextToken(); - } - return t; - } - - final private int jj_ntk() { - if ((jj_nt=token.next) == null) - return (jj_ntk = (token.next=token_source.getNextToken()).kind); - else - return (jj_ntk = jj_nt.kind); - } - - private java.util.Vector jj_expentries = new java.util.Vector(); - private int[] jj_expentry; - private int jj_kind = -1; - - public ParseException generateParseException() { - jj_expentries.removeAllElements(); - boolean[] la1tokens = new boolean[70]; - for (int i = 0; i < 70; i++) { - la1tokens[i] = false; - } - if (jj_kind >= 0) { - la1tokens[jj_kind] = true; - jj_kind = -1; - } - for (int i = 0; i < 32; i++) { - if (jj_la1[i] == jj_gen) { - for (int j = 0; j < 32; j++) { - if ((jj_la1_0[i] & (1< true, 'tinyint' => true, + 'smallint' => true, 'int' => true, 'bigint' => true, 'float' => true, Index: src/contrib/hive/serde/README =================================================================== --- src/contrib/hive/serde/README (revision 0) +++ src/contrib/hive/serde/README (revision 0) @@ -0,0 +1,115 @@ +What is SerDe +----------- +SerDe is a short name for Serializer and Deserializer. +Hive uses SerDe (and FileFormat) to read from/write to tables. + +* HDFS files --(InputFileFormat)--> <key, value> --(Deserializer)--> Row object +* Row object --(Serializer)--> <key, value> --(OutputFileFormat)--> HDFS files + +Note that the "key" part is ignored when reading, and is always a constant when +writing. Basically the row object is only stored into the "value". +One principle of Hive is that Hive does not own the HDFS file format - users +should be able to directly read the HDFS files in the Hive tables using other +tools, or use other tools to directly write to HDFS files that can be read by +Hive through "CREATE EXTERNAL TABLE", or can be loaded into Hive through "LOAD +DATA INPATH", which just moves the file into the Hive table directory. + + +Note that org.apache.hadoop.hive.serde is the deprecated old serde library. +Please look at org.apache.hadoop.hive.serde2 for the latest version. + + +Existing FileFormats and SerDe classes +------------------------ +Hive currently uses these FileFormats to read/write files: + +* TextInputFormat/NoKeyTextOutputFormat + These two classes read/write data in plain text file format. + +* SequenceFileInputFormat/SequenceFileOutputFormat + These two classes read/write data in Hadoop SequenceFile format. + +Hive currently uses these SerDe classes to serialize and deserialize data: + +* MetadataTypedColumnsetSerDe + This serde is used to read/write delimited records like CSV, tab-separated, + or control-A separated records (quoting is not supported yet). + +* ThriftSerDe + This serde is used to read/write thrift serialized objects. The class file + for the Thrift object must be loaded first. + +* DynamicSerDe + This serde also reads/writes thrift serialized objects, but it understands thrift + DDL, so the schema of the object can be provided at runtime. It also supports + many different protocols, including TBinaryProtocol, TJSONProtocol, and + TCTLSeparatedProtocol (which writes data in delimited records). + + + +How to load data into Hive +------------------------ +In order to load data into Hive, we need to tell Hive the format of the data +through the "CREATE TABLE" statement: + +* FileFormat: the data has to be in Text or SequenceFile.
+* Format of the row: + * If the data is in delimited format, use MetadataTypedColumnsetSerDe + * If the data is in delimited format and has more than one level of delimiters, + use DynamicSerDe with TCTLSeparatedProtocol + * If the data is a serialized thrift object, use ThriftSerDe + +The steps to load the data: +1. Create a table: + + CREATE TABLE t (foo STRING, bar STRING) + ROW FORMAT DELIMITED + FIELDS TERMINATED BY '\t' + STORED AS TEXTFILE; + + CREATE TABLE t2 (foo STRING, bar ARRAY<STRING>) + ROW FORMAT DELIMITED + FIELDS TERMINATED BY '\t' + COLLECTION ITEMS TERMINATED BY ',' + STORED AS TEXTFILE; + + CREATE TABLE t3 (foo STRING, bar MAP<STRING, STRING>) + ROW FORMAT DELIMITED + FIELDS TERMINATED BY '\t' + COLLECTION ITEMS TERMINATED BY ',' + MAP KEYS TERMINATED BY ':' + STORED AS TEXTFILE; + + CREATE TABLE t4 (foo STRING, bar MAP<STRING, STRING>) + ROW FORMAT SERIALIZER 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe' + WITH SERDEPROPERTIES ('columns'='foo,bar','SERIALIZATION.FORMAT'='9'); + + (RegexDeserializer is not done yet) + CREATE TABLE t5 (foo STRING, bar STRING) + ROW FORMAT SERIALIZER 'org.apache.hadoop.hive.serde2.RegexDeserializer' + WITH SERDEPROPERTIES ('regex'='([a-z]*) *([a-z]*)'); + +2. Load the data: + LOAD DATA LOCAL INPATH '../examples/files/kv1.txt' OVERWRITE INTO TABLE t; + + + +How to read data from Hive tables +------------------------ +In order to read data from Hive tables, we need to know the same two things: +* File Format +* Row Format + +Then we just need to directly open the HDFS file and read the data. + + +How to write your own SerDe +------------------------ + +In most cases, users want to write a Deserializer instead of a SerDe. +For example, the RegexDeserializer will deserialize the data using the +configuration parameter 'regex', and possibly a list of column names (see +serde2.MetadataTypedColumnsetSerDe). + +Please see serde2/Deserializer.java for details.
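A worked example helps here: below is a minimal sketch of a custom Deserializer in the spirit of the README section above. It is illustrative only - the class name FieldSplitDeserializer is hypothetical, and the ObjectInspectorFactory calls are an assumption about the serde2.objectinspector API at this revision, so check Deserializer.java and ObjectInspectorFactory.java before reusing it.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.Deserializer;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    // Hypothetical example: turns each input line into a List<String> row
    // by splitting on a configurable delimiter.
    public class FieldSplitDeserializer implements Deserializer {

      private String delim;
      private List<String> row;

      public void initialize(Configuration job, Properties tbl) throws SerDeException {
        // Reuse the table property the delimited serdes read (field.delim),
        // defaulting to control-A like the rest of Hive.
        delim = tbl.getProperty("field.delim", "\u0001");
        row = new ArrayList<String>();
      }

      public Object deserialize(Writable blob) throws SerDeException {
        // The FileFormat hands the Deserializer only the "value" half of the
        // <key, value> pair, as described at the top of this README.
        String line = ((Text) blob).toString();
        row.clear();
        row.addAll(Arrays.asList(line.split(delim, -1)));
        return row;
      }

      public ObjectInspector getObjectInspector() throws SerDeException {
        // Assumption: standard list/primitive inspectors are available from
        // the factory; the row is described to Hive as a list of strings.
        return ObjectInspectorFactory.getStandardListObjectInspector(
            ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
      }
    }

Such a class would then be wired up like the t5 example above, with ROW FORMAT SERIALIZER naming the class and any needed SERDEPROPERTIES.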
Index: src/contrib/hive/serde/if/serde.thrift =================================================================== --- src/contrib/hive/serde/if/serde.thrift (revision 712243) +++ src/contrib/hive/serde/if/serde.thrift (working copy) @@ -9,20 +9,25 @@ const string SERIALIZATION_FORMAT = "serialization.format" const string SERIALIZATION_DDL = "serialization.ddl" const string SERIALIZATION_NULL_FORMAT = "serialization.null.format" +const string SERIALIZATION_LAST_COLUMN_TAKES_REST = "serialization.last.column.takes.rest" +const string SERIALIZATION_SORT_ORDER = "serialization.sort.order" const string FIELD_DELIM = "field.delim" const string COLLECTION_DELIM = "colelction.delim" const string LINE_DELIM = "line.delim" const string MAPKEY_DELIM = "mapkey.delim" +const string QUOTE_CHAR = "quote.delim" typedef string PrimitiveType typedef string CollectionType +const string BOOLEAN_TYPE_NAME = "boolean"; const string TINYINT_TYPE_NAME = "tinyint"; +const string SMALLINT_TYPE_NAME = "smallint"; const string INT_TYPE_NAME = "int"; const string BIGINT_TYPE_NAME = "bigint"; const string FLOAT_TYPE_NAME = "float"; -const string DOUBLE_TYPE_NAME = "double"; +const string DOUBLE_TYPE_NAME = "double"; const string STRING_TYPE_NAME = "string"; const string DATE_TYPE_NAME = "date"; const string DATETIME_TYPE_NAME = "datetime"; @@ -31,7 +36,7 @@ const string LIST_TYPE_NAME = "array"; const string MAP_TYPE_NAME = "map"; -const set<string> PrimitiveTypes = [ TINYINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME ], +const set<string> PrimitiveTypes = [ BOOLEAN_TYPE_NAME TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME ], const set<string> CollectionTypes = [ LIST_TYPE_NAME MAP_TYPE_NAME ], Index: src/contrib/hive/serde/build.xml =================================================================== --- src/contrib/hive/serde/build.xml (revision 712243) +++ src/contrib/hive/serde/build.xml (working copy) @@ -27,21 +27,27 @@ + - + + + - + Index: src/contrib/hive/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java =================================================================== --- src/contrib/hive/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (revision 712243) +++ src/contrib/hive/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (working copy) @@ -28,6 +28,8 @@ import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.InvalidObjectException; +import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.SerDeInfo; @@ -350,6 +352,81 @@ } } + public void testAlterTable() throws Exception { + try { + String dbName = "alterdb"; + String invTblName = "alter-tbl"; + String tblName = "altertbl"; + + client.dropTable(dbName, tblName); + client.dropDatabase(dbName); + boolean ret = client.createDatabase(dbName, "strange_loc"); + assertTrue("Unable to create the database " + dbName, ret); + + ArrayList<FieldSchema> invCols = new ArrayList<FieldSchema>(2); + invCols.add(new FieldSchema("n-ame", Constants.STRING_TYPE_NAME, "")); + invCols.add(new FieldSchema("in.come", 
Constants.INT_TYPE_NAME, "")); + + Table tbl = new Table(); + tbl.setDbName(dbName); + tbl.setTableName(invTblName); + StorageDescriptor sd = new StorageDescriptor(); + tbl.setSd(sd); + sd.setCols(invCols); + sd.setCompressed(false); + sd.setNumBuckets(1); + sd.setParameters(new HashMap<String, String>()); + sd.getParameters().put("test_param_1", "Use this for comments etc"); + sd.setBucketCols(new ArrayList<String>(2)); + sd.getBucketCols().add("name"); + sd.setSerdeInfo(new SerDeInfo()); + sd.getSerdeInfo().setName(tbl.getTableName()); + sd.getSerdeInfo().setParameters(new HashMap<String, String>()); + sd.getSerdeInfo().getParameters().put(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1"); + boolean failed = false; + try { + client.createTable(tbl); + } catch (InvalidObjectException ex) { + failed = true; + } + if(!failed) { + assertTrue("Able to create table with invalid name: " + invTblName, false); + } + ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2); + cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, "")); + cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, "")); + + // create a valid table + tbl.setTableName(tblName); + tbl.getSd().setCols(cols); + client.createTable(tbl); + + // now try an invalid alter table + Table tbl2 = client.getTable(dbName, tblName); + failed = false; + try { + tbl2.setTableName(invTblName); + tbl2.getSd().setCols(invCols); + client.alter_table(dbName, tblName, tbl2); + } catch (InvalidOperationException ex) { + failed = true; + } + if(!failed) { + assertTrue("Able to rename table with invalid name: " + invTblName, false); + } + // try a valid alter table + tbl2.setTableName(tblName); + tbl2.getSd().setCols(cols); + tbl2.getSd().setNumBuckets(32); + client.alter_table(dbName, tblName, tbl2); + Table tbl3 = client.getTable(dbName, tblName); + assertEquals("Alter table didn't succeed. 
Num buckets ", tbl2.getSd().getNumBuckets(), tbl3.getSd().getNumBuckets()); + } catch (Exception e) { + System.err.println(StringUtils.stringifyException(e)); + System.err.println("testSimpleTable() failed."); + throw e; + } + } public void testComplexTable() throws Exception { String dbName = "compdb"; Index: src/contrib/hive/metastore/src/test/org/apache/hadoop/hive/metastore/TestPartitions.java =================================================================== --- src/contrib/hive/metastore/src/test/org/apache/hadoop/hive/metastore/TestPartitions.java (revision 712243) +++ src/contrib/hive/metastore/src/test/org/apache/hadoop/hive/metastore/TestPartitions.java (working copy) @@ -60,8 +60,8 @@ fileSys_.mkdirs(part2); List partitions = bar1.getPartitions(); assertTrue(partitions.size() == 2); - assertTrue(partitions.get(0).equals("ds=2008-01-01")); - assertTrue(partitions.get(1).equals("ds=2008-01-02")); + assertTrue(partitions.contains("ds=2008-01-01")); + assertTrue(partitions.contains("ds=2008-01-02")); cleanup(); } catch(MetaException e) { e.printStackTrace(); Index: src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java =================================================================== --- src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (revision 712243) +++ src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (working copy) @@ -271,8 +271,10 @@ this.incrementCounter("create_table"); logStartFunction("create_table: db=" + tbl.getDbName() + " tbl=" + tbl.getTableName()); boolean success = false; - if(!MetaStoreUtils.validateName(tbl.getTableName())) { - throw new InvalidObjectException(tbl.getTableName() + " is not a valid object name"); + if(!MetaStoreUtils.validateName(tbl.getTableName()) || + !MetaStoreUtils.validateColNames(tbl.getSd().getCols()) || + (tbl.getPartitionKeys() != null && !MetaStoreUtils.validateColNames(tbl.getPartitionKeys()))) { + throw new InvalidObjectException(tbl.getTableName() + " is not a valid object name"); } try { getMS().openTransaction(); @@ -540,11 +542,15 @@ logStartFunction("getVersion"); return "3.0"; } - + public void alter_table(String dbname, String name, Table newTable) throws InvalidOperationException, MetaException { this.incrementCounter("alter_table"); logStartFunction("truncate_table: db=" + dbname + " tbl=" + name + " newtbl=" + newTable.getTableName()); + if(!MetaStoreUtils.validateName(newTable.getTableName()) || + !MetaStoreUtils.validateColNames(newTable.getSd().getCols())) { + throw new InvalidOperationException(newTable.getTableName() + " is not a valid object name"); + } try { getMS().alterTable(dbname, name, newTable); } catch (InvalidObjectException e) { Index: src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java =================================================================== --- src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (revision 712243) +++ src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (working copy) @@ -231,8 +231,8 @@ /** * validateName * - * Checks the name conforms to our standars which are: "[a-zA-z-_0-9]+". - * checks this is just characters and numbers and _ and . and - + * Checks the name conforms to our standars which are: "[a-zA-z_0-9]+". 
+ * checks this is just characters and numbers and _ * * @param tableName the name to validate * @return none */ @@ -246,6 +246,14 @@ } return false; } + + static public boolean validateColNames(List<FieldSchema> cols) { + for (FieldSchema fieldSchema : cols) { + if(!validateName(fieldSchema.getName())) + return false; + } + return true; + } /** * Change from old to new format properties of a schema file @@ -395,7 +403,9 @@ static HashMap<String, String> typeToThriftTypeMap; static { typeToThriftTypeMap = new HashMap<String, String>(); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.BOOLEAN_TYPE_NAME, "bool"); typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.TINYINT_TYPE_NAME, "byte"); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.SMALLINT_TYPE_NAME, "i16"); typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.INT_TYPE_NAME, "i32"); typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.BIGINT_TYPE_NAME, "i64"); typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.DOUBLE_TYPE_NAME, "double"); @@ -446,7 +456,7 @@ ddl.append(col.getName()); } ddl.append("}"); - LOG.warn("DDL: " + ddl); + LOG.info("DDL: " + ddl); return ddl.toString(); } public static Properties getSchema(org.apache.hadoop.hive.metastore.api.Table tbl) { @@ -538,7 +548,7 @@ * @throws SerDeException * @throws MetaException */ - static List<FieldSchema> getFieldsFromDeserializer(String tableName, Deserializer deserializer) throws SerDeException, MetaException { + public static List<FieldSchema> getFieldsFromDeserializer(String tableName, Deserializer deserializer) throws SerDeException, MetaException { ObjectInspector oi = deserializer.getObjectInspector(); String [] names = tableName.split("\\."); String last_name = names[names.length-1]; @@ -572,4 +582,5 @@ } return str_fields; } + } Index: src/contrib/hive/conf/hive-default.xml =================================================================== --- src/contrib/hive/conf/hive-default.xml (revision 712243) +++ src/contrib/hive/conf/hive-default.xml (working copy) @@ -84,4 +84,22 @@ Name of the class that implements org.apache.hadoop.hive.metastore.rawstore interface. This class is used to store and retrieval of raw metadata objects such as table, database</description> </property> + + <property> + <name>hive.default.fileformat</name> + <value>TextFile</value> + <description>Default file format for CREATE TABLE statement. Options are TextFile and SequenceFile. Users can explicitly say CREATE TABLE ... STORED AS <TEXTFILE|SEQUENCEFILE> to override</description> + </property> + + <property> + <name>hive.map.aggr</name> + <value>false</value> + <description>Whether to use map-side aggregation in Hive Group By queries</description> + </property> + + <property> + <name>hive.join.emit.interval</name> + <value>1000</value> + <description>How many rows in the right-most join operand Hive should buffer before emitting the join result.</description> + </property>
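These settings can be tried out per session from the CLI through the SET command (which this patch also touches - SetProcessor now trims whitespace around '='), before committing anything to the XML file. A brief hypothetical session, with arbitrary illustration values (the table name kv_seq is made up):

    hive> set hive.map.aggr = true;
    hive> set hive.join.emit.interval = 500;
    hive> set hive.default.fileformat = SequenceFile;
    hive> CREATE TABLE kv_seq (key STRING, value STRING);

With hive.default.fileformat set this way, the CREATE TABLE above picks SequenceFile without an explicit STORED AS clause; a permanent, site-wide change would instead go into hive-site.xml, which overrides hive-default.xml.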
+ + Index: src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java =================================================================== --- src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (revision 712243) +++ src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (working copy) @@ -31,6 +31,7 @@ import org.apache.hadoop.hive.ql.Driver; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; public class CliDriver { @@ -41,7 +42,18 @@ public static SetProcessor sp; public static Driver qp; public static FsShell dfs; + public static Log LOG = LogFactory.getLog("CliDriver"); + /** + * delay console initialization until session has been initialized + */ + public static LogHelper console; + public static LogHelper getConsole() { + if(console == null) + console = new LogHelper(LOG); + return (console); + } + public CliDriver(CliSessionState ss) { SessionState.start(ss); sp = new SetProcessor(); @@ -49,23 +61,28 @@ } public static int processCmd(String cmd) { + + SessionState ss = SessionState.get(); + LogHelper console = getConsole(); + String[] tokens = cmd.split("\\s+"); String cmd_1 = cmd.substring(tokens[0].length()); int ret = 0; - if(tokens[0].equals("set")) { + if(tokens[0].toLowerCase().equals("set")) { + ret = sp.run(cmd_1); - } else if (cmd.equals("quit") || cmd.equals("exit")) { + + } else if (cmd.toLowerCase().equals("quit") || cmd.toLowerCase().equals("exit")) { + // if we have come this far - either the previous commands // are all successful or this is command line. in either case // this counts as a successful run System.exit(0); + } else if (cmd.startsWith("!")) { - SessionState ss = SessionState.get(); + String shell_cmd = cmd.substring(1); - if (shell_cmd.endsWith(";")) { - shell_cmd = shell_cmd.substring(0, shell_cmd.length()-1); - } //shell_cmd = "/bin/bash -c \'" + shell_cmd + "\'"; try { @@ -76,48 +93,102 @@ outPrinter.start(); errPrinter.start(); - int exitVal = executor.waitFor(); - if (exitVal != 0) { - ss.err.write((new String("Command failed with exit code = " + exitVal)).getBytes()); + ret = executor.waitFor(); + if (ret != 0) { + console.printError("Command failed with exit code = " + ret); } } catch (Exception e) { - e.printStackTrace(); + console.printError("Exception raised from Shell command " + e.getLocalizedMessage(), + org.apache.hadoop.util.StringUtils.stringifyException(e)); + ret = 1; } - } else if (cmd.startsWith("dfs")) { + + } else if (tokens[0].toLowerCase().equals("dfs")) { + // dfs shell commands - SessionState ss = SessionState.get(); if(dfs == null) dfs = new FsShell(ss.getConf()); - String hadoopCmd = cmd.replaceFirst("dfs\\s+", ""); - hadoopCmd = hadoopCmd.trim(); - if (hadoopCmd.endsWith(";")) { - hadoopCmd = hadoopCmd.substring(0, hadoopCmd.length()-1); - } - String[] args = hadoopCmd.split("\\s+"); + + String [] alt_tokens = new String [tokens.length-1]; + System.arraycopy(tokens, 1, alt_tokens, 0, tokens.length-1); + tokens = alt_tokens; + try { PrintStream oldOut = System.out; System.setOut(ss.out); - int val = dfs.run(args); + ret = dfs.run(tokens); System.setOut(oldOut); - if (val != 0) { - ss.err.write((new String("Command failed with exit code = " + val)).getBytes()); + if (ret != 0) { + console.printError("Command failed with exit code = " + ret); } } catch (Exception e) { - ss.err.println("Exception raised from DFSShell.run " + 
e.getLocalizedMessage()); + console.printError("Exception raised from DFSShell.run " + e.getLocalizedMessage(), + org.apache.hadoop.util.StringUtils.stringifyException(e)); + ret = 1; } + + } else if (tokens[0].toLowerCase().equals("list")) { + + SessionState.ResourceType t; + if(tokens.length < 2 || (t = SessionState.find_resource_type(tokens[1])) == null) { + console.printError("Usage: list [" + + StringUtils.join(SessionState.ResourceType.values(),"|") + + "] [ []*]" ); + ret = 1; + } else { + List filter = null; + if(tokens.length >=3) { + System.arraycopy(tokens, 2, tokens, 0, tokens.length-2); + filter = Arrays.asList(tokens); + } + Set s = ss.list_resource(t, filter); + if(s != null && !s.isEmpty()) + ss.out.println(StringUtils.join(s, "\n")); + } + + } else if (tokens[0].toLowerCase().equals("add")) { + + SessionState.ResourceType t; + if(tokens.length < 3 || (t = SessionState.find_resource_type(tokens[1])) == null) { + console.printError("Usage: add [" + + StringUtils.join(SessionState.ResourceType.values(),"|") + + "] []*"); + ret = 1; + } else { + for(int i = 2; i]"); + ret = 1; + } else if (tokens.length >= 3) { + for(int i = 2; i res = new Vector(); while (qp.getResults(res)) { for (String r:res) { - SessionState ss = SessionState.get(); - PrintStream out = ss.out; out.println(r); } res.clear(); } - + int cret = qp.close(); if (ret == 0) { ret = cret; @@ -223,8 +294,7 @@ String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE; reader.setHistory(new History(new File(historyFile))); int ret = 0; - Log LOG = LogFactory.getLog("CliDriver"); - LogHelper console = new LogHelper(LOG); + String prefix = ""; String curPrompt = prompt; while ((line = reader.readLine(curPrompt+"> ")) != null) { @@ -242,10 +312,11 @@ long end = System.currentTimeMillis(); if (end > start) { double timeTaken = (double)(end-start)/1000.0; - console.printInfo("Time taken: " + timeTaken + " seconds", null); + getConsole().printInfo("Time taken: " + timeTaken + " seconds", null); } } System.exit(ret); } + } Index: src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/SetProcessor.java =================================================================== --- src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/SetProcessor.java (revision 712243) +++ src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/SetProcessor.java (working copy) @@ -81,8 +81,8 @@ part[0] = nwcmd.substring(0, nwcmd.length()-1); part[1] = ""; } else { - part[0] = nwcmd.substring(0, eqIndex); - part[1] = nwcmd.substring(eqIndex+1); + part[0] = nwcmd.substring(0, eqIndex).trim(); + part[1] = nwcmd.substring(eqIndex+1).trim(); } try { Index: src/contrib/hive/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java =================================================================== --- src/contrib/hive/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 712243) +++ src/contrib/hive/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy) @@ -81,8 +81,14 @@ HIVETABLENAME("hive.table.name", ""), HIVEPARTITIONNAME("hive.partition.name", ""), HIVEPARTITIONPRUNER("hive.partition.pruning", "nonstrict"), - HIVEALIAS("hive.alias", ""); + HIVEALIAS("hive.alias", ""), + HIVEMAPSIDEAGGREGATE("hive.map.aggr", "false"), + HIVEJOINEMITINTERVAL("hive.join.emit.interval", 1000), + // Default file format for CREATE TABLE statement + // Options: TextFile, SequenceFile + HIVEDEFAULTFILEFORMAT("hive.default.fileformat", "TextFile"); + public final String varname; public final String defaultVal; 
public final int defaultIntVal; Index: src/contrib/hive/data/files/apache.access.log =================================================================== --- src/contrib/hive/data/files/apache.access.log (revision 0) +++ src/contrib/hive/data/files/apache.access.log (revision 0) @@ -0,0 +1 @@ +127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326 Index: src/contrib/hive/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (revision 0) @@ -0,0 +1,2 @@ +FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Partition collumn name aint conflicts with table columns. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask Index: src/contrib/hive/ql/src/test/results/clientnegative/input1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientnegative/input1.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientnegative/input1.q.out (revision 0) @@ -0,0 +1 @@ +FAILED: Error in semantic analysis: line 1:7 Invalid Table Alias a Index: src/contrib/hive/ql/src/test/results/clientnegative/notable_alias3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientnegative/notable_alias3.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientnegative/notable_alias3.q.out (revision 0) @@ -0,0 +1 @@ +FAILED: Error in semantic analysis: line 2:44 Expression Not In Group By Key key Index: src/contrib/hive/ql/src/test/results/clientnegative/input2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientnegative/input2.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientnegative/input2.q.out (revision 0) @@ -0,0 +1 @@ +FAILED: Error in semantic analysis: line 1:7 Invalid Table Alias a Index: src/contrib/hive/ql/src/test/results/clientnegative/notable_alias4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientnegative/notable_alias4.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientnegative/notable_alias4.q.out (revision 0) @@ -0,0 +1 @@ +FAILED: Error in semantic analysis: Column key Found in more than One Tables/Subqueries Index: src/contrib/hive/ql/src/test/results/clientnegative/bad_sample_clause.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientnegative/bad_sample_clause.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientnegative/bad_sample_clause.q.out (revision 0) @@ -0,0 +1 @@ +FAILED: Error in semantic analysis: Sampling Expression Needed for Non-Bucketed Table srcpart Index: src/contrib/hive/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out (revision 0) @@ -0,0 +1,2 @@ +Failed with exception Cannot load text files into a table stored as SequenceFile. 
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask
Index: src/contrib/hive/ql/src/test/results/clientnegative/input_testxpath4.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientnegative/input_testxpath4.q.out (revision 0)
+++ src/contrib/hive/ql/src/test/results/clientnegative/input_testxpath4.q.out (revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 3:42 Invalid Table Alias lintstring
Index: src/contrib/hive/ql/src/test/results/clientnegative/invalid_tbl_name.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientnegative/invalid_tbl_name.q.out (revision 0)
+++ src/contrib/hive/ql/src/test/results/clientnegative/invalid_tbl_name.q.out (revision 0)
@@ -0,0 +1,2 @@
+FAILED: Parse Error: line 1:20 mismatched input '-' expecting EOF
+
Index: src/contrib/hive/ql/src/test/results/clientnegative/joinneg.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientnegative/joinneg.q.out (revision 0)
+++ src/contrib/hive/ql/src/test/results/clientnegative/joinneg.q.out (revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 5:12 Invalid Table Alias b
Index: src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out (revision 712243)
+++ src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out (working copy)
@@ -10,32 +10,36 @@
     Map Reduce
       Alias -> Map Operator Tree:
         src
-          Filter Operator
-            predicate:
-                expr: (key = 86)
-                type: Boolean
-            Select Operator
-              expressions:
-                    expr: (3 + 2)
-                    type: int
-                    expr: (3.0 + UDFToDouble(2))
-                    type: double
-                    expr: (UDFToDouble(3) + 2.0)
-                    type: double
-                    expr: (3.0 + 2.0)
-                    type: double
-                    expr: (3 + UDFToInteger(2.0))
-                    type: int
-                    expr: UDFToBoolean(1)
-                    type: Boolean
-                    expr: UDFToInteger(true)
-                    type: int
-              File Output Operator
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
-                    name: dest1
+          Select Operator
+            expressions:
+                  expr: key
+                  type: string
+            Filter Operator
+              predicate:
+                  expr: (0 = 86)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: (3 + 2)
+                      type: int
+                      expr: (3.0 + UDFToDouble(2))
+                      type: double
+                      expr: (UDFToDouble(3) + 2.0)
+                      type: double
+                      expr: (3.0 + 2.0)
+                      type: double
+                      expr: (3 + UDFToInteger(2.0))
+                      type: int
+                      expr: UDFToBoolean(1)
+                      type: boolean
+                      expr: UDFToInteger(true)
+                      type: int
+                File Output Operator
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                      name: dest1

   Stage: Stage-0
     Move Operator
Index: src/contrib/hive/ql/src/test/results/clientpositive/inputddl8.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/inputddl8.q.out (revision 0)
+++ src/contrib/hive/ql/src/test/results/clientpositive/inputddl8.q.out (revision 0)
@@ -0,0 +1,10 @@
+aint int 'from deserializer'
+astring string 'from deserializer'
+lint array<int> 'from deserializer'
+lstring array<string> 'from deserializer'
+lintstring array<org.apache.hadoop.hive.serde2.thrift.test.IntString> 'from deserializer'
+mstringstring map<string,string> 'from
deserializer' +ds datetime +country string +Detailed Table Information: +Table(tableName:inputddl8,dbName:default,owner:njain,createTime:1225994139,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/inputddl8,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.mapred.SequenceFileOutputFormat,compressed:false,numBuckets:32,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.ThriftDeserializer,parameters:{serialization.class=org.apache.hadoop.hive.serde2.thrift.test.Complex,serialization.format=com.facebook.thrift.protocol.TBinaryProtocol}),bucketCols:[aint],sortCols:[Order(col:lint,order:1)],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null), FieldSchema(name:country,type:string,comment:null)],parameters:{comment=This is a thrift based table}) Index: src/contrib/hive/ql/src/test/results/clientpositive/quote1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/quote1.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/quote1.q.out (revision 0) @@ -0,0 +1,179 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1 (TOK_PARTSPEC (TOK_PARTVAL `table` '2008-04-08')))) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) `partition`) (TOK_SELEXPR (TOK_COLREF src value) `from`)) (TOK_WHERE (and (>= (TOK_COLREF src key) 200) (< (TOK_COLREF src key) 300))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Filter Operator + predicate: + expr: ((key >= 200) and (key < 300)) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + partition: + table 2008-04-08 + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF dest1 `table`)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF `table` `location`)) (TOK_SELEXPR (TOK_COLREF `table` `type`)) (TOK_SELEXPR (TOK_COLREF `table` `table`))) (TOK_WHERE (= (TOK_COLREF `table` `table`) '2008-04-08')))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + table + Filter Operator + predicate: + expr: (table = '2008-04-08') + type: boolean + Select Operator + expressions: + expr: location + type: string + expr: type + type: string + expr: table + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +238 val_238 2008-04-08 +255 val_255 2008-04-08 +278 val_278 2008-04-08 +265 val_265 2008-04-08 +273 val_273 2008-04-08 +224 val_224 2008-04-08 +213 
val_213 2008-04-08 +281 val_281 2008-04-08 +277 val_277 2008-04-08 +209 val_209 2008-04-08 +252 val_252 2008-04-08 +292 val_292 2008-04-08 +219 val_219 2008-04-08 +287 val_287 2008-04-08 +237 val_237 2008-04-08 +207 val_207 2008-04-08 +208 val_208 2008-04-08 +247 val_247 2008-04-08 +266 val_266 2008-04-08 +203 val_203 2008-04-08 +205 val_205 2008-04-08 +221 val_221 2008-04-08 +280 val_280 2008-04-08 +277 val_277 2008-04-08 +208 val_208 2008-04-08 +286 val_286 2008-04-08 +239 val_239 2008-04-08 +213 val_213 2008-04-08 +216 val_216 2008-04-08 +278 val_278 2008-04-08 +289 val_289 2008-04-08 +221 val_221 2008-04-08 +275 val_275 2008-04-08 +241 val_241 2008-04-08 +284 val_284 2008-04-08 +230 val_230 2008-04-08 +260 val_260 2008-04-08 +272 val_272 2008-04-08 +217 val_217 2008-04-08 +230 val_230 2008-04-08 +208 val_208 2008-04-08 +298 val_298 2008-04-08 +230 val_230 2008-04-08 +205 val_205 2008-04-08 +288 val_288 2008-04-08 +282 val_282 2008-04-08 +282 val_282 2008-04-08 +238 val_238 2008-04-08 +277 val_277 2008-04-08 +273 val_273 2008-04-08 +224 val_224 2008-04-08 +242 val_242 2008-04-08 +272 val_272 2008-04-08 +242 val_242 2008-04-08 +226 val_226 2008-04-08 +229 val_229 2008-04-08 +233 val_233 2008-04-08 +223 val_223 2008-04-08 +218 val_218 2008-04-08 +228 val_228 2008-04-08 +209 val_209 2008-04-08 +230 val_230 2008-04-08 +296 val_296 2008-04-08 +216 val_216 2008-04-08 +274 val_274 2008-04-08 +219 val_219 2008-04-08 +239 val_239 2008-04-08 +223 val_223 2008-04-08 +256 val_256 2008-04-08 +263 val_263 2008-04-08 +288 val_288 2008-04-08 +244 val_244 2008-04-08 +202 val_202 2008-04-08 +229 val_229 2008-04-08 +280 val_280 2008-04-08 +283 val_283 2008-04-08 +235 val_235 2008-04-08 +257 val_257 2008-04-08 +258 val_258 2008-04-08 +203 val_203 2008-04-08 +262 val_262 2008-04-08 +201 val_201 2008-04-08 +217 val_217 2008-04-08 +298 val_298 2008-04-08 +291 val_291 2008-04-08 +255 val_255 2008-04-08 +200 val_200 2008-04-08 +237 val_237 2008-04-08 +248 val_248 2008-04-08 +277 val_277 2008-04-08 +230 val_230 2008-04-08 +207 val_207 2008-04-08 +249 val_249 2008-04-08 +265 val_265 2008-04-08 +214 val_214 2008-04-08 +233 val_233 2008-04-08 +256 val_256 2008-04-08 +298 val_298 2008-04-08 +285 val_285 2008-04-08 +273 val_273 2008-04-08 +281 val_281 2008-04-08 +222 val_222 2008-04-08 +200 val_200 2008-04-08 Index: src/contrib/hive/ql/src/test/results/clientpositive/notable_alias2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/notable_alias2.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/notable_alias2.q.out (revision 0) @@ -0,0 +1,152 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR '1234') (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (< (TOK_COLREF key) 100)) (TOK_GROUPBY (TOK_COLREF src key)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Select Operator + expressions: + expr: key + type: string + Filter Operator + predicate: + expr: (0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: 1 + type: int + Reduce Operator Tree: + Group By Operator + + expr: count(VALUE.0) + keys: + 
expr: KEY.0 + type: string + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/140889204/772631826.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: -1 + value expressions: + expr: 1 + type: bigint + Reduce Operator Tree: + Group By Operator + + expr: count(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: unknown + Select Operator + expressions: + expr: '1234' + type: string + expr: 0 + type: string + expr: 1 + type: bigint + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +1234 0 3 +1234 10 1 +1234 11 1 +1234 12 2 +1234 15 2 +1234 17 1 +1234 18 2 +1234 19 1 +1234 2 1 +1234 20 1 +1234 24 2 +1234 26 2 +1234 27 1 +1234 28 1 +1234 30 1 +1234 33 1 +1234 34 1 +1234 35 3 +1234 37 2 +1234 4 1 +1234 41 1 +1234 42 2 +1234 43 1 +1234 44 1 +1234 47 1 +1234 5 3 +1234 51 2 +1234 53 1 +1234 54 1 +1234 57 1 +1234 58 2 +1234 64 1 +1234 65 1 +1234 66 1 +1234 67 2 +1234 69 1 +1234 70 3 +1234 72 2 +1234 74 1 +1234 76 2 +1234 77 1 +1234 78 1 +1234 8 1 +1234 80 1 +1234 82 1 +1234 83 2 +1234 84 2 +1234 85 1 +1234 86 1 +1234 87 1 +1234 9 1 +1234 90 3 +1234 92 1 +1234 95 2 +1234 96 1 +1234 97 2 +1234 98 2 Index: src/contrib/hive/ql/src/test/results/clientpositive/join2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join2.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/join2.q.out (working copy) @@ -12,40 +12,49 @@ Map Reduce Alias -> Map Operator Tree: src2 - Reduce Output Operator - key expressions: + Select Operator + expressions: expr: key type: string - # partition fields: 1 - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 1 + value expressions: + expr: 0 + type: string src1 - Reduce Output Operator - key expressions: + Select Operator + expressions: expr: key type: string - # partition fields: 1 - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 0 + value expressions: + expr: 0 + type: string Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE.0} {VALUE.1} - 1 {VALUE.0} {VALUE.1} + 0 {VALUE.0} + 1 {VALUE.0} File Output Operator table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table Stage: Stage-2 Map 
Reduce @@ -55,7 +64,9 @@ key expressions: expr: UDFToDouble(key) type: double - # partition fields: 1 + Map-reduce partition columns: + expr: UDFToDouble(key) + type: double tag: 1 value expressions: expr: key @@ -65,31 +76,29 @@ $INTNAME Reduce Output Operator key expressions: - expr: (UDFToDouble(0) + UDFToDouble(2)) + expr: (UDFToDouble(0) + UDFToDouble(1)) type: double - # partition fields: 1 + Map-reduce partition columns: + expr: (UDFToDouble(0) + UDFToDouble(1)) + type: double tag: 0 value expressions: - expr: 2 + expr: 1 type: string - expr: 3 - type: string expr: 0 type: string - expr: 1 - type: string Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE.0} {VALUE.1} {VALUE.2} {VALUE.3} + 0 {VALUE.0} {VALUE.1} 1 {VALUE.0} {VALUE.1} Select Operator expressions: - expr: 2 + expr: 1 type: string - expr: 5 + expr: 3 type: string File Output Operator table: @@ -136,6 +145,8 @@ 0 val_0 0 val_0 0 val_0 +2 val_4 +4 val_8 5 val_10 5 val_10 5 val_10 @@ -145,6 +156,65 @@ 5 val_10 5 val_10 5 val_10 +9 val_18 +9 val_18 +10 val_20 +12 val_24 +12 val_24 +12 val_24 +12 val_24 +12 val_24 +12 val_24 +12 val_24 +12 val_24 +15 val_30 +15 val_30 +15 val_30 +15 val_30 +17 val_34 +27 val_54 +33 val_66 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +35 val_70 +37 val_74 +37 val_74 +37 val_74 +37 val_74 +41 val_82 +42 val_84 +42 val_84 +42 val_84 +42 val_84 +42 val_84 +42 val_84 +42 val_84 +42 val_84 +43 val_86 57 val_114 58 val_116 58 val_116 @@ -190,8 +260,6 @@ 86 val_172 87 val_174 87 val_174 -9 val_18 -9 val_18 90 val_180 90 val_180 90 val_180 @@ -214,7 +282,6 @@ 98 val_196 98 val_196 98 val_196 -10 val_20 100 val_200 100 val_200 100 val_200 @@ -259,14 +326,6 @@ 119 val_238 119 val_238 119 val_238 -12 val_24 -12 val_24 -12 val_24 -12 val_24 -12 val_24 -12 val_24 -12 val_24 -12 val_24 126 val_252 128 val_256 128 val_256 @@ -315,10 +374,6 @@ 149 val_298 149 val_298 149 val_298 -15 val_30 -15 val_30 -15 val_30 -15 val_30 153 val_306 155 val_310 158 val_316 @@ -342,7 +397,6 @@ 169 val_338 169 val_338 169 val_338 -17 val_34 172 val_344 172 val_344 172 val_344 @@ -410,7 +464,6 @@ 197 val_394 197 val_394 197 val_394 -2 val_4 200 val_400 200 val_400 200 val_400 @@ -537,47 +590,3 @@ 249 val_498 249 val_498 249 val_498 -27 val_54 -33 val_66 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -35 val_70 -37 val_74 -37 val_74 -37 val_74 -37 val_74 -4 val_8 -41 val_82 -42 val_84 -42 val_84 -42 val_84 -42 val_84 -42 val_84 -42 val_84 -42 val_84 -42 val_84 -43 val_86 Index: src/contrib/hive/ql/src/test/results/clientpositive/input3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input3.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input3.q.out (working copy) @@ -61,4 +61,4 @@ r1 int r2 float Detailed Table Information: -Table(tableName:test3c,dbName:default,owner:njain,createTime:1224285029,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:r1,type:int,comment:null), 
FieldSchema(name:r2,type:float,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/test3b,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=njain,last_modified_time=1224285029,SORTBUCKETCOLSPREFIX=TRUE}) +Table(tableName:test3c,dbName:default,owner:njain,createTime:1225993819,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:r1,type:int,comment:null), FieldSchema(name:r2,type:float,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/test3b,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=njain,last_modified_time=1225993820}) Index: src/contrib/hive/ql/src/test/results/clientpositive/join4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join4.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/join4.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: ((key > 15) and (key < 25)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -24,7 +24,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 1 value expressions: expr: 0 @@ -35,7 +37,7 @@ Filter Operator predicate: expr: ((key > 10) and (key < 20)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -46,7 +48,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 0 value expressions: expr: 0 Index: src/contrib/hive/ql/src/test/results/clientpositive/input5.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input5.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input5.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_COLLIST (TOK_COLREF src_thrift lint) (TOK_COLREF src_thrift lintstring)) (TOK_ALIASLIST tkey tvalue) '/bin/cat'))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))))) + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src_thrift lint) (TOK_COLREF src_thrift lintstring)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))))) STAGE 
DEPENDENCIES: Stage-1 is a root stage @@ -16,22 +16,30 @@ type: array expr: lintstring type: array - Transform Operator - command: /bin/cat - output info: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - Reduce Output Operator - key expressions: - expr: tkey - type: string - # partition fields: 1 - tag: -1 - value expressions: - expr: tkey - type: string - expr: tvalue - type: string + Select Operator + expressions: + expr: 0 + type: array + expr: 1 + type: array + Transform Operator + command: /bin/cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + Reduce Output Operator + key expressions: + expr: tkey + type: string + Map-reduce partition columns: + expr: tkey + type: string + tag: -1 + value expressions: + expr: tkey + type: string + expr: tvalue + type: string Reduce Operator Tree: Extract Select Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/join6.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join6.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/join6.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: ((key > 15) and (key < 25)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -24,7 +24,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 1 value expressions: expr: 0 @@ -35,7 +37,7 @@ Filter Operator predicate: expr: ((key > 10) and (key < 20)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -46,7 +48,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 0 value expressions: expr: 0 Index: src/contrib/hive/ql/src/test/results/clientpositive/input7.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input7.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input7.q.out (working copy) @@ -12,16 +12,20 @@ src1 Select Operator expressions: - expr: null - type: string expr: key type: string - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - name: dest1 + Select Operator + expressions: + expr: null + type: string + expr: 0 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/input_dynamicserde.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input_dynamicserde.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input_dynamicserde.q.out (working copy) @@ -22,12 +22,24 @@ type: int expr: astring type: string - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - serde: 
org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe - name: dest1 + Select Operator + expressions: + expr: 0 + type: array + expr: 1 + type: array + expr: 2 + type: map + expr: 3 + type: int + expr: 4 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + name: dest1 Stage: Stage-0 Move Operator @@ -50,3 +62,13 @@ [7,14,21] ["70","700","7000"] {"key_7":"value_7"} 1539139264 record_7 [8,16,24] ["80","800","8000"] {"key_8":"value_8"} -1103622763 record_8 [9,18,27] ["90","900","9000"] {"key_9":"value_9"} -1883609167 record_9 +0 0 NULL -1220068486 record_0 +1 10 NULL -1147582750 record_1 +2 20 NULL -2091002570 record_2 +3 30 NULL -1587372273 record_3 +4 40 NULL -240543265 record_4 +5 50 NULL 1914724537 record_5 +6 60 NULL -1281615210 record_6 +7 70 NULL 1539139264 record_7 +8 80 NULL -1103622763 record_8 +9 90 NULL -1883609167 record_9 Index: src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath3.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath3.q.out (working copy) @@ -12,14 +12,20 @@ src_thrift Select Operator expressions: - expr: mstringstring['key_9'] - type: string - expr: lintstring.myint - type: array - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + expr: mstringstring + type: map + expr: lintstring + type: array + Select Operator + expressions: + expr: 0['key_9'] + type: string + expr: 1.myint + type: array + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/join8.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join8.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/join8.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: ((key > 15) and (key < 25)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -24,7 +24,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 1 value expressions: expr: 0 @@ -35,7 +37,7 @@ Filter Operator predicate: expr: ((key > 10) and (key < 20)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -46,7 +48,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 0 value expressions: expr: 0 @@ -73,7 +77,7 @@ Filter Operator predicate: expr: (2 is null and 0 is not null) - type: Boolean + type: boolean Select Operator expressions: expr: 0 Index: src/contrib/hive/ql/src/test/results/clientpositive/input9.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input9.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input9.q.out (working copy) @@ -10,22 +10,26 @@ Map Reduce Alias -> Map Operator Tree: src1 - Filter Operator - predicate: - expr: 
(null = null) - type: Boolean - Select Operator - expressions: - expr: null - type: string - expr: key - type: string - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - name: dest1 + Select Operator + expressions: + expr: key + type: string + Filter Operator + predicate: + expr: (null = null) + type: boolean + Select Operator + expressions: + expr: null + type: string + expr: 0 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/udf1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/udf1.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/udf1.q.out (working copy) @@ -10,52 +10,56 @@ Map Reduce Alias -> Map Operator Tree: src - Filter Operator - predicate: - expr: (key = 86) - type: Boolean - Select Operator - expressions: - expr: ('a' like '%a%') - type: Boolean - expr: ('b' like '%a%') - type: Boolean - expr: ('ab' like '%a%') - type: Boolean - expr: ('ab' like '%a_') - type: Boolean - expr: ('%_' like '\%\_') - type: Boolean - expr: ('ab' like '\%\_') - type: Boolean - expr: ('ab' like '_a%') - type: Boolean - expr: ('ab' like 'a') - type: Boolean - expr: ('' regexp '.*') - type: Boolean - expr: ('a' regexp '[ab]') - type: Boolean - expr: ('' regexp '[ab]') - type: Boolean - expr: ('hadoop' regexp '[a-z]*') - type: Boolean - expr: ('hadoop' regexp 'o*') - type: Boolean - expr: regexp_replace('abc', 'b', 'c') - type: string - expr: regexp_replace('abc', 'z', 'a') - type: string - expr: regexp_replace('abbbb', 'bb', 'b') - type: string - expr: regexp_replace('hadoop', '(.)[a-z]*', '$1ive') - type: string - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - name: dest1 + Select Operator + expressions: + expr: key + type: string + Filter Operator + predicate: + expr: (0 = 86) + type: boolean + Select Operator + expressions: + expr: ('a' like '%a%') + type: boolean + expr: ('b' like '%a%') + type: boolean + expr: ('ab' like '%a%') + type: boolean + expr: ('ab' like '%a_') + type: boolean + expr: ('%_' like '\%\_') + type: boolean + expr: ('ab' like '\%\_') + type: boolean + expr: ('ab' like '_a%') + type: boolean + expr: ('ab' like 'a') + type: boolean + expr: ('' regexp '.*') + type: boolean + expr: ('a' regexp '[ab]') + type: boolean + expr: ('' regexp '[ab]') + type: boolean + expr: ('hadoop' regexp '[a-z]*') + type: boolean + expr: ('hadoop' regexp 'o*') + type: boolean + expr: regexp_replace('abc', 'b', 'c') + type: string + expr: regexp_replace('abc', 'z', 'a') + type: string + expr: regexp_replace('abbbb', 'bb', 'b') + type: string + expr: regexp_replace('hadoop', '(.)[a-z]*', '$1ive') + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 
Stage: Stage-0 Move Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/join10.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join10.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/join10.q.out (revision 0) @@ -0,0 +1,78 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))))) x) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))))) Y) (= (TOK_COLREF x key) (TOK_COLREF Y key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF Y))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + x:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 0 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + y:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 1 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + Index: src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: (key < 100) - type: Boolean + type: boolean Select Operator expressions: expr: key Index: src/contrib/hive/ql/src/test/results/clientpositive/udf3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/udf3.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/udf3.q.out (working copy) @@ -11,44 +11,47 @@ Map Reduce Alias -> Map Operator Tree: src - Reduce Output Operator - # partition fields: -1 - tag: -1 - value expressions: - expr: UDFToInteger('') - type: int + Select Operator + Reduce Output Operator + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: UDFToInteger('') + type: int Reduce Operator Tree: Group By Operator - expr: max(VALUE.0) - expr: avg(VALUE.0) + expr: max(UDFToDouble(VALUE.0)) + expr: avg(UDFToDouble(VALUE.0)) expr: count(VALUE.0) - expr: sum(VALUE.0) - expr: min(VALUE.0) + expr: sum(UDFToDouble(VALUE.0)) + expr: min(UDFToDouble(VALUE.0)) mode: partial1 File Output Operator table: - 
input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-njain/159338588/98912587.10001 + /tmp/hive-njain/20731779/1245261687.10001 Reduce Output Operator - # partition fields: 0 tag: -1 value expressions: expr: 0 - type: string + type: double expr: 1 type: string expr: 2 - type: string + type: bigint expr: 3 - type: string + type: double expr: 4 - type: string + type: double Reduce Operator Tree: Group By Operator @@ -57,19 +60,19 @@ expr: count(VALUE.2) expr: sum(VALUE.3) expr: min(VALUE.4) - mode: partial2 + mode: unknown Select Operator expressions: expr: 2 - type: string + type: bigint expr: 3 - type: string + type: double expr: 1 type: string expr: 4 - type: string + type: double expr: 0 - type: string + type: double File Output Operator table: input format: org.apache.hadoop.mapred.TextInputFormat Index: src/contrib/hive/ql/src/test/results/clientpositive/noalias_subq1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/noalias_subq1.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/noalias_subq1.q.out (revision 0) @@ -0,0 +1,120 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF value) c1) (TOK_SELEXPR (TOK_COLREF key) c2)))) x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c1))) (TOK_WHERE (< (TOK_COLREF c2) 100)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + x:src + Select Operator + expressions: + expr: value + type: string + expr: key + type: string + Filter Operator + predicate: + expr: (1 < 100) + type: boolean + Select Operator + expressions: + expr: 0 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +val_86 +val_27 +val_98 +val_66 +val_37 +val_15 +val_82 +val_17 +val_0 +val_57 +val_20 +val_92 +val_47 +val_72 +val_4 +val_35 +val_54 +val_51 +val_65 +val_83 +val_12 +val_67 +val_84 +val_58 +val_8 +val_24 +val_42 +val_0 +val_96 +val_26 +val_51 +val_43 +val_95 +val_98 +val_85 +val_77 +val_0 +val_87 +val_15 +val_72 +val_90 +val_19 +val_10 +val_5 +val_58 +val_35 +val_95 +val_11 +val_34 +val_42 +val_78 +val_76 +val_41 +val_30 +val_64 +val_76 +val_74 +val_69 +val_33 +val_70 +val_5 +val_2 +val_35 +val_80 +val_44 +val_53 +val_90 +val_12 +val_5 +val_70 +val_24 +val_70 +val_83 +val_26 +val_67 +val_18 +val_9 +val_18 +val_97 +val_84 +val_28 +val_37 +val_90 +val_97 Index: src/contrib/hive/ql/src/test/results/clientpositive/join12.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join12.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/join12.q.out (revision 0) @@ -0,0 +1,336 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR 
TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) c1) (TOK_SELEXPR (TOK_COLREF src value) c2)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) c3) (TOK_SELEXPR (TOK_COLREF src value) c4)))) src2) (AND (= (TOK_COLREF src1 c1) (TOK_COLREF src2 c3)) (< (TOK_COLREF src1 c1) 100))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) c5) (TOK_SELEXPR (TOK_COLREF src value) c6)))) src3) (AND (= (TOK_COLREF src1 c1) (TOK_COLREF src3 c5)) (< (TOK_COLREF src3 c5) 80)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 c1)) (TOK_SELEXPR (TOK_COLREF src2 c4))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src2:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 1 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + src1:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Filter Operator + predicate: + expr: (0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 0 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + src3:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Filter Operator + predicate: + expr: (0 < 80) + type: boolean + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 2 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + 2 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string + expr: 3 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +10 val_10 +11 val_11 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +17 val_17 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +19 val_19 +2 val_2 +20 val_20 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +27 val_27 +28 val_28 +30 val_30 +33 val_33 +34 val_34 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 
val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +4 val_4 +41 val_41 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +43 val_43 +44 val_44 +47 val_47 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +51 val_51 +51 val_51 +51 val_51 +51 val_51 +51 val_51 +51 val_51 +51 val_51 +51 val_51 +53 val_53 +54 val_54 +57 val_57 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +64 val_64 +65 val_65 +66 val_66 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +69 val_69 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +72 val_72 +72 val_72 +72 val_72 +72 val_72 +72 val_72 +72 val_72 +72 val_72 +72 val_72 +74 val_74 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +77 val_77 +78 val_78 +8 val_8 +9 val_9 Index: src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath.q.out (working copy) @@ -12,18 +12,26 @@ src_thrift Select Operator expressions: - expr: lint[1] - type: int - expr: lintstring[0].mystring - type: string - expr: mstringstring['key_2'] - type: string - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - name: dest1 + expr: lint + type: array + expr: lintstring + type: array + expr: mstringstring + type: map + Select Operator + expressions: + expr: 0[1] + type: int + expr: 1[0].mystring + type: string + expr: 2['key_2'] + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/input13.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input13.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input13.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: (key < 100) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -29,7 +29,7 @@ Filter Operator predicate: expr: ((key >= 100) and (key < 200)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -45,7 +45,7 @@ Filter Operator predicate: expr: ((key >= 200) and (key < 300)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -59,7 +59,7 @@ Filter Operator predicate: expr: (key >= 300) - type: Boolean + type: boolean Select Operator expressions: expr: value @@ 
-287,109 +287,109 @@ 194 val_194 126 val_126 169 val_169 -238 NULL 2008-04-08 12 -255 NULL 2008-04-08 12 -278 NULL 2008-04-08 12 -265 NULL 2008-04-08 12 -273 NULL 2008-04-08 12 -224 NULL 2008-04-08 12 -213 NULL 2008-04-08 12 -281 NULL 2008-04-08 12 -277 NULL 2008-04-08 12 -209 NULL 2008-04-08 12 -252 NULL 2008-04-08 12 -292 NULL 2008-04-08 12 -219 NULL 2008-04-08 12 -287 NULL 2008-04-08 12 -237 NULL 2008-04-08 12 -207 NULL 2008-04-08 12 -208 NULL 2008-04-08 12 -247 NULL 2008-04-08 12 -266 NULL 2008-04-08 12 -203 NULL 2008-04-08 12 -205 NULL 2008-04-08 12 -221 NULL 2008-04-08 12 -280 NULL 2008-04-08 12 -277 NULL 2008-04-08 12 -208 NULL 2008-04-08 12 -286 NULL 2008-04-08 12 -239 NULL 2008-04-08 12 -213 NULL 2008-04-08 12 -216 NULL 2008-04-08 12 -278 NULL 2008-04-08 12 -289 NULL 2008-04-08 12 -221 NULL 2008-04-08 12 -275 NULL 2008-04-08 12 -241 NULL 2008-04-08 12 -284 NULL 2008-04-08 12 -230 NULL 2008-04-08 12 -260 NULL 2008-04-08 12 -272 NULL 2008-04-08 12 -217 NULL 2008-04-08 12 -230 NULL 2008-04-08 12 -208 NULL 2008-04-08 12 -298 NULL 2008-04-08 12 -230 NULL 2008-04-08 12 -205 NULL 2008-04-08 12 -288 NULL 2008-04-08 12 -282 NULL 2008-04-08 12 -282 NULL 2008-04-08 12 -238 NULL 2008-04-08 12 -277 NULL 2008-04-08 12 -273 NULL 2008-04-08 12 -224 NULL 2008-04-08 12 -242 NULL 2008-04-08 12 -272 NULL 2008-04-08 12 -242 NULL 2008-04-08 12 -226 NULL 2008-04-08 12 -229 NULL 2008-04-08 12 -233 NULL 2008-04-08 12 -223 NULL 2008-04-08 12 -218 NULL 2008-04-08 12 -228 NULL 2008-04-08 12 -209 NULL 2008-04-08 12 -230 NULL 2008-04-08 12 -296 NULL 2008-04-08 12 -216 NULL 2008-04-08 12 -274 NULL 2008-04-08 12 -219 NULL 2008-04-08 12 -239 NULL 2008-04-08 12 -223 NULL 2008-04-08 12 -256 NULL 2008-04-08 12 -263 NULL 2008-04-08 12 -288 NULL 2008-04-08 12 -244 NULL 2008-04-08 12 -202 NULL 2008-04-08 12 -229 NULL 2008-04-08 12 -280 NULL 2008-04-08 12 -283 NULL 2008-04-08 12 -235 NULL 2008-04-08 12 -257 NULL 2008-04-08 12 -258 NULL 2008-04-08 12 -203 NULL 2008-04-08 12 -262 NULL 2008-04-08 12 -201 NULL 2008-04-08 12 -217 NULL 2008-04-08 12 -298 NULL 2008-04-08 12 -291 NULL 2008-04-08 12 -255 NULL 2008-04-08 12 -200 NULL 2008-04-08 12 -237 NULL 2008-04-08 12 -248 NULL 2008-04-08 12 -277 NULL 2008-04-08 12 -230 NULL 2008-04-08 12 -207 NULL 2008-04-08 12 -249 NULL 2008-04-08 12 -265 NULL 2008-04-08 12 -214 NULL 2008-04-08 12 -233 NULL 2008-04-08 12 -256 NULL 2008-04-08 12 -298 NULL 2008-04-08 12 -285 NULL 2008-04-08 12 -273 NULL 2008-04-08 12 -281 NULL 2008-04-08 12 -222 NULL 2008-04-08 12 -200 NULL 2008-04-08 12 +238 2008-04-08 12 +255 2008-04-08 12 +278 2008-04-08 12 +265 2008-04-08 12 +273 2008-04-08 12 +224 2008-04-08 12 +213 2008-04-08 12 +281 2008-04-08 12 +277 2008-04-08 12 +209 2008-04-08 12 +252 2008-04-08 12 +292 2008-04-08 12 +219 2008-04-08 12 +287 2008-04-08 12 +237 2008-04-08 12 +207 2008-04-08 12 +208 2008-04-08 12 +247 2008-04-08 12 +266 2008-04-08 12 +203 2008-04-08 12 +205 2008-04-08 12 +221 2008-04-08 12 +280 2008-04-08 12 +277 2008-04-08 12 +208 2008-04-08 12 +286 2008-04-08 12 +239 2008-04-08 12 +213 2008-04-08 12 +216 2008-04-08 12 +278 2008-04-08 12 +289 2008-04-08 12 +221 2008-04-08 12 +275 2008-04-08 12 +241 2008-04-08 12 +284 2008-04-08 12 +230 2008-04-08 12 +260 2008-04-08 12 +272 2008-04-08 12 +217 2008-04-08 12 +230 2008-04-08 12 +208 2008-04-08 12 +298 2008-04-08 12 +230 2008-04-08 12 +205 2008-04-08 12 +288 2008-04-08 12 +282 2008-04-08 12 +282 2008-04-08 12 +238 2008-04-08 12 +277 2008-04-08 12 +273 2008-04-08 12 +224 2008-04-08 12 +242 2008-04-08 12 +272 2008-04-08 12 +242 2008-04-08 12 
+226 2008-04-08 12 +229 2008-04-08 12 +233 2008-04-08 12 +223 2008-04-08 12 +218 2008-04-08 12 +228 2008-04-08 12 +209 2008-04-08 12 +230 2008-04-08 12 +296 2008-04-08 12 +216 2008-04-08 12 +274 2008-04-08 12 +219 2008-04-08 12 +239 2008-04-08 12 +223 2008-04-08 12 +256 2008-04-08 12 +263 2008-04-08 12 +288 2008-04-08 12 +244 2008-04-08 12 +202 2008-04-08 12 +229 2008-04-08 12 +280 2008-04-08 12 +283 2008-04-08 12 +235 2008-04-08 12 +257 2008-04-08 12 +258 2008-04-08 12 +203 2008-04-08 12 +262 2008-04-08 12 +201 2008-04-08 12 +217 2008-04-08 12 +298 2008-04-08 12 +291 2008-04-08 12 +255 2008-04-08 12 +200 2008-04-08 12 +237 2008-04-08 12 +248 2008-04-08 12 +277 2008-04-08 12 +230 2008-04-08 12 +207 2008-04-08 12 +249 2008-04-08 12 +265 2008-04-08 12 +214 2008-04-08 12 +233 2008-04-08 12 +256 2008-04-08 12 +298 2008-04-08 12 +285 2008-04-08 12 +273 2008-04-08 12 +281 2008-04-08 12 +222 2008-04-08 12 +200 2008-04-08 12 val_311 val_409 val_484 Index: src/contrib/hive/ql/src/test/results/clientpositive/join14.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join14.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/join14.q.out (revision 0) @@ -0,0 +1,1842 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src) (TOK_TABREF srcpart) (and (AND (= (TOK_COLREF src key) (TOK_COLREF srcpart key)) (= (TOK_COLREF srcpart ds) '2008-04-08')) (> (TOK_COLREF src key) 100)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_COLREF srcpart value))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + srcpart + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + Filter Operator + predicate: + expr: (2 = '2008-04-08') + type: boolean + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 1 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + src + Select Operator + expressions: + expr: key + type: string + Filter Operator + predicate: + expr: (0 > 100) + type: boolean + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 0 + value expressions: + expr: 0 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} + 1 {VALUE.0} {VALUE.1} {VALUE.2} + Select Operator + expressions: + expr: 0 + type: string + expr: 2 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +105 val_105 +105 val_105 +111 val_111 +111 
val_111 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +114 val_114 +114 val_114 +116 val_116 +116 val_116 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +126 val_126 +126 val_126 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +131 val_131 +131 val_131 +133 val_133 +133 val_133 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +136 val_136 +136 val_136 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +143 val_143 +143 val_143 +145 val_145 +145 val_145 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +150 val_150 +150 val_150 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +153 val_153 +153 val_153 +155 val_155 +155 val_155 +156 val_156 +156 val_156 +157 val_157 +157 val_157 +158 val_158 +158 val_158 +160 val_160 +160 val_160 +162 val_162 +162 val_162 +163 val_163 +163 val_163 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +166 val_166 +166 val_166 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +168 val_168 +168 val_168 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +170 val_170 +170 val_170 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +175 val_175 +175 val_175 +175 
val_175 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +177 val_177 +177 val_177 +178 val_178 +178 val_178 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +180 val_180 +180 val_180 +181 val_181 +181 val_181 +183 val_183 +183 val_183 +186 val_186 +186 val_186 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +189 val_189 +189 val_189 +190 val_190 +190 val_190 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +192 val_192 +192 val_192 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +194 val_194 +194 val_194 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +196 val_196 +196 val_196 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +201 val_201 +201 val_201 +202 val_202 +202 val_202 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +214 val_214 +214 val_214 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +218 val_218 +218 val_218 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +222 val_222 +222 val_222 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +226 val_226 +226 val_226 +228 val_228 +228 val_228 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 
val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +235 val_235 +235 val_235 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +241 val_241 +241 val_241 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +244 val_244 +244 val_244 +247 val_247 +247 val_247 +248 val_248 +248 val_248 +249 val_249 +249 val_249 +252 val_252 +252 val_252 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +257 val_257 +257 val_257 +258 val_258 +258 val_258 +260 val_260 +260 val_260 +262 val_262 +262 val_262 +263 val_263 +263 val_263 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +266 val_266 +266 val_266 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +274 val_274 +274 val_274 +275 val_275 +275 val_275 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +283 val_283 +283 val_283 +284 val_284 +284 val_284 +285 val_285 +285 val_285 +286 val_286 +286 val_286 +287 val_287 +287 val_287 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +289 val_289 +289 val_289 +291 val_291 +291 val_291 +292 val_292 +292 val_292 +296 val_296 +296 val_296 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +302 val_302 +302 val_302 +305 val_305 +305 val_305 +306 val_306 +306 val_306 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +307 
val_307 +307 val_307 +307 val_307 +307 val_307 +308 val_308 +308 val_308 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +310 val_310 +310 val_310 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +315 val_315 +315 val_315 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +323 val_323 +323 val_323 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +332 val_332 +332 val_332 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +335 val_335 +335 val_335 +336 val_336 +336 val_336 +338 val_338 +338 val_338 +339 val_339 +339 val_339 +341 val_341 +341 val_341 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +345 val_345 +345 val_345 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +351 val_351 +351 val_351 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +356 val_356 +356 val_356 +360 val_360 +360 val_360 +362 val_362 +362 val_362 +364 val_364 +364 val_364 +365 val_365 +365 val_365 +366 val_366 +366 val_366 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +368 val_368 +368 val_368 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +373 val_373 +373 val_373 +374 val_374 +374 
val_374 +375 val_375 +375 val_375 +377 val_377 +377 val_377 +378 val_378 +378 val_378 +379 val_379 +379 val_379 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +386 val_386 +386 val_386 +389 val_389 +389 val_389 +392 val_392 +392 val_392 +393 val_393 +393 val_393 +394 val_394 +394 val_394 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +400 val_400 +400 val_400 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +402 val_402 +402 val_402 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +404 val_404 +404 val_404 +404 val_404 +404 val_404 +404 val_404 +404 val_404 +404 val_404 +404 val_404 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +407 val_407 +407 val_407 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +411 val_411 +411 val_411 +413 val_413 +413 val_413 +413 val_413 +413 val_413 +413 val_413 +413 val_413 +413 val_413 +413 val_413 +414 val_414 +414 val_414 +414 val_414 +414 val_414 +414 val_414 +414 val_414 +414 val_414 +414 val_414 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +418 val_418 +418 val_418 +419 val_419 +419 val_419 +421 val_421 +421 val_421 +424 val_424 +424 val_424 +424 val_424 +424 val_424 +424 val_424 +424 val_424 +424 val_424 +424 val_424 +427 val_427 +427 val_427 +429 val_429 +429 val_429 +429 
val_429 +429 val_429 +429 val_429 +429 val_429 +429 val_429 +429 val_429 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +432 val_432 +432 val_432 +435 val_435 +435 val_435 +436 val_436 +436 val_436 +437 val_437 +437 val_437 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +439 val_439 +439 val_439 +439 val_439 +439 val_439 +439 val_439 +439 val_439 +439 val_439 +439 val_439 +443 val_443 +443 val_443 +444 val_444 +444 val_444 +446 val_446 +446 val_446 +448 val_448 +448 val_448 +449 val_449 +449 val_449 +452 val_452 +452 val_452 +453 val_453 +453 val_453 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +455 val_455 +455 val_455 +457 val_457 +457 val_457 +458 val_458 +458 val_458 +458 val_458 +458 val_458 +458 val_458 +458 val_458 +458 val_458 +458 val_458 +459 val_459 +459 val_459 +459 val_459 +459 val_459 +459 val_459 +459 val_459 +459 val_459 +459 val_459 +460 val_460 +460 val_460 +462 val_462 +462 val_462 +462 val_462 +462 val_462 +462 val_462 +462 val_462 +462 val_462 +462 val_462 +463 val_463 +463 val_463 +463 val_463 +463 val_463 +463 val_463 +463 val_463 +463 val_463 +463 val_463 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +467 val_467 +467 val_467 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +470 val_470 +470 val_470 +472 val_472 +472 val_472 +475 val_475 +475 val_475 +477 val_477 +477 val_477 +478 val_478 +478 val_478 +478 val_478 +478 val_478 +478 val_478 +478 val_478 +478 val_478 +478 val_478 +479 val_479 +479 val_479 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 
val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +481 val_481 +481 val_481 +482 val_482 +482 val_482 +483 val_483 +483 val_483 +484 val_484 +484 val_484 +485 val_485 +485 val_485 +487 val_487 +487 val_487 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +490 val_490 +490 val_490 +491 val_491 +491 val_491 +492 val_492 +492 val_492 +492 val_492 +492 val_492 +492 val_492 +492 val_492 +492 val_492 +492 val_492 +493 val_493 +493 val_493 +494 val_494 +494 val_494 +495 val_495 +495 val_495 +496 val_496 +496 val_496 +497 val_497 +497 val_497 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 Index: src/contrib/hive/ql/src/test/results/clientpositive/input15.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input15.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input15.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_CREATETABLE TEST15 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_TABLEROWFORMATFIELD '\t'))) + (TOK_CREATETABLE TEST15 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_TABLEROWFORMATFIELD '\t')) TOK_TBLTEXTFILE) STAGE DEPENDENCIES: Stage-0 is a root stage Index: src/contrib/hive/ql/src/test/results/clientpositive/input_part0.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input_part0.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/input_part0.q.out (revision 0) @@ -0,0 +1,1012 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (TOK_COLREF x ds) '2008-04-08')))) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + + +238 val_238 2008-04-08 11 +86 val_86 2008-04-08 11 +311 val_311 2008-04-08 11 +27 val_27 2008-04-08 11 +165 val_165 2008-04-08 11 +409 val_409 2008-04-08 11 +255 val_255 2008-04-08 11 +278 val_278 2008-04-08 11 +98 val_98 2008-04-08 11 +484 val_484 2008-04-08 11 +265 val_265 2008-04-08 11 +193 val_193 2008-04-08 11 +401 val_401 2008-04-08 11 +150 val_150 2008-04-08 11 +273 val_273 2008-04-08 11 +224 val_224 2008-04-08 11 +369 val_369 2008-04-08 11 +66 val_66 2008-04-08 11 +128 val_128 2008-04-08 11 +213 val_213 2008-04-08 11 +146 val_146 2008-04-08 11 +406 val_406 2008-04-08 11 +429 val_429 2008-04-08 11 +374 val_374 2008-04-08 11 +152 val_152 2008-04-08 11 +469 val_469 2008-04-08 11 +145 val_145 2008-04-08 11 +495 val_495 2008-04-08 11 +37 val_37 2008-04-08 11 +327 val_327 2008-04-08 11 +281 val_281 2008-04-08 11 +277 val_277 2008-04-08 11 +209 val_209 2008-04-08 11 +15 val_15 2008-04-08 11 +82 val_82 2008-04-08 11 +403 
val_403 2008-04-08 11 +166 val_166 2008-04-08 11 +417 val_417 2008-04-08 11 +430 val_430 2008-04-08 11 +252 val_252 2008-04-08 11 +292 val_292 2008-04-08 11 +219 val_219 2008-04-08 11 +287 val_287 2008-04-08 11 +153 val_153 2008-04-08 11 +193 val_193 2008-04-08 11 +338 val_338 2008-04-08 11 +446 val_446 2008-04-08 11 +459 val_459 2008-04-08 11 +394 val_394 2008-04-08 11 +237 val_237 2008-04-08 11 +482 val_482 2008-04-08 11 +174 val_174 2008-04-08 11 +413 val_413 2008-04-08 11 +494 val_494 2008-04-08 11 +207 val_207 2008-04-08 11 +199 val_199 2008-04-08 11 +466 val_466 2008-04-08 11 +208 val_208 2008-04-08 11 +174 val_174 2008-04-08 11 +399 val_399 2008-04-08 11 +396 val_396 2008-04-08 11 +247 val_247 2008-04-08 11 +417 val_417 2008-04-08 11 +489 val_489 2008-04-08 11 +162 val_162 2008-04-08 11 +377 val_377 2008-04-08 11 +397 val_397 2008-04-08 11 +309 val_309 2008-04-08 11 +365 val_365 2008-04-08 11 +266 val_266 2008-04-08 11 +439 val_439 2008-04-08 11 +342 val_342 2008-04-08 11 +367 val_367 2008-04-08 11 +325 val_325 2008-04-08 11 +167 val_167 2008-04-08 11 +195 val_195 2008-04-08 11 +475 val_475 2008-04-08 11 +17 val_17 2008-04-08 11 +113 val_113 2008-04-08 11 +155 val_155 2008-04-08 11 +203 val_203 2008-04-08 11 +339 val_339 2008-04-08 11 +0 val_0 2008-04-08 11 +455 val_455 2008-04-08 11 +128 val_128 2008-04-08 11 +311 val_311 2008-04-08 11 +316 val_316 2008-04-08 11 +57 val_57 2008-04-08 11 +302 val_302 2008-04-08 11 +205 val_205 2008-04-08 11 +149 val_149 2008-04-08 11 +438 val_438 2008-04-08 11 +345 val_345 2008-04-08 11 +129 val_129 2008-04-08 11 +170 val_170 2008-04-08 11 +20 val_20 2008-04-08 11 +489 val_489 2008-04-08 11 +157 val_157 2008-04-08 11 +378 val_378 2008-04-08 11 +221 val_221 2008-04-08 11 +92 val_92 2008-04-08 11 +111 val_111 2008-04-08 11 +47 val_47 2008-04-08 11 +72 val_72 2008-04-08 11 +4 val_4 2008-04-08 11 +280 val_280 2008-04-08 11 +35 val_35 2008-04-08 11 +427 val_427 2008-04-08 11 +277 val_277 2008-04-08 11 +208 val_208 2008-04-08 11 +356 val_356 2008-04-08 11 +399 val_399 2008-04-08 11 +169 val_169 2008-04-08 11 +382 val_382 2008-04-08 11 +498 val_498 2008-04-08 11 +125 val_125 2008-04-08 11 +386 val_386 2008-04-08 11 +437 val_437 2008-04-08 11 +469 val_469 2008-04-08 11 +192 val_192 2008-04-08 11 +286 val_286 2008-04-08 11 +187 val_187 2008-04-08 11 +176 val_176 2008-04-08 11 +54 val_54 2008-04-08 11 +459 val_459 2008-04-08 11 +51 val_51 2008-04-08 11 +138 val_138 2008-04-08 11 +103 val_103 2008-04-08 11 +239 val_239 2008-04-08 11 +213 val_213 2008-04-08 11 +216 val_216 2008-04-08 11 +430 val_430 2008-04-08 11 +278 val_278 2008-04-08 11 +176 val_176 2008-04-08 11 +289 val_289 2008-04-08 11 +221 val_221 2008-04-08 11 +65 val_65 2008-04-08 11 +318 val_318 2008-04-08 11 +332 val_332 2008-04-08 11 +311 val_311 2008-04-08 11 +275 val_275 2008-04-08 11 +137 val_137 2008-04-08 11 +241 val_241 2008-04-08 11 +83 val_83 2008-04-08 11 +333 val_333 2008-04-08 11 +180 val_180 2008-04-08 11 +284 val_284 2008-04-08 11 +12 val_12 2008-04-08 11 +230 val_230 2008-04-08 11 +181 val_181 2008-04-08 11 +67 val_67 2008-04-08 11 +260 val_260 2008-04-08 11 +404 val_404 2008-04-08 11 +384 val_384 2008-04-08 11 +489 val_489 2008-04-08 11 +353 val_353 2008-04-08 11 +373 val_373 2008-04-08 11 +272 val_272 2008-04-08 11 +138 val_138 2008-04-08 11 +217 val_217 2008-04-08 11 +84 val_84 2008-04-08 11 +348 val_348 2008-04-08 11 +466 val_466 2008-04-08 11 +58 val_58 2008-04-08 11 +8 val_8 2008-04-08 11 +411 val_411 2008-04-08 11 +230 val_230 2008-04-08 11 +208 val_208 2008-04-08 11 +348 
val_348 2008-04-08 11 +24 val_24 2008-04-08 11 +463 val_463 2008-04-08 11 +431 val_431 2008-04-08 11 +179 val_179 2008-04-08 11 +172 val_172 2008-04-08 11 +42 val_42 2008-04-08 11 +129 val_129 2008-04-08 11 +158 val_158 2008-04-08 11 +119 val_119 2008-04-08 11 +496 val_496 2008-04-08 11 +0 val_0 2008-04-08 11 +322 val_322 2008-04-08 11 +197 val_197 2008-04-08 11 +468 val_468 2008-04-08 11 +393 val_393 2008-04-08 11 +454 val_454 2008-04-08 11 +100 val_100 2008-04-08 11 +298 val_298 2008-04-08 11 +199 val_199 2008-04-08 11 +191 val_191 2008-04-08 11 +418 val_418 2008-04-08 11 +96 val_96 2008-04-08 11 +26 val_26 2008-04-08 11 +165 val_165 2008-04-08 11 +327 val_327 2008-04-08 11 +230 val_230 2008-04-08 11 +205 val_205 2008-04-08 11 +120 val_120 2008-04-08 11 +131 val_131 2008-04-08 11 +51 val_51 2008-04-08 11 +404 val_404 2008-04-08 11 +43 val_43 2008-04-08 11 +436 val_436 2008-04-08 11 +156 val_156 2008-04-08 11 +469 val_469 2008-04-08 11 +468 val_468 2008-04-08 11 +308 val_308 2008-04-08 11 +95 val_95 2008-04-08 11 +196 val_196 2008-04-08 11 +288 val_288 2008-04-08 11 +481 val_481 2008-04-08 11 +457 val_457 2008-04-08 11 +98 val_98 2008-04-08 11 +282 val_282 2008-04-08 11 +197 val_197 2008-04-08 11 +187 val_187 2008-04-08 11 +318 val_318 2008-04-08 11 +318 val_318 2008-04-08 11 +409 val_409 2008-04-08 11 +470 val_470 2008-04-08 11 +137 val_137 2008-04-08 11 +369 val_369 2008-04-08 11 +316 val_316 2008-04-08 11 +169 val_169 2008-04-08 11 +413 val_413 2008-04-08 11 +85 val_85 2008-04-08 11 +77 val_77 2008-04-08 11 +0 val_0 2008-04-08 11 +490 val_490 2008-04-08 11 +87 val_87 2008-04-08 11 +364 val_364 2008-04-08 11 +179 val_179 2008-04-08 11 +118 val_118 2008-04-08 11 +134 val_134 2008-04-08 11 +395 val_395 2008-04-08 11 +282 val_282 2008-04-08 11 +138 val_138 2008-04-08 11 +238 val_238 2008-04-08 11 +419 val_419 2008-04-08 11 +15 val_15 2008-04-08 11 +118 val_118 2008-04-08 11 +72 val_72 2008-04-08 11 +90 val_90 2008-04-08 11 +307 val_307 2008-04-08 11 +19 val_19 2008-04-08 11 +435 val_435 2008-04-08 11 +10 val_10 2008-04-08 11 +277 val_277 2008-04-08 11 +273 val_273 2008-04-08 11 +306 val_306 2008-04-08 11 +224 val_224 2008-04-08 11 +309 val_309 2008-04-08 11 +389 val_389 2008-04-08 11 +327 val_327 2008-04-08 11 +242 val_242 2008-04-08 11 +369 val_369 2008-04-08 11 +392 val_392 2008-04-08 11 +272 val_272 2008-04-08 11 +331 val_331 2008-04-08 11 +401 val_401 2008-04-08 11 +242 val_242 2008-04-08 11 +452 val_452 2008-04-08 11 +177 val_177 2008-04-08 11 +226 val_226 2008-04-08 11 +5 val_5 2008-04-08 11 +497 val_497 2008-04-08 11 +402 val_402 2008-04-08 11 +396 val_396 2008-04-08 11 +317 val_317 2008-04-08 11 +395 val_395 2008-04-08 11 +58 val_58 2008-04-08 11 +35 val_35 2008-04-08 11 +336 val_336 2008-04-08 11 +95 val_95 2008-04-08 11 +11 val_11 2008-04-08 11 +168 val_168 2008-04-08 11 +34 val_34 2008-04-08 11 +229 val_229 2008-04-08 11 +233 val_233 2008-04-08 11 +143 val_143 2008-04-08 11 +472 val_472 2008-04-08 11 +322 val_322 2008-04-08 11 +498 val_498 2008-04-08 11 +160 val_160 2008-04-08 11 +195 val_195 2008-04-08 11 +42 val_42 2008-04-08 11 +321 val_321 2008-04-08 11 +430 val_430 2008-04-08 11 +119 val_119 2008-04-08 11 +489 val_489 2008-04-08 11 +458 val_458 2008-04-08 11 +78 val_78 2008-04-08 11 +76 val_76 2008-04-08 11 +41 val_41 2008-04-08 11 +223 val_223 2008-04-08 11 +492 val_492 2008-04-08 11 +149 val_149 2008-04-08 11 +449 val_449 2008-04-08 11 +218 val_218 2008-04-08 11 +228 val_228 2008-04-08 11 +138 val_138 2008-04-08 11 +453 val_453 2008-04-08 11 +30 val_30 2008-04-08 11 +209 
val_209 2008-04-08 11 +64 val_64 2008-04-08 11 +468 val_468 2008-04-08 11 +76 val_76 2008-04-08 11 +74 val_74 2008-04-08 11 +342 val_342 2008-04-08 11 +69 val_69 2008-04-08 11 +230 val_230 2008-04-08 11 +33 val_33 2008-04-08 11 +368 val_368 2008-04-08 11 +103 val_103 2008-04-08 11 +296 val_296 2008-04-08 11 +113 val_113 2008-04-08 11 +216 val_216 2008-04-08 11 +367 val_367 2008-04-08 11 +344 val_344 2008-04-08 11 +167 val_167 2008-04-08 11 +274 val_274 2008-04-08 11 +219 val_219 2008-04-08 11 +239 val_239 2008-04-08 11 +485 val_485 2008-04-08 11 +116 val_116 2008-04-08 11 +223 val_223 2008-04-08 11 +256 val_256 2008-04-08 11 +263 val_263 2008-04-08 11 +70 val_70 2008-04-08 11 +487 val_487 2008-04-08 11 +480 val_480 2008-04-08 11 +401 val_401 2008-04-08 11 +288 val_288 2008-04-08 11 +191 val_191 2008-04-08 11 +5 val_5 2008-04-08 11 +244 val_244 2008-04-08 11 +438 val_438 2008-04-08 11 +128 val_128 2008-04-08 11 +467 val_467 2008-04-08 11 +432 val_432 2008-04-08 11 +202 val_202 2008-04-08 11 +316 val_316 2008-04-08 11 +229 val_229 2008-04-08 11 +469 val_469 2008-04-08 11 +463 val_463 2008-04-08 11 +280 val_280 2008-04-08 11 +2 val_2 2008-04-08 11 +35 val_35 2008-04-08 11 +283 val_283 2008-04-08 11 +331 val_331 2008-04-08 11 +235 val_235 2008-04-08 11 +80 val_80 2008-04-08 11 +44 val_44 2008-04-08 11 +193 val_193 2008-04-08 11 +321 val_321 2008-04-08 11 +335 val_335 2008-04-08 11 +104 val_104 2008-04-08 11 +466 val_466 2008-04-08 11 +366 val_366 2008-04-08 11 +175 val_175 2008-04-08 11 +403 val_403 2008-04-08 11 +483 val_483 2008-04-08 11 +53 val_53 2008-04-08 11 +105 val_105 2008-04-08 11 +257 val_257 2008-04-08 11 +406 val_406 2008-04-08 11 +409 val_409 2008-04-08 11 +190 val_190 2008-04-08 11 +406 val_406 2008-04-08 11 +401 val_401 2008-04-08 11 +114 val_114 2008-04-08 11 +258 val_258 2008-04-08 11 +90 val_90 2008-04-08 11 +203 val_203 2008-04-08 11 +262 val_262 2008-04-08 11 +348 val_348 2008-04-08 11 +424 val_424 2008-04-08 11 +12 val_12 2008-04-08 11 +396 val_396 2008-04-08 11 +201 val_201 2008-04-08 11 +217 val_217 2008-04-08 11 +164 val_164 2008-04-08 11 +431 val_431 2008-04-08 11 +454 val_454 2008-04-08 11 +478 val_478 2008-04-08 11 +298 val_298 2008-04-08 11 +125 val_125 2008-04-08 11 +431 val_431 2008-04-08 11 +164 val_164 2008-04-08 11 +424 val_424 2008-04-08 11 +187 val_187 2008-04-08 11 +382 val_382 2008-04-08 11 +5 val_5 2008-04-08 11 +70 val_70 2008-04-08 11 +397 val_397 2008-04-08 11 +480 val_480 2008-04-08 11 +291 val_291 2008-04-08 11 +24 val_24 2008-04-08 11 +351 val_351 2008-04-08 11 +255 val_255 2008-04-08 11 +104 val_104 2008-04-08 11 +70 val_70 2008-04-08 11 +163 val_163 2008-04-08 11 +438 val_438 2008-04-08 11 +119 val_119 2008-04-08 11 +414 val_414 2008-04-08 11 +200 val_200 2008-04-08 11 +491 val_491 2008-04-08 11 +237 val_237 2008-04-08 11 +439 val_439 2008-04-08 11 +360 val_360 2008-04-08 11 +248 val_248 2008-04-08 11 +479 val_479 2008-04-08 11 +305 val_305 2008-04-08 11 +417 val_417 2008-04-08 11 +199 val_199 2008-04-08 11 +444 val_444 2008-04-08 11 +120 val_120 2008-04-08 11 +429 val_429 2008-04-08 11 +169 val_169 2008-04-08 11 +443 val_443 2008-04-08 11 +323 val_323 2008-04-08 11 +325 val_325 2008-04-08 11 +277 val_277 2008-04-08 11 +230 val_230 2008-04-08 11 +478 val_478 2008-04-08 11 +178 val_178 2008-04-08 11 +468 val_468 2008-04-08 11 +310 val_310 2008-04-08 11 +317 val_317 2008-04-08 11 +333 val_333 2008-04-08 11 +493 val_493 2008-04-08 11 +460 val_460 2008-04-08 11 +207 val_207 2008-04-08 11 +249 val_249 2008-04-08 11 +265 val_265 2008-04-08 11 +480 
val_480 2008-04-08 11 +83 val_83 2008-04-08 11 +136 val_136 2008-04-08 11 +353 val_353 2008-04-08 11 +172 val_172 2008-04-08 11 +214 val_214 2008-04-08 11 +462 val_462 2008-04-08 11 +233 val_233 2008-04-08 11 +406 val_406 2008-04-08 11 +133 val_133 2008-04-08 11 +175 val_175 2008-04-08 11 +189 val_189 2008-04-08 11 +454 val_454 2008-04-08 11 +375 val_375 2008-04-08 11 +401 val_401 2008-04-08 11 +421 val_421 2008-04-08 11 +407 val_407 2008-04-08 11 +384 val_384 2008-04-08 11 +256 val_256 2008-04-08 11 +26 val_26 2008-04-08 11 +134 val_134 2008-04-08 11 +67 val_67 2008-04-08 11 +384 val_384 2008-04-08 11 +379 val_379 2008-04-08 11 +18 val_18 2008-04-08 11 +462 val_462 2008-04-08 11 +492 val_492 2008-04-08 11 +100 val_100 2008-04-08 11 +298 val_298 2008-04-08 11 +9 val_9 2008-04-08 11 +341 val_341 2008-04-08 11 +498 val_498 2008-04-08 11 +146 val_146 2008-04-08 11 +458 val_458 2008-04-08 11 +362 val_362 2008-04-08 11 +186 val_186 2008-04-08 11 +285 val_285 2008-04-08 11 +348 val_348 2008-04-08 11 +167 val_167 2008-04-08 11 +18 val_18 2008-04-08 11 +273 val_273 2008-04-08 11 +183 val_183 2008-04-08 11 +281 val_281 2008-04-08 11 +344 val_344 2008-04-08 11 +97 val_97 2008-04-08 11 +469 val_469 2008-04-08 11 +315 val_315 2008-04-08 11 +84 val_84 2008-04-08 11 +28 val_28 2008-04-08 11 +37 val_37 2008-04-08 11 +448 val_448 2008-04-08 11 +152 val_152 2008-04-08 11 +348 val_348 2008-04-08 11 +307 val_307 2008-04-08 11 +194 val_194 2008-04-08 11 +414 val_414 2008-04-08 11 +477 val_477 2008-04-08 11 +222 val_222 2008-04-08 11 +126 val_126 2008-04-08 11 +90 val_90 2008-04-08 11 +169 val_169 2008-04-08 11 +403 val_403 2008-04-08 11 +400 val_400 2008-04-08 11 +200 val_200 2008-04-08 11 +97 val_97 2008-04-08 11 +238 val_238 2008-04-08 12 +86 val_86 2008-04-08 12 +311 val_311 2008-04-08 12 +27 val_27 2008-04-08 12 +165 val_165 2008-04-08 12 +409 val_409 2008-04-08 12 +255 val_255 2008-04-08 12 +278 val_278 2008-04-08 12 +98 val_98 2008-04-08 12 +484 val_484 2008-04-08 12 +265 val_265 2008-04-08 12 +193 val_193 2008-04-08 12 +401 val_401 2008-04-08 12 +150 val_150 2008-04-08 12 +273 val_273 2008-04-08 12 +224 val_224 2008-04-08 12 +369 val_369 2008-04-08 12 +66 val_66 2008-04-08 12 +128 val_128 2008-04-08 12 +213 val_213 2008-04-08 12 +146 val_146 2008-04-08 12 +406 val_406 2008-04-08 12 +429 val_429 2008-04-08 12 +374 val_374 2008-04-08 12 +152 val_152 2008-04-08 12 +469 val_469 2008-04-08 12 +145 val_145 2008-04-08 12 +495 val_495 2008-04-08 12 +37 val_37 2008-04-08 12 +327 val_327 2008-04-08 12 +281 val_281 2008-04-08 12 +277 val_277 2008-04-08 12 +209 val_209 2008-04-08 12 +15 val_15 2008-04-08 12 +82 val_82 2008-04-08 12 +403 val_403 2008-04-08 12 +166 val_166 2008-04-08 12 +417 val_417 2008-04-08 12 +430 val_430 2008-04-08 12 +252 val_252 2008-04-08 12 +292 val_292 2008-04-08 12 +219 val_219 2008-04-08 12 +287 val_287 2008-04-08 12 +153 val_153 2008-04-08 12 +193 val_193 2008-04-08 12 +338 val_338 2008-04-08 12 +446 val_446 2008-04-08 12 +459 val_459 2008-04-08 12 +394 val_394 2008-04-08 12 +237 val_237 2008-04-08 12 +482 val_482 2008-04-08 12 +174 val_174 2008-04-08 12 +413 val_413 2008-04-08 12 +494 val_494 2008-04-08 12 +207 val_207 2008-04-08 12 +199 val_199 2008-04-08 12 +466 val_466 2008-04-08 12 +208 val_208 2008-04-08 12 +174 val_174 2008-04-08 12 +399 val_399 2008-04-08 12 +396 val_396 2008-04-08 12 +247 val_247 2008-04-08 12 +417 val_417 2008-04-08 12 +489 val_489 2008-04-08 12 +162 val_162 2008-04-08 12 +377 val_377 2008-04-08 12 +397 val_397 2008-04-08 12 +309 val_309 2008-04-08 12 +365 
val_365 2008-04-08 12 +266 val_266 2008-04-08 12 +439 val_439 2008-04-08 12 +342 val_342 2008-04-08 12 +367 val_367 2008-04-08 12 +325 val_325 2008-04-08 12 +167 val_167 2008-04-08 12 +195 val_195 2008-04-08 12 +475 val_475 2008-04-08 12 +17 val_17 2008-04-08 12 +113 val_113 2008-04-08 12 +155 val_155 2008-04-08 12 +203 val_203 2008-04-08 12 +339 val_339 2008-04-08 12 +0 val_0 2008-04-08 12 +455 val_455 2008-04-08 12 +128 val_128 2008-04-08 12 +311 val_311 2008-04-08 12 +316 val_316 2008-04-08 12 +57 val_57 2008-04-08 12 +302 val_302 2008-04-08 12 +205 val_205 2008-04-08 12 +149 val_149 2008-04-08 12 +438 val_438 2008-04-08 12 +345 val_345 2008-04-08 12 +129 val_129 2008-04-08 12 +170 val_170 2008-04-08 12 +20 val_20 2008-04-08 12 +489 val_489 2008-04-08 12 +157 val_157 2008-04-08 12 +378 val_378 2008-04-08 12 +221 val_221 2008-04-08 12 +92 val_92 2008-04-08 12 +111 val_111 2008-04-08 12 +47 val_47 2008-04-08 12 +72 val_72 2008-04-08 12 +4 val_4 2008-04-08 12 +280 val_280 2008-04-08 12 +35 val_35 2008-04-08 12 +427 val_427 2008-04-08 12 +277 val_277 2008-04-08 12 +208 val_208 2008-04-08 12 +356 val_356 2008-04-08 12 +399 val_399 2008-04-08 12 +169 val_169 2008-04-08 12 +382 val_382 2008-04-08 12 +498 val_498 2008-04-08 12 +125 val_125 2008-04-08 12 +386 val_386 2008-04-08 12 +437 val_437 2008-04-08 12 +469 val_469 2008-04-08 12 +192 val_192 2008-04-08 12 +286 val_286 2008-04-08 12 +187 val_187 2008-04-08 12 +176 val_176 2008-04-08 12 +54 val_54 2008-04-08 12 +459 val_459 2008-04-08 12 +51 val_51 2008-04-08 12 +138 val_138 2008-04-08 12 +103 val_103 2008-04-08 12 +239 val_239 2008-04-08 12 +213 val_213 2008-04-08 12 +216 val_216 2008-04-08 12 +430 val_430 2008-04-08 12 +278 val_278 2008-04-08 12 +176 val_176 2008-04-08 12 +289 val_289 2008-04-08 12 +221 val_221 2008-04-08 12 +65 val_65 2008-04-08 12 +318 val_318 2008-04-08 12 +332 val_332 2008-04-08 12 +311 val_311 2008-04-08 12 +275 val_275 2008-04-08 12 +137 val_137 2008-04-08 12 +241 val_241 2008-04-08 12 +83 val_83 2008-04-08 12 +333 val_333 2008-04-08 12 +180 val_180 2008-04-08 12 +284 val_284 2008-04-08 12 +12 val_12 2008-04-08 12 +230 val_230 2008-04-08 12 +181 val_181 2008-04-08 12 +67 val_67 2008-04-08 12 +260 val_260 2008-04-08 12 +404 val_404 2008-04-08 12 +384 val_384 2008-04-08 12 +489 val_489 2008-04-08 12 +353 val_353 2008-04-08 12 +373 val_373 2008-04-08 12 +272 val_272 2008-04-08 12 +138 val_138 2008-04-08 12 +217 val_217 2008-04-08 12 +84 val_84 2008-04-08 12 +348 val_348 2008-04-08 12 +466 val_466 2008-04-08 12 +58 val_58 2008-04-08 12 +8 val_8 2008-04-08 12 +411 val_411 2008-04-08 12 +230 val_230 2008-04-08 12 +208 val_208 2008-04-08 12 +348 val_348 2008-04-08 12 +24 val_24 2008-04-08 12 +463 val_463 2008-04-08 12 +431 val_431 2008-04-08 12 +179 val_179 2008-04-08 12 +172 val_172 2008-04-08 12 +42 val_42 2008-04-08 12 +129 val_129 2008-04-08 12 +158 val_158 2008-04-08 12 +119 val_119 2008-04-08 12 +496 val_496 2008-04-08 12 +0 val_0 2008-04-08 12 +322 val_322 2008-04-08 12 +197 val_197 2008-04-08 12 +468 val_468 2008-04-08 12 +393 val_393 2008-04-08 12 +454 val_454 2008-04-08 12 +100 val_100 2008-04-08 12 +298 val_298 2008-04-08 12 +199 val_199 2008-04-08 12 +191 val_191 2008-04-08 12 +418 val_418 2008-04-08 12 +96 val_96 2008-04-08 12 +26 val_26 2008-04-08 12 +165 val_165 2008-04-08 12 +327 val_327 2008-04-08 12 +230 val_230 2008-04-08 12 +205 val_205 2008-04-08 12 +120 val_120 2008-04-08 12 +131 val_131 2008-04-08 12 +51 val_51 2008-04-08 12 +404 val_404 2008-04-08 12 +43 val_43 2008-04-08 12 +436 val_436 2008-04-08 12 
+156 val_156 2008-04-08 12 +469 val_469 2008-04-08 12 +468 val_468 2008-04-08 12 +308 val_308 2008-04-08 12 +95 val_95 2008-04-08 12 +196 val_196 2008-04-08 12 +288 val_288 2008-04-08 12 +481 val_481 2008-04-08 12 +457 val_457 2008-04-08 12 +98 val_98 2008-04-08 12 +282 val_282 2008-04-08 12 +197 val_197 2008-04-08 12 +187 val_187 2008-04-08 12 +318 val_318 2008-04-08 12 +318 val_318 2008-04-08 12 +409 val_409 2008-04-08 12 +470 val_470 2008-04-08 12 +137 val_137 2008-04-08 12 +369 val_369 2008-04-08 12 +316 val_316 2008-04-08 12 +169 val_169 2008-04-08 12 +413 val_413 2008-04-08 12 +85 val_85 2008-04-08 12 +77 val_77 2008-04-08 12 +0 val_0 2008-04-08 12 +490 val_490 2008-04-08 12 +87 val_87 2008-04-08 12 +364 val_364 2008-04-08 12 +179 val_179 2008-04-08 12 +118 val_118 2008-04-08 12 +134 val_134 2008-04-08 12 +395 val_395 2008-04-08 12 +282 val_282 2008-04-08 12 +138 val_138 2008-04-08 12 +238 val_238 2008-04-08 12 +419 val_419 2008-04-08 12 +15 val_15 2008-04-08 12 +118 val_118 2008-04-08 12 +72 val_72 2008-04-08 12 +90 val_90 2008-04-08 12 +307 val_307 2008-04-08 12 +19 val_19 2008-04-08 12 +435 val_435 2008-04-08 12 +10 val_10 2008-04-08 12 +277 val_277 2008-04-08 12 +273 val_273 2008-04-08 12 +306 val_306 2008-04-08 12 +224 val_224 2008-04-08 12 +309 val_309 2008-04-08 12 +389 val_389 2008-04-08 12 +327 val_327 2008-04-08 12 +242 val_242 2008-04-08 12 +369 val_369 2008-04-08 12 +392 val_392 2008-04-08 12 +272 val_272 2008-04-08 12 +331 val_331 2008-04-08 12 +401 val_401 2008-04-08 12 +242 val_242 2008-04-08 12 +452 val_452 2008-04-08 12 +177 val_177 2008-04-08 12 +226 val_226 2008-04-08 12 +5 val_5 2008-04-08 12 +497 val_497 2008-04-08 12 +402 val_402 2008-04-08 12 +396 val_396 2008-04-08 12 +317 val_317 2008-04-08 12 +395 val_395 2008-04-08 12 +58 val_58 2008-04-08 12 +35 val_35 2008-04-08 12 +336 val_336 2008-04-08 12 +95 val_95 2008-04-08 12 +11 val_11 2008-04-08 12 +168 val_168 2008-04-08 12 +34 val_34 2008-04-08 12 +229 val_229 2008-04-08 12 +233 val_233 2008-04-08 12 +143 val_143 2008-04-08 12 +472 val_472 2008-04-08 12 +322 val_322 2008-04-08 12 +498 val_498 2008-04-08 12 +160 val_160 2008-04-08 12 +195 val_195 2008-04-08 12 +42 val_42 2008-04-08 12 +321 val_321 2008-04-08 12 +430 val_430 2008-04-08 12 +119 val_119 2008-04-08 12 +489 val_489 2008-04-08 12 +458 val_458 2008-04-08 12 +78 val_78 2008-04-08 12 +76 val_76 2008-04-08 12 +41 val_41 2008-04-08 12 +223 val_223 2008-04-08 12 +492 val_492 2008-04-08 12 +149 val_149 2008-04-08 12 +449 val_449 2008-04-08 12 +218 val_218 2008-04-08 12 +228 val_228 2008-04-08 12 +138 val_138 2008-04-08 12 +453 val_453 2008-04-08 12 +30 val_30 2008-04-08 12 +209 val_209 2008-04-08 12 +64 val_64 2008-04-08 12 +468 val_468 2008-04-08 12 +76 val_76 2008-04-08 12 +74 val_74 2008-04-08 12 +342 val_342 2008-04-08 12 +69 val_69 2008-04-08 12 +230 val_230 2008-04-08 12 +33 val_33 2008-04-08 12 +368 val_368 2008-04-08 12 +103 val_103 2008-04-08 12 +296 val_296 2008-04-08 12 +113 val_113 2008-04-08 12 +216 val_216 2008-04-08 12 +367 val_367 2008-04-08 12 +344 val_344 2008-04-08 12 +167 val_167 2008-04-08 12 +274 val_274 2008-04-08 12 +219 val_219 2008-04-08 12 +239 val_239 2008-04-08 12 +485 val_485 2008-04-08 12 +116 val_116 2008-04-08 12 +223 val_223 2008-04-08 12 +256 val_256 2008-04-08 12 +263 val_263 2008-04-08 12 +70 val_70 2008-04-08 12 +487 val_487 2008-04-08 12 +480 val_480 2008-04-08 12 +401 val_401 2008-04-08 12 +288 val_288 2008-04-08 12 +191 val_191 2008-04-08 12 +5 val_5 2008-04-08 12 +244 val_244 2008-04-08 12 +438 val_438 2008-04-08 12 
+128 val_128 2008-04-08 12 +467 val_467 2008-04-08 12 +432 val_432 2008-04-08 12 +202 val_202 2008-04-08 12 +316 val_316 2008-04-08 12 +229 val_229 2008-04-08 12 +469 val_469 2008-04-08 12 +463 val_463 2008-04-08 12 +280 val_280 2008-04-08 12 +2 val_2 2008-04-08 12 +35 val_35 2008-04-08 12 +283 val_283 2008-04-08 12 +331 val_331 2008-04-08 12 +235 val_235 2008-04-08 12 +80 val_80 2008-04-08 12 +44 val_44 2008-04-08 12 +193 val_193 2008-04-08 12 +321 val_321 2008-04-08 12 +335 val_335 2008-04-08 12 +104 val_104 2008-04-08 12 +466 val_466 2008-04-08 12 +366 val_366 2008-04-08 12 +175 val_175 2008-04-08 12 +403 val_403 2008-04-08 12 +483 val_483 2008-04-08 12 +53 val_53 2008-04-08 12 +105 val_105 2008-04-08 12 +257 val_257 2008-04-08 12 +406 val_406 2008-04-08 12 +409 val_409 2008-04-08 12 +190 val_190 2008-04-08 12 +406 val_406 2008-04-08 12 +401 val_401 2008-04-08 12 +114 val_114 2008-04-08 12 +258 val_258 2008-04-08 12 +90 val_90 2008-04-08 12 +203 val_203 2008-04-08 12 +262 val_262 2008-04-08 12 +348 val_348 2008-04-08 12 +424 val_424 2008-04-08 12 +12 val_12 2008-04-08 12 +396 val_396 2008-04-08 12 +201 val_201 2008-04-08 12 +217 val_217 2008-04-08 12 +164 val_164 2008-04-08 12 +431 val_431 2008-04-08 12 +454 val_454 2008-04-08 12 +478 val_478 2008-04-08 12 +298 val_298 2008-04-08 12 +125 val_125 2008-04-08 12 +431 val_431 2008-04-08 12 +164 val_164 2008-04-08 12 +424 val_424 2008-04-08 12 +187 val_187 2008-04-08 12 +382 val_382 2008-04-08 12 +5 val_5 2008-04-08 12 +70 val_70 2008-04-08 12 +397 val_397 2008-04-08 12 +480 val_480 2008-04-08 12 +291 val_291 2008-04-08 12 +24 val_24 2008-04-08 12 +351 val_351 2008-04-08 12 +255 val_255 2008-04-08 12 +104 val_104 2008-04-08 12 +70 val_70 2008-04-08 12 +163 val_163 2008-04-08 12 +438 val_438 2008-04-08 12 +119 val_119 2008-04-08 12 +414 val_414 2008-04-08 12 +200 val_200 2008-04-08 12 +491 val_491 2008-04-08 12 +237 val_237 2008-04-08 12 +439 val_439 2008-04-08 12 +360 val_360 2008-04-08 12 +248 val_248 2008-04-08 12 +479 val_479 2008-04-08 12 +305 val_305 2008-04-08 12 +417 val_417 2008-04-08 12 +199 val_199 2008-04-08 12 +444 val_444 2008-04-08 12 +120 val_120 2008-04-08 12 +429 val_429 2008-04-08 12 +169 val_169 2008-04-08 12 +443 val_443 2008-04-08 12 +323 val_323 2008-04-08 12 +325 val_325 2008-04-08 12 +277 val_277 2008-04-08 12 +230 val_230 2008-04-08 12 +478 val_478 2008-04-08 12 +178 val_178 2008-04-08 12 +468 val_468 2008-04-08 12 +310 val_310 2008-04-08 12 +317 val_317 2008-04-08 12 +333 val_333 2008-04-08 12 +493 val_493 2008-04-08 12 +460 val_460 2008-04-08 12 +207 val_207 2008-04-08 12 +249 val_249 2008-04-08 12 +265 val_265 2008-04-08 12 +480 val_480 2008-04-08 12 +83 val_83 2008-04-08 12 +136 val_136 2008-04-08 12 +353 val_353 2008-04-08 12 +172 val_172 2008-04-08 12 +214 val_214 2008-04-08 12 +462 val_462 2008-04-08 12 +233 val_233 2008-04-08 12 +406 val_406 2008-04-08 12 +133 val_133 2008-04-08 12 +175 val_175 2008-04-08 12 +189 val_189 2008-04-08 12 +454 val_454 2008-04-08 12 +375 val_375 2008-04-08 12 +401 val_401 2008-04-08 12 +421 val_421 2008-04-08 12 +407 val_407 2008-04-08 12 +384 val_384 2008-04-08 12 +256 val_256 2008-04-08 12 +26 val_26 2008-04-08 12 +134 val_134 2008-04-08 12 +67 val_67 2008-04-08 12 +384 val_384 2008-04-08 12 +379 val_379 2008-04-08 12 +18 val_18 2008-04-08 12 +462 val_462 2008-04-08 12 +492 val_492 2008-04-08 12 +100 val_100 2008-04-08 12 +298 val_298 2008-04-08 12 +9 val_9 2008-04-08 12 +341 val_341 2008-04-08 12 +498 val_498 2008-04-08 12 +146 val_146 2008-04-08 12 +458 val_458 2008-04-08 12 
+362 val_362 2008-04-08 12 +186 val_186 2008-04-08 12 +285 val_285 2008-04-08 12 +348 val_348 2008-04-08 12 +167 val_167 2008-04-08 12 +18 val_18 2008-04-08 12 +273 val_273 2008-04-08 12 +183 val_183 2008-04-08 12 +281 val_281 2008-04-08 12 +344 val_344 2008-04-08 12 +97 val_97 2008-04-08 12 +469 val_469 2008-04-08 12 +315 val_315 2008-04-08 12 +84 val_84 2008-04-08 12 +28 val_28 2008-04-08 12 +37 val_37 2008-04-08 12 +448 val_448 2008-04-08 12 +152 val_152 2008-04-08 12 +348 val_348 2008-04-08 12 +307 val_307 2008-04-08 12 +194 val_194 2008-04-08 12 +414 val_414 2008-04-08 12 +477 val_477 2008-04-08 12 +222 val_222 2008-04-08 12 +126 val_126 2008-04-08 12 +90 val_90 2008-04-08 12 +169 val_169 2008-04-08 12 +403 val_403 2008-04-08 12 +400 val_400 2008-04-08 12 +200 val_200 2008-04-08 12 +97 val_97 2008-04-08 12 Index: src/contrib/hive/ql/src/test/results/clientpositive/join16.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join16.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/join16.q.out (revision 0) @@ -0,0 +1,88 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a key)) (TOK_SELEXPR (TOK_COLREF a value))) (TOK_WHERE (> (TOK_COLREF a key) 10)))) subq) (TOK_TABREF src tab) (and (and (= (TOK_COLREF subq key) (TOK_COLREF tab key)) (> (TOK_COLREF subq key) 20)) (= (TOK_COLREF subq value) (TOK_COLREF tab value))))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF subq key)) (TOK_SELEXPR (TOK_COLREF tab value))) (TOK_WHERE (< (TOK_COLREF tab value) 200)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + subq:a + Filter Operator + predicate: + expr: (key > 10) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Filter Operator + predicate: + expr: (0 > 20) + type: boolean + Reduce Output Operator + key expressions: + expr: 0 + type: string + expr: 1 + type: string + Map-reduce partition columns: + expr: 0 + type: string + expr: 1 + type: string + tag: 0 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + tab + Reduce Output Operator + key expressions: + expr: key + type: string + expr: value + type: string + Map-reduce partition columns: + expr: key + type: string + expr: value + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + Filter Operator + predicate: + expr: (3 < 200) + type: boolean + Select Operator + expressions: + expr: 0 + type: string + expr: 3 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + Index: src/contrib/hive/ql/src/test/results/clientpositive/input17.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input17.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/input17.q.out (revision 0) @@ -0,0 +1,80 @@ +ABSTRACT SYNTAX TREE: 
+ (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (+ (TOK_COLREF src_thrift aint) ([ (TOK_COLREF src_thrift lint) 0)) ([ (TOK_COLREF src_thrift lintstring) 0)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        tmap:src_thrift
+          Select Operator
+            expressions:
+                  expr: aint
+                  type: int
+                  expr: lint
+                  type: array
+                  expr: lintstring
+                  type: array
+            Select Operator
+              expressions:
+                    expr: (0 + 1[0])
+                    type: int
+                    expr: 2[0]
+                    type: struct{myint:int,mystring:string}
+              Transform Operator
+                command: /bin/cat
+                output info:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                Reduce Output Operator
+                  key expressions:
+                        expr: tkey
+                        type: string
+                  Map-reduce partition columns:
+                        expr: tkey
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: tkey
+                        type: string
+                        expr: tvalue
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          Select Operator
+            expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+            File Output Operator
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace:
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+              name: dest1
+
+
+-1103622755 {"myint":64,"mystring":"512"}
+-1147582749 {"myint":1,"mystring":"1"}
+-1220068486 {"myint":0,"mystring":"0"}
+-1281615204 {"myint":36,"mystring":"216"}
+-1587372270 {"myint":9,"mystring":"27"}
+-1883609158 {"myint":81,"mystring":"729"}
+-2091002568 {"myint":4,"mystring":"8"}
+-240543261 {"myint":16,"mystring":"64"}
+1539139271 {"myint":49,"mystring":"343"}
+1914724542 {"myint":25,"mystring":"125"}
Index: src/contrib/hive/ql/src/test/results/clientpositive/input_part2.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/input_part2.q.out (revision 712243)
+++ src/contrib/hive/ql/src/test/results/clientpositive/input_part2.q.out (working copy)
@@ -13,7 +13,7 @@
       Filter Operator
         predicate:
             expr: (((key < 100) and (ds = '2008-04-08')) and (hr = '12'))
-            type: Boolean
+            type: boolean
       Select Operator
         expressions:
             expr: key
@@ -25,7 +25,7 @@
             expr: ds
             type: string
       File Output Operator
-        directory: /tmp/hive-njain/925269835/11972654.10000.insclause-0
+        directory: /tmp/hive-njain/238334885/114382058.10000.insclause-0
         table:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
@@ -34,7 +34,6 @@
             serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
             serialization.format 1
             columns key,value,hr,ds
-            SORTBUCKETCOLSPREFIX TRUE
             bucket_count -1
             serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
             file.inputformat org.apache.hadoop.mapred.TextInputFormat
@@ -45,7 +44,7 @@
       Filter Operator
         predicate:
             expr: (((key < 100) and (ds = '2008-04-09')) and (hr = '12'))
-            type: Boolean
+            type: boolean
       Select Operator
         expressions:
             expr: key
@@ -57,7 +56,7 @@
             expr: ds
             type: string
       File Output Operator
-        directory: /tmp/hive-njain/925269835/11972654.10001.insclause-1
+        directory: /tmp/hive-njain/238334885/114382058.10001.insclause-1
         table:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
@@ -66,7 +65,6 @@
             serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
             serialization.format 1
             columns key,value,hr,ds
-            SORTBUCKETCOLSPREFIX TRUE
             bucket_count -1
             serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
             file.inputformat org.apache.hadoop.mapred.TextInputFormat
@@ -126,7 +124,7 @@
     Move Operator
       tables:
         replace:
-          source: /tmp/hive-njain/925269835/11972654.10000.insclause-0
+          source: /tmp/hive-njain/238334885/114382058.10000.insclause-0
          table:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
@@ -135,7 +133,6 @@
             serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
             serialization.format 1
             columns key,value,hr,ds
-            SORTBUCKETCOLSPREFIX TRUE
             bucket_count -1
             serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
             file.inputformat org.apache.hadoop.mapred.TextInputFormat
@@ -144,7 +141,7 @@
             serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
           name: dest1
         replace:
-          source: /tmp/hive-njain/925269835/11972654.10001.insclause-1
+          source: /tmp/hive-njain/238334885/114382058.10001.insclause-1
          table:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
@@ -153,7 +150,6 @@
             serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
             serialization.format 1
             columns key,value,hr,ds
-            SORTBUCKETCOLSPREFIX TRUE
             bucket_count -1
             serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
             file.inputformat org.apache.hadoop.mapred.TextInputFormat
Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out (revision 712243)
+++ src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out (working copy)
@@ -15,7 +15,9 @@
             key expressions:
                   expr: key
                   type: string
-            # partition fields: -1
+            Map-reduce partition columns:
+                  expr: rand()
+                  type: double
             tag: -1
             value expressions:
                   expr: substr(value, 4)
@@ -23,29 +25,32 @@
       Reduce Operator Tree:
         Group By Operator
-          expr: sum(VALUE.0)
+          expr: sum(UDFToDouble(VALUE.0))
           keys:
                 expr: KEY.0
                 type: string
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-njain/748679827/1407352694.10001
+        /tmp/hive-njain/250776234/130709293.10001
           Reduce Output Operator
             key expressions:
                   expr: 0
                   type: string
-            # partition fields: 1
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
             tag: -1
             value expressions:
                   expr: 1
-                  type: string
+                  type: double
       Reduce Operator Tree:
         Group By Operator
@@ -53,13 +58,13 @@
           keys:
                 expr: KEY.0
                 type: string
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 0
                   type: string
                   expr: 1
-                  type: string
+                  type: double
             File Output Operator
               table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
Index: src/contrib/hive/ql/src/test/results/clientpositive/input_part4.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/input_part4.q.out (revision 0)
+++ src/contrib/hive/ql/src/test/results/clientpositive/input_part4.q.out (revision 0)
@@ -0,0 +1,12 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (and (= (TOK_COLREF x ds) '2008-04-08') (= (TOK_COLREF x hr) 15)))))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
Index: src/contrib/hive/ql/src/test/results/clientpositive/input19.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/input19.q.out (revision 0)
+++ src/contrib/hive/ql/src/test/results/clientpositive/input19.q.out (revision 0)
@@ -0,0 +1 @@
+127.0.0.1 frank 10/Oct/2000:13:55:36 -0700 GET /apache_pb.gif HTTP/1.0 200 2326
Index: src/contrib/hive/ql/src/test/results/clientpositive/subq.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/subq.q.out (revision 712243)
+++ src/contrib/hive/ql/src/test/results/clientpositive/subq.q.out (working copy)
@@ -13,7 +13,7 @@
       Filter Operator
         predicate:
             expr: (key < 100)
-            type: Boolean
+            type: boolean
       Select Operator
         expressions:
             expr: key
Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out (revision 712243)
+++ src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out (working copy)
@@ -11,44 +11,50 @@
     Map Reduce
       Alias -> Map Operator Tree:
         src
-          Reduce Output Operator
-            key expressions:
-                  expr: substr(value, 4)
+          Select Operator
+            expressions:
+                  expr: value
                   type: string
-            # partition fields: 2147483647
-            tag: -1
+            Reduce Output Operator
+              key expressions:
+                    expr: substr(0, 4)
+                    type: string
+              Map-reduce partition columns:
+                    expr: substr(0, 4)
+                    type: string
+              tag: -1
       Reduce Operator Tree:
         Group By Operator
-          expr: avg(DISTINCT KEY.0)
-          expr: sum(KEY.0)
-          expr: avg(KEY.0)
-          expr: min(KEY.0)
-          expr: max(KEY.0)
+          expr: avg(DISTINCT UDFToDouble(KEY.0))
+          expr: sum(UDFToDouble(KEY.0))
+          expr: avg(UDFToDouble(KEY.0))
+          expr: min(UDFToDouble(KEY.0))
+          expr: max(UDFToDouble(KEY.0))
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
      Alias -> Map Operator Tree:
-        /tmp/hive-njain/629715569/118113569.10001
+        /tmp/hive-njain/67781830/202058716.10001
          Reduce Output Operator
-            # partition fields: 0
             tag: -1
             value expressions:
                   expr: 0
                   type: string
                   expr: 1
-                  type: string
+                  type: double
                   expr: 2
                   type: string
                   expr: 3
-                  type: string
+                  type: double
                   expr: 4
-                  type: string
+                  type: double
       Reduce Operator Tree:
         Group By Operator
@@ -57,19 +63,19 @@
           expr: avg(VALUE.2)
           expr: min(VALUE.3)
           expr: max(VALUE.4)
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 1
-                  type: string
+                  type: double
                   expr: 2
                   type: string
                   expr: 0
                   type: string
                   expr: 4
-                  type: string
+                  type: double
                   expr: 3
-                  type: string
+                  type: double
           File Output Operator
             table:
               input format: org.apache.hadoop.mapred.TextInputFormat
Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out (revision 712243)
+++ src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out (working copy)
@@ -15,7 +15,9 @@
             key expressions:
                   expr: key
                   type: string
-            # partition fields: -1
+            Map-reduce partition columns:
+                  expr: rand()
+                  type: double
             tag: -1
             value expressions:
                   expr: substr(value, 4)
@@ -23,29 +25,32 @@
       Reduce Operator Tree:
         Group By Operator
-          expr: sum(VALUE.0)
+          expr: sum(UDFToDouble(VALUE.0))
           keys:
                 expr: KEY.0
                 type: string
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-njain/888102295/1013886705.10001
+        /tmp/hive-njain/485297652/61546074.10001
           Reduce Output Operator
             key expressions:
                   expr: 0
                   type: string
-            # partition fields: 1
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
             tag: -1
             value expressions:
                   expr: 1
-                  type: string
+                  type: double
       Reduce Operator Tree:
         Group By Operator
@@ -53,13 +58,13 @@
           keys:
                 expr: KEY.0
                 type: string
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 0
                   type: string
                   expr: 1
-                  type: string
+                  type: double
             File Output Operator
               table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
Index: src/contrib/hive/ql/src/test/results/clientpositive/alter1.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/alter1.q.out (revision 0)
+++ src/contrib/hive/ql/src/test/results/clientpositive/alter1.q.out (revision 0)
@@ -0,0 +1,28 @@
+a int
+b int
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+a int
+b int
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=njain,c=3,last_modified_time=1225994182,a=1})
+a int
+b int
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+a int
+b int
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+a int
+b int
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{s2=20,s1=10,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+a string 'from deserializer'
+b string 'from deserializer'
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from
deserializer)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.TestSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1}) +a string 'from deserializer' +b string 'from deserializer' +Detailed Table Information: +Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1}) Index: src/contrib/hive/ql/src/test/results/clientpositive/subq2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/subq2.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/subq2.q.out (working copy) @@ -11,15 +11,21 @@ Map Reduce Alias -> Map Operator Tree: a:b - Reduce Output Operator - key expressions: + Select Operator + expressions: expr: key type: string - # partition fields: -1 - tag: -1 - value expressions: - expr: 1 - type: int + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator @@ -30,22 +36,25 @@ mode: partial1 File Output Operator table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-njain/57219117/75379653.10002 + /tmp/hive-njain/158235739/1374397779.10002 Reduce Output Operator key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: -1 value expressions: expr: 1 - type: string + type: bigint Reduce Operator Tree: Group By Operator @@ -53,23 +62,23 @@ keys: expr: KEY.0 type: string - mode: partial2 + mode: unknown Select Operator expressions: expr: 0 type: string expr: 1 - type: string + type: bigint Filter Operator predicate: expr: (0 >= 90) - type: Boolean + type: boolean Select Operator expressions: expr: 0 type: string expr: 1 - type: string + type: bigint File Output Operator table: input format: org.apache.hadoop.mapred.TextInputFormat Index: src/contrib/hive/ql/src/test/results/clientpositive/input11_limit.q.out =================================================================== --- 
src/contrib/hive/ql/src/test/results/clientpositive/input11_limit.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input11_limit.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: (key < 100) - type: Boolean + type: boolean Select Operator expressions: expr: key Index: src/contrib/hive/ql/src/test/results/clientpositive/input14_limit.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input14_limit.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input14_limit.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_COLLIST (TOK_COLREF src key) (TOK_COLREF src value)) (TOK_ALIASLIST tkey tvalue) '/bin/cat'))) (TOK_CLUSTERBY tkey) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100)))) + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY tkey) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100)))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -26,7 +26,9 @@ key expressions: expr: tkey type: string - # partition fields: 1 + Map-reduce partition columns: + expr: tkey + type: string tag: -1 value expressions: expr: tkey @@ -38,18 +40,21 @@ Limit File Output Operator table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-njain/945772770/1814909502.10001 + /tmp/hive-njain/195632265/461794176.10001 Reduce Output Operator key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: -1 value expressions: expr: 0 @@ -63,7 +68,7 @@ Filter Operator predicate: expr: (0 < 100) - type: Boolean + type: boolean Select Operator expressions: expr: 0 Index: src/contrib/hive/ql/src/test/results/clientpositive/sample2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/sample2.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/sample2.q.out (working copy) @@ -17,7 +17,7 @@ expr: value type: string File Output Operator - directory: /tmp/hive-njain/1178661057/1120939755.10000.insclause-0 + directory: /tmp/hive-njain/74225087/82847953.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -26,7 +26,6 @@ serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib 
org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -45,9 +44,10 @@ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name srcbucket + bucket_field_name key serialization.ddl struct srcbucket { string key, string value} - serialization.format 1 columns key,value + serialization.format 1 bucket_count 2 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -60,7 +60,7 @@ Move Operator tables: replace: - source: /tmp/hive-njain/1178661057/1120939755.10000.insclause-0 + source: /tmp/hive-njain/74225087/82847953.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -69,7 +69,6 @@ serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat Index: src/contrib/hive/ql/src/test/results/clientpositive/inputddl1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/inputddl1.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/inputddl1.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_CREATETABLE INPUTDDL1 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING))) + (TOK_CREATETABLE INPUTDDL1 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) TOK_TBLTEXTFILE) STAGE DEPENDENCIES: Stage-0 is a root stage Index: src/contrib/hive/ql/src/test/results/clientpositive/sample4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/sample4.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/sample4.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 2 key) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) + (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 2 (TOK_COLREF key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -10,48 +10,44 @@ Map Reduce Alias -> Map Operator Tree: s - Filter Operator - predicate: - expr: (((default_sample_hashfn(key) & 2147483647) % 2) = 0) - type: Boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - File Output Operator - directory: /tmp/hive-njain/594156852/317367185.10000.insclause-0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - properties: - name dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - columns key,value - SORTBUCKETCOLSPREFIX TRUE - bucket_count -1 - serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - name: 
dest1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + File Output Operator + directory: /tmp/hive-njain/402231827/160820937.10000.insclause-0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Needs Tagging: Path -> Alias: - file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt Path -> Partition: - file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt Partition input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name srcbucket + bucket_field_name key serialization.ddl struct srcbucket { string key, string value} - serialization.format 1 columns key,value + serialization.format 1 bucket_count 2 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -64,7 +60,7 @@ Move Operator tables: replace: - source: /tmp/hive-njain/594156852/317367185.10000.insclause-0 + source: /tmp/hive-njain/402231827/160820937.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -73,7 +69,6 @@ serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -83,28 +78,51 @@ name: dest1 +238 val_238 86 val_86 +311 val_311 +27 val_27 165 val_165 +409 val_409 255 val_255 +278 val_278 +98 val_98 484 val_484 +265 val_265 +193 val_193 +401 val_401 150 val_150 273 val_273 224 val_224 369 val_369 66 val_66 +128 val_128 213 val_213 +146 val_146 406 val_406 +429 val_429 374 val_374 152 val_152 +469 val_469 145 val_145 495 val_495 37 val_37 327 val_327 +281 val_281 277 val_277 +209 val_209 15 val_15 82 val_82 +403 val_403 +166 val_166 417 val_417 +430 val_430 +252 val_252 +292 val_292 219 val_219 +287 val_287 +153 val_153 +193 val_193 338 val_338 446 val_446 459 val_459 @@ -113,105 +131,202 @@ 482 val_482 174 val_174 413 val_413 +494 val_494 +207 val_207 +199 val_199 466 val_466 208 val_208 174 val_174 +399 val_399 396 val_396 +247 val_247 417 val_417 +489 val_489 +162 val_162 +377 val_377 +397 val_397 309 val_309 365 val_365 266 val_266 439 val_439 +342 val_342 367 val_367 325 val_325 167 val_167 +195 val_195 475 val_475 17 val_17 +113 val_113 +155 val_155 +203 val_203 +339 val_339 0 val_0 455 val_455 +128 val_128 +311 val_311 316 val_316 57 val_57 +302 val_302 +205 val_205 149 val_149 +438 val_438 345 val_345 129 val_129 170 
val_170 20 val_20 +489 val_489 +157 val_157 378 val_378 +221 val_221 +92 val_92 +111 val_111 +47 val_47 +72 val_72 4 val_4 280 val_280 35 val_35 +427 val_427 277 val_277 208 val_208 356 val_356 +399 val_399 169 val_169 +382 val_382 +498 val_498 125 val_125 +386 val_386 437 val_437 +469 val_469 192 val_192 286 val_286 187 val_187 176 val_176 +54 val_54 459 val_459 51 val_51 138 val_138 103 val_103 239 val_239 213 val_213 +216 val_216 +430 val_430 +278 val_278 176 val_176 +289 val_289 +221 val_221 +65 val_65 318 val_318 332 val_332 +311 val_311 275 val_275 +137 val_137 +241 val_241 +83 val_83 +333 val_333 +180 val_180 284 val_284 +12 val_12 +230 val_230 181 val_181 +67 val_67 260 val_260 404 val_404 +384 val_384 +489 val_489 +353 val_353 +373 val_373 +272 val_272 138 val_138 217 val_217 84 val_84 +348 val_348 466 val_466 +58 val_58 8 val_8 411 val_411 +230 val_230 208 val_208 +348 val_348 24 val_24 +463 val_463 431 val_431 +179 val_179 172 val_172 42 val_42 129 val_129 158 val_158 +119 val_119 +496 val_496 0 val_0 +322 val_322 +197 val_197 468 val_468 +393 val_393 +454 val_454 +100 val_100 +298 val_298 +199 val_199 +191 val_191 +418 val_418 +96 val_96 26 val_26 165 val_165 327 val_327 +230 val_230 +205 val_205 +120 val_120 +131 val_131 51 val_51 404 val_404 +43 val_43 +436 val_436 156 val_156 +469 val_469 468 val_468 +308 val_308 95 val_95 196 val_196 288 val_288 +481 val_481 457 val_457 +98 val_98 282 val_282 +197 val_197 187 val_187 318 val_318 318 val_318 +409 val_409 +470 val_470 +137 val_137 369 val_369 316 val_316 169 val_169 413 val_413 +85 val_85 77 val_77 0 val_0 +490 val_490 +87 val_87 +364 val_364 +179 val_179 118 val_118 134 val_134 +395 val_395 282 val_282 138 val_138 +238 val_238 419 val_419 15 val_15 118 val_118 +72 val_72 +90 val_90 307 val_307 19 val_19 435 val_435 +10 val_10 277 val_277 273 val_273 +306 val_306 224 val_224 309 val_309 389 val_389 @@ -219,365 +334,247 @@ 242 val_242 369 val_369 392 val_392 +272 val_272 +331 val_331 +401 val_401 242 val_242 +452 val_452 +177 val_177 226 val_226 +5 val_5 497 val_497 402 val_402 396 val_396 +317 val_317 +395 val_395 +58 val_58 35 val_35 336 val_336 95 val_95 11 val_11 +168 val_168 +34 val_34 +229 val_229 233 val_233 143 val_143 +472 val_472 +322 val_322 +498 val_498 +160 val_160 +195 val_195 42 val_42 321 val_321 +430 val_430 +119 val_119 +489 val_489 +458 val_458 +78 val_78 +76 val_76 +41 val_41 +223 val_223 +492 val_492 149 val_149 +449 val_449 +218 val_218 228 val_228 138 val_138 453 val_453 +30 val_30 +209 val_209 64 val_64 468 val_468 +76 val_76 +74 val_74 +342 val_342 +69 val_69 +230 val_230 33 val_33 +368 val_368 103 val_103 +296 val_296 +113 val_113 +216 val_216 367 val_367 +344 val_344 167 val_167 +274 val_274 219 val_219 239 val_239 +485 val_485 116 val_116 +223 val_223 +256 val_256 +263 val_263 +70 val_70 +487 val_487 480 val_480 +401 val_401 288 val_288 +191 val_191 +5 val_5 244 val_244 +438 val_438 +128 val_128 +467 val_467 +432 val_432 202 val_202 316 val_316 +229 val_229 +469 val_469 +463 val_463 280 val_280 2 val_2 35 val_35 +283 val_283 +331 val_331 235 val_235 80 val_80 44 val_44 +193 val_193 321 val_321 +335 val_335 +104 val_104 466 val_466 +366 val_366 +175 val_175 +403 val_403 +483 val_483 53 val_53 105 val_105 257 val_257 406 val_406 +409 val_409 190 val_190 406 val_406 +401 val_401 114 val_114 +258 val_258 +90 val_90 +203 val_203 262 val_262 +348 val_348 424 val_424 +12 val_12 396 val_396 +201 val_201 217 val_217 +164 val_164 431 val_431 +454 val_454 +478 val_478 +298 val_298 125 val_125 431 val_431 +164 
val_164 424 val_424 187 val_187 +382 val_382 +5 val_5 +70 val_70 +397 val_397 480 val_480 291 val_291 24 val_24 +351 val_351 255 val_255 +104 val_104 +70 val_70 163 val_163 +438 val_438 +119 val_119 +414 val_414 200 val_200 491 val_491 237 val_237 439 val_439 +360 val_360 248 val_248 479 val_479 305 val_305 417 val_417 +199 val_199 444 val_444 +120 val_120 +429 val_429 169 val_169 +443 val_443 323 val_323 325 val_325 277 val_277 +230 val_230 +478 val_478 178 val_178 468 val_468 310 val_310 +317 val_317 +333 val_333 493 val_493 460 val_460 +207 val_207 +249 val_249 +265 val_265 480 val_480 +83 val_83 136 val_136 +353 val_353 172 val_172 +214 val_214 462 val_462 233 val_233 406 val_406 +133 val_133 +175 val_175 189 val_189 +454 val_454 +375 val_375 +401 val_401 +421 val_421 +407 val_407 +384 val_384 +256 val_256 26 val_26 134 val_134 +67 val_67 +384 val_384 +379 val_379 +18 val_18 462 val_462 +492 val_492 +100 val_100 +298 val_298 +9 val_9 341 val_341 +498 val_498 +146 val_146 +458 val_458 +362 val_362 +186 val_186 +285 val_285 +348 val_348 167 val_167 +18 val_18 273 val_273 183 val_183 +281 val_281 +344 val_344 97 val_97 +469 val_469 +315 val_315 84 val_84 28 val_28 37 val_37 448 val_448 152 val_152 +348 val_348 307 val_307 194 val_194 +414 val_414 477 val_477 222 val_222 +126 val_126 +90 val_90 169 val_169 +403 val_403 400 val_400 200 val_200 97 val_97 -291 val_292 -62 val_63 -271 val_272 -217 val_218 -167 val_168 -468 val_469 -413 val_414 -455 val_456 -231 val_232 -448 val_449 -246 val_247 -440 val_441 -31 val_32 -147 val_148 -428 val_429 -273 val_274 -356 val_357 -217 val_218 -33 val_34 -35 val_36 -402 val_403 -226 val_227 -15 val_16 -110 val_111 -343 val_344 -275 val_276 -0 val_1 -293 val_294 -240 val_241 -286 val_287 -408 val_409 -477 val_478 -455 val_456 -99 val_100 -482 val_483 -367 val_368 -59 val_60 -48 val_49 -424 val_425 -226 val_227 -488 val_489 -349 val_350 -11 val_12 -161 val_162 -123 val_124 -402 val_403 -468 val_469 -314 val_315 -497 val_498 -224 val_225 -4 val_5 -277 val_278 -206 val_207 -101 val_102 -114 val_115 -239 val_240 -389 val_390 -114 val_115 -235 val_236 -390 val_391 -165 val_166 -264 val_265 -196 val_197 -20 val_21 -11 val_12 -129 val_130 -257 val_258 -71 val_72 -453 val_454 -82 val_83 -86 val_87 -323 val_324 -222 val_223 -42 val_43 -411 val_412 -338 val_339 -68 val_69 -185 val_186 -147 val_148 -473 val_474 -376 val_377 -347 val_348 -213 val_214 -291 val_292 -426 val_427 -132 val_133 -446 val_447 -121 val_122 -284 val_285 -129 val_130 -246 val_247 -491 val_492 -369 val_370 -262 val_263 -390 val_391 -352 val_353 -226 val_227 -349 val_350 -480 val_481 -497 val_498 -480 val_481 -475 val_476 -183 val_184 -15 val_16 -318 val_319 -149 val_150 -392 val_393 -77 val_78 -275 val_276 -174 val_175 -114 val_115 -161 val_162 -75 val_76 -264 val_265 -48 val_49 -336 val_337 -390 val_391 -484 val_485 -189 val_190 -305 val_306 -367 val_368 -6 val_7 -260 val_261 -2 val_3 -349 val_350 -170 val_171 -51 val_52 -363 val_364 -118 val_119 -310 val_311 -19 val_20 -80 val_81 -121 val_122 -239 val_240 -358 val_359 -11 val_12 -143 val_144 -341 val_342 -105 val_106 -42 val_43 -363 val_364 -347 val_348 -77 val_78 -293 val_294 -20 val_21 -40 val_41 -8 val_9 -310 val_311 -206 val_207 -381 val_382 -404 val_405 -206 val_207 -385 val_386 -259 val_260 -226 val_227 -262 val_263 -303 val_304 -260 val_261 -404 val_405 -116 val_117 -42 val_43 -303 val_304 -93 val_94 -277 val_278 -406 val_407 -451 val_452 -53 val_54 -132 val_133 -192 val_193 -356 val_357 -352 val_353 -330 val_331 -138 val_139 -257 
val_258 -174 val_175 -295 val_296 -105 val_106 -204 val_205 -439 val_440 -374 val_375 -457 val_458 -93 val_94 -196 val_197 -437 val_438 -398 val_399 -35 val_36 -334 val_335 -435 val_436 -60 val_61 -442 val_443 -404 val_405 -446 val_447 -0 val_1 -268 val_269 -491 val_492 -392 val_393 -295 val_296 -93 val_94 -439 val_440 -156 val_157 -341 val_342 -118 val_119 -172 val_173 -244 val_245 -6 val_7 -284 val_285 -136 val_137 -349 val_350 -462 val_463 -123 val_124 -408 val_409 -152 val_153 -310 val_311 -486 val_487 -152 val_153 -378 val_379 -97 val_98 -495 val_496 -385 val_386 -40 val_41 -389 val_390 -259 val_260 -97 val_98 -125 val_126 -178 val_179 -415 val_416 -156 val_157 -228 val_229 -363 val_364 -22 val_23 -248 val_249 -459 val_460 -402 val_403 -62 val_63 -244 val_245 -46 val_47 -341 val_342 -215 val_216 -134 val_135 -196 val_197 -431 val_432 -82 val_83 -440 val_441 -309 val_310 -161 val_162 -468 val_469 -152 val_153 -138 val_139 -161 val_162 -91 val_92 -118 val_119 -178 val_179 -457 val_458 -242 val_243 -35 val_36 -244 val_245 Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby2_map.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/groupby2_map.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/groupby2_map.q.out (revision 0) @@ -0,0 +1,117 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (TOK_COLREF src key) 0 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (TOK_COLREF src key) 0 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4))))) (TOK_GROUPBY (TOK_FUNCTION substr (TOK_COLREF src key) 0 1)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Group By Operator + + expr: count(DISTINCT substr(value, 4)) + expr: sum(UDFToDouble(substr(value, 4))) + keys: + expr: substr(key, 0, 1) + type: string + expr: substr(value, 4) + type: string + mode: hash + Reduce Output Operator + key expressions: + expr: 0 + type: string + expr: 1 + type: string + Map-reduce partition columns: + expr: 0 + type: string + expr: 1 + type: string + tag: -1 + value expressions: + expr: 2 + type: bigint + expr: 3 + type: double + Reduce Operator Tree: + Group By Operator + + expr: count(DISTINCT KEY.1) + expr: sum(VALUE.1) + keys: + expr: KEY.0 + type: string + mode: partial2 + File Output Operator + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/781819455/901894899.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: -1 + value expressions: + expr: 1 + type: bigint + expr: 2 + type: double + Reduce Operator Tree: + Group By Operator + + expr: count(VALUE.0) + expr: sum(VALUE.1) + keys: + expr: KEY.0 + type: string + mode: unknown + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: bigint + expr: concat(0, UDFToString(2)) + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output 
format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +0 1 00.0 +1 71 116414.0 +2 69 225571.0 +3 62 332004.0 +4 74 452763.0 +5 6 5397.0 +6 5 6398.0 +7 6 7735.0 +8 8 8762.0 +9 7 91047.0 Index: src/contrib/hive/ql/src/test/results/clientpositive/inputddl3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/inputddl3.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/inputddl3.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_CREATETABLE INPUTDDL3 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_TABLEROWFORMATFIELD '\t'))) + (TOK_CREATETABLE INPUTDDL3 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_TABLEROWFORMATFIELD '\t')) TOK_TBLTEXTFILE) STAGE DEPENDENCIES: Stage-0 is a root stage Index: src/contrib/hive/ql/src/test/results/clientpositive/sample6.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/sample6.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/sample6.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 4 key) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) + (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 4 (TOK_COLREF key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -13,7 +13,7 @@ Filter Operator predicate: expr: (((default_sample_hashfn(key) & 2147483647) % 4) = 0) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -21,7 +21,7 @@ expr: value type: string File Output Operator - directory: /tmp/hive-njain/391691394/413823912.10000.insclause-0 + directory: /tmp/hive-njain/57994069/1680944185.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -30,7 +30,6 @@ serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -40,18 +39,19 @@ name: dest1 Needs Tagging: Path -> Alias: - file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt Path -> Partition: - file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt Partition input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name srcbucket + bucket_field_name key serialization.ddl struct srcbucket { string key, string value} - 
serialization.format 1 columns key,value + serialization.format 1 bucket_count 2 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -64,7 +64,7 @@ Move Operator tables: replace: - source: /tmp/hive-njain/391691394/413823912.10000.insclause-0 + source: /tmp/hive-njain/57994069/1680944185.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -73,7 +73,6 @@ serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -201,138 +200,3 @@ 477 val_477 169 val_169 400 val_400 -62 val_63 -271 val_272 -217 val_218 -455 val_456 -231 val_232 -448 val_449 -246 val_247 -440 val_441 -147 val_148 -356 val_357 -217 val_218 -33 val_34 -15 val_16 -110 val_111 -275 val_276 -0 val_1 -293 val_294 -286 val_287 -408 val_409 -477 val_478 -455 val_456 -99 val_100 -367 val_368 -59 val_60 -48 val_49 -488 val_489 -349 val_350 -11 val_12 -161 val_162 -224 val_225 -4 val_5 -206 val_207 -114 val_115 -239 val_240 -389 val_390 -114 val_115 -235 val_236 -165 val_166 -264 val_265 -11 val_12 -129 val_130 -257 val_258 -323 val_324 -411 val_412 -338 val_339 -147 val_148 -473 val_474 -213 val_214 -426 val_427 -132 val_133 -121 val_122 -129 val_130 -246 val_247 -491 val_492 -352 val_353 -349 val_350 -480 val_481 -480 val_481 -183 val_184 -15 val_16 -392 val_393 -77 val_78 -275 val_276 -114 val_115 -161 val_162 -264 val_265 -48 val_49 -484 val_485 -305 val_306 -367 val_368 -260 val_261 -349 val_350 -51 val_52 -363 val_364 -118 val_119 -19 val_20 -80 val_81 -121 val_122 -239 val_240 -11 val_12 -143 val_144 -341 val_342 -363 val_364 -77 val_78 -293 val_294 -40 val_41 -8 val_9 -206 val_207 -381 val_382 -404 val_405 -206 val_207 -385 val_386 -260 val_261 -404 val_405 -451 val_452 -132 val_133 -356 val_357 -352 val_353 -330 val_331 -257 val_258 -374 val_375 -437 val_438 -334 val_335 -404 val_405 -0 val_1 -268 val_269 -491 val_492 -392 val_393 -341 val_342 -118 val_119 -172 val_173 -136 val_137 -349 val_350 -462 val_463 -408 val_409 -378 val_379 -495 val_496 -385 val_386 -40 val_41 -389 val_390 -125 val_126 -415 val_416 -228 val_229 -363 val_364 -22 val_23 -459 val_460 -62 val_63 -341 val_342 -440 val_441 -309 val_310 -161 val_162 -161 val_162 -91 val_92 -118 val_119 -242 val_243 Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby4_map.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/groupby4_map.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/groupby4_map.q.out (revision 0) @@ -0,0 +1,53 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Select Operator + Group By Operator + + expr: count(1) + mode: hash + Reduce Output Operator + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: 0 + type: bigint + Reduce Operator Tree: + Group By Operator + + expr: count(VALUE.0) + mode: unknown + Select 
Operator + expressions: + expr: 0 + type: bigint + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +500 Index: src/contrib/hive/ql/src/test/results/clientpositive/inputddl7.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/inputddl7.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/inputddl7.q.out (revision 0) @@ -0,0 +1,20 @@ +500 +500 +500 +500 +name string +Detailed Table Information: +Table(tableName:t1,dbName:default,owner:njain,createTime:1225994187,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/t1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{}) +name string +Detailed Table Information: +Table(tableName:t2,dbName:default,owner:njain,createTime:1225994193,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/t2,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.mapred.SequenceFileOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{}) +name string +ds string + +Detailed Partition Information: +Partition(values:[2008-04-09],dbName:default,tableName:t3,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/t3/ds=2008-04-09,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{}) +name string +ds string + +Detailed Partition Information: 
+Partition(values:[2008-04-09],dbName:default,tableName:t4,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/t4/ds=2008-04-09,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.mapred.SequenceFileOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{}) Index: src/contrib/hive/ql/src/test/results/clientpositive/notable_alias1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/notable_alias1.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/notable_alias1.q.out (revision 0) @@ -0,0 +1,152 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR '1234') (TOK_SELEXPR (TOK_COLREF key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (< (TOK_COLREF src key) 100)) (TOK_GROUPBY (TOK_COLREF key)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Select Operator + expressions: + expr: key + type: string + Filter Operator + predicate: + expr: (0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: 1 + type: int + Reduce Operator Tree: + Group By Operator + + expr: count(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/252553932/208217017.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: -1 + value expressions: + expr: 1 + type: bigint + Reduce Operator Tree: + Group By Operator + + expr: count(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: unknown + Select Operator + expressions: + expr: '1234' + type: string + expr: 0 + type: string + expr: 1 + type: bigint + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +1234 0 3 +1234 10 1 +1234 11 1 +1234 12 2 +1234 15 2 +1234 17 1 +1234 18 2 +1234 19 1 +1234 2 1 +1234 20 1 +1234 24 2 +1234 26 2 +1234 27 1 +1234 28 1 +1234 30 1 +1234 33 1 +1234 34 1 +1234 35 3 +1234 37 2 +1234 4 1 +1234 41 1 +1234 42 2 +1234 43 1 +1234 44 1 +1234 47 1 +1234 5 3 +1234 51 2 +1234 53 1 +1234 54 1 +1234 57 1 +1234 58 2 +1234 64 1 +1234 65 1 +1234 66 1 +1234 67 2 +1234 69 1 +1234 70 3 +1234 72 2 +1234 74 1 +1234 
76 2 +1234 77 1 +1234 78 1 +1234 8 1 +1234 80 1 +1234 82 1 +1234 83 2 +1234 84 2 +1234 85 1 +1234 86 1 +1234 87 1 +1234 9 1 +1234 90 3 +1234 92 1 +1234 95 2 +1234 96 1 +1234 97 2 +1234 98 2 Index: src/contrib/hive/ql/src/test/results/clientpositive/input0.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input0.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/input0.q.out (revision 0) @@ -0,0 +1,512 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + + +238 val_238 +86 val_86 +311 val_311 +27 val_27 +165 val_165 +409 val_409 +255 val_255 +278 val_278 +98 val_98 +484 val_484 +265 val_265 +193 val_193 +401 val_401 +150 val_150 +273 val_273 +224 val_224 +369 val_369 +66 val_66 +128 val_128 +213 val_213 +146 val_146 +406 val_406 +429 val_429 +374 val_374 +152 val_152 +469 val_469 +145 val_145 +495 val_495 +37 val_37 +327 val_327 +281 val_281 +277 val_277 +209 val_209 +15 val_15 +82 val_82 +403 val_403 +166 val_166 +417 val_417 +430 val_430 +252 val_252 +292 val_292 +219 val_219 +287 val_287 +153 val_153 +193 val_193 +338 val_338 +446 val_446 +459 val_459 +394 val_394 +237 val_237 +482 val_482 +174 val_174 +413 val_413 +494 val_494 +207 val_207 +199 val_199 +466 val_466 +208 val_208 +174 val_174 +399 val_399 +396 val_396 +247 val_247 +417 val_417 +489 val_489 +162 val_162 +377 val_377 +397 val_397 +309 val_309 +365 val_365 +266 val_266 +439 val_439 +342 val_342 +367 val_367 +325 val_325 +167 val_167 +195 val_195 +475 val_475 +17 val_17 +113 val_113 +155 val_155 +203 val_203 +339 val_339 +0 val_0 +455 val_455 +128 val_128 +311 val_311 +316 val_316 +57 val_57 +302 val_302 +205 val_205 +149 val_149 +438 val_438 +345 val_345 +129 val_129 +170 val_170 +20 val_20 +489 val_489 +157 val_157 +378 val_378 +221 val_221 +92 val_92 +111 val_111 +47 val_47 +72 val_72 +4 val_4 +280 val_280 +35 val_35 +427 val_427 +277 val_277 +208 val_208 +356 val_356 +399 val_399 +169 val_169 +382 val_382 +498 val_498 +125 val_125 +386 val_386 +437 val_437 +469 val_469 +192 val_192 +286 val_286 +187 val_187 +176 val_176 +54 val_54 +459 val_459 +51 val_51 +138 val_138 +103 val_103 +239 val_239 +213 val_213 +216 val_216 +430 val_430 +278 val_278 +176 val_176 +289 val_289 +221 val_221 +65 val_65 +318 val_318 +332 val_332 +311 val_311 +275 val_275 +137 val_137 +241 val_241 +83 val_83 +333 val_333 +180 val_180 +284 val_284 +12 val_12 +230 val_230 +181 val_181 +67 val_67 +260 val_260 +404 val_404 +384 val_384 +489 val_489 +353 val_353 +373 val_373 +272 val_272 +138 val_138 +217 val_217 +84 val_84 +348 val_348 +466 val_466 +58 val_58 +8 val_8 +411 val_411 +230 val_230 +208 val_208 +348 val_348 +24 val_24 +463 val_463 +431 val_431 +179 val_179 +172 val_172 +42 val_42 +129 val_129 +158 val_158 +119 val_119 +496 val_496 +0 val_0 +322 val_322 +197 val_197 +468 val_468 +393 val_393 +454 val_454 +100 val_100 +298 val_298 +199 val_199 +191 val_191 +418 val_418 +96 val_96 +26 val_26 +165 val_165 +327 val_327 +230 val_230 +205 val_205 +120 val_120 +131 val_131 +51 val_51 +404 val_404 +43 val_43 +436 val_436 +156 val_156 +469 val_469 +468 val_468 +308 val_308 +95 val_95 +196 val_196 +288 val_288 +481 val_481 +457 val_457 +98 val_98 +282 val_282 +197 val_197 +187 val_187 +318 val_318 +318 val_318 +409 val_409 +470 
val_470 +137 val_137 +369 val_369 +316 val_316 +169 val_169 +413 val_413 +85 val_85 +77 val_77 +0 val_0 +490 val_490 +87 val_87 +364 val_364 +179 val_179 +118 val_118 +134 val_134 +395 val_395 +282 val_282 +138 val_138 +238 val_238 +419 val_419 +15 val_15 +118 val_118 +72 val_72 +90 val_90 +307 val_307 +19 val_19 +435 val_435 +10 val_10 +277 val_277 +273 val_273 +306 val_306 +224 val_224 +309 val_309 +389 val_389 +327 val_327 +242 val_242 +369 val_369 +392 val_392 +272 val_272 +331 val_331 +401 val_401 +242 val_242 +452 val_452 +177 val_177 +226 val_226 +5 val_5 +497 val_497 +402 val_402 +396 val_396 +317 val_317 +395 val_395 +58 val_58 +35 val_35 +336 val_336 +95 val_95 +11 val_11 +168 val_168 +34 val_34 +229 val_229 +233 val_233 +143 val_143 +472 val_472 +322 val_322 +498 val_498 +160 val_160 +195 val_195 +42 val_42 +321 val_321 +430 val_430 +119 val_119 +489 val_489 +458 val_458 +78 val_78 +76 val_76 +41 val_41 +223 val_223 +492 val_492 +149 val_149 +449 val_449 +218 val_218 +228 val_228 +138 val_138 +453 val_453 +30 val_30 +209 val_209 +64 val_64 +468 val_468 +76 val_76 +74 val_74 +342 val_342 +69 val_69 +230 val_230 +33 val_33 +368 val_368 +103 val_103 +296 val_296 +113 val_113 +216 val_216 +367 val_367 +344 val_344 +167 val_167 +274 val_274 +219 val_219 +239 val_239 +485 val_485 +116 val_116 +223 val_223 +256 val_256 +263 val_263 +70 val_70 +487 val_487 +480 val_480 +401 val_401 +288 val_288 +191 val_191 +5 val_5 +244 val_244 +438 val_438 +128 val_128 +467 val_467 +432 val_432 +202 val_202 +316 val_316 +229 val_229 +469 val_469 +463 val_463 +280 val_280 +2 val_2 +35 val_35 +283 val_283 +331 val_331 +235 val_235 +80 val_80 +44 val_44 +193 val_193 +321 val_321 +335 val_335 +104 val_104 +466 val_466 +366 val_366 +175 val_175 +403 val_403 +483 val_483 +53 val_53 +105 val_105 +257 val_257 +406 val_406 +409 val_409 +190 val_190 +406 val_406 +401 val_401 +114 val_114 +258 val_258 +90 val_90 +203 val_203 +262 val_262 +348 val_348 +424 val_424 +12 val_12 +396 val_396 +201 val_201 +217 val_217 +164 val_164 +431 val_431 +454 val_454 +478 val_478 +298 val_298 +125 val_125 +431 val_431 +164 val_164 +424 val_424 +187 val_187 +382 val_382 +5 val_5 +70 val_70 +397 val_397 +480 val_480 +291 val_291 +24 val_24 +351 val_351 +255 val_255 +104 val_104 +70 val_70 +163 val_163 +438 val_438 +119 val_119 +414 val_414 +200 val_200 +491 val_491 +237 val_237 +439 val_439 +360 val_360 +248 val_248 +479 val_479 +305 val_305 +417 val_417 +199 val_199 +444 val_444 +120 val_120 +429 val_429 +169 val_169 +443 val_443 +323 val_323 +325 val_325 +277 val_277 +230 val_230 +478 val_478 +178 val_178 +468 val_468 +310 val_310 +317 val_317 +333 val_333 +493 val_493 +460 val_460 +207 val_207 +249 val_249 +265 val_265 +480 val_480 +83 val_83 +136 val_136 +353 val_353 +172 val_172 +214 val_214 +462 val_462 +233 val_233 +406 val_406 +133 val_133 +175 val_175 +189 val_189 +454 val_454 +375 val_375 +401 val_401 +421 val_421 +407 val_407 +384 val_384 +256 val_256 +26 val_26 +134 val_134 +67 val_67 +384 val_384 +379 val_379 +18 val_18 +462 val_462 +492 val_492 +100 val_100 +298 val_298 +9 val_9 +341 val_341 +498 val_498 +146 val_146 +458 val_458 +362 val_362 +186 val_186 +285 val_285 +348 val_348 +167 val_167 +18 val_18 +273 val_273 +183 val_183 +281 val_281 +344 val_344 +97 val_97 +469 val_469 +315 val_315 +84 val_84 +28 val_28 +37 val_37 +448 val_448 +152 val_152 +348 val_348 +307 val_307 +194 val_194 +414 val_414 +477 val_477 +222 val_222 +126 val_126 +90 val_90 +169 val_169 +403 val_403 +400 val_400 +200 val_200 +97 val_97 
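A recurring change across the plan diffs above: the old dumps printed an opaque "# partition fields: N", while the new ones print the partitioning expressions themselves ("Map-reduce partition columns"). The first job of a multi-stage GROUP BY now partitions its map output by rand() and writes the partial aggregates to a SequenceFile intermediate ("binary_table"), and the implicit string-to-double coercion is now shown explicitly as UDFToDouble(...). A minimal HiveQL sketch of a query that yields such a two-stage plan follows; it is presumably close to the query behind groupby1.q.out, but since the .q file itself is not part of this excerpt, treat the exact text and the dest1 schema as illustrative assumptions, not the patch's own test:

    -- Hypothetical reconstruction (the actual groupby1.q is not included here).
    -- Assumed destination schema; the test harness presumably pre-creates it.
    CREATE TABLE dest1(key INT, value DOUBLE);

    -- Stage 1 partitions map output by rand() and pre-aggregates partial sums
    -- (substr(value, 4) is coerced via UDFToDouble, as the new plans print);
    -- stage 2 re-partitions by the grouping key to merge the partials.
    EXPLAIN
    FROM src
    INSERT OVERWRITE TABLE dest1
    SELECT src.key, sum(substr(src.value, 4))
    GROUP BY src.key;

Partitioning the first job by rand() spreads rows with skewed keys evenly across reducers; correctness is preserved because the second job re-groups the partial aggregates by the real key, which is why the plans above show two Map Reduce stages with different partition columns.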
Index: src/contrib/hive/ql/src/test/results/clientpositive/join1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join1.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/join1.q.out (working copy) @@ -14,7 +14,9 @@ key expressions: expr: key type: string - # partition fields: 1 + Map-reduce partition columns: + expr: key + type: string tag: 1 value expressions: expr: key @@ -22,29 +24,33 @@ expr: value type: string src1 - Reduce Output Operator - key expressions: + Select Operator + expressions: expr: key type: string - # partition fields: 1 - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 0 + value expressions: + expr: 0 + type: string Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE.0} {VALUE.1} + 0 {VALUE.0} 1 {VALUE.0} {VALUE.1} Select Operator expressions: expr: 0 type: string - expr: 3 + expr: 2 type: string File Output Operator table: Index: src/contrib/hive/ql/src/test/results/clientpositive/join3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join3.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/join3.q.out (working copy) @@ -10,23 +10,29 @@ Map Reduce Alias -> Map Operator Tree: src2 - Reduce Output Operator - key expressions: + Select Operator + expressions: expr: key type: string - # partition fields: 1 - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 1 + value expressions: + expr: 0 + type: string src3 Reduce Output Operator key expressions: expr: key type: string - # partition fields: 1 + Map-reduce partition columns: + expr: key + type: string tag: 2 value expressions: expr: key @@ -34,31 +40,35 @@ expr: value type: string src1 - Reduce Output Operator - key expressions: + Select Operator + expressions: expr: key type: string - # partition fields: 1 - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 0 + value expressions: + expr: 0 + type: string Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 Inner Join 0 to 1 condition expressions: - 0 {VALUE.0} {VALUE.1} - 1 {VALUE.0} {VALUE.1} + 0 {VALUE.0} + 1 {VALUE.0} 2 {VALUE.0} {VALUE.1} Select Operator expressions: expr: 0 type: string - expr: 5 + expr: 3 type: string File Output Operator table: Index: src/contrib/hive/ql/src/test/results/clientpositive/input4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input4.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input4.q.out (working copy) @@ -9,7 +9,7 @@ Stage: Stage-0 Copy source: file:/home/njain/workspace/hadoop-0.17/src/contrib/hive/data/files/kv1.txt - destination: file:/tmp/hive-njain/-478939701 + destination: file:/tmp/hive-njain/-1533162426 Stage: Stage-1 Move Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/join5.q.out 
=================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join5.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/join5.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: ((key > 15) and (key < 25)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -24,7 +24,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 1 value expressions: expr: 0 @@ -35,7 +37,7 @@ Filter Operator predicate: expr: ((key > 10) and (key < 20)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -46,7 +48,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 0 value expressions: expr: 0 Index: src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath2.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath2.q.out (working copy) @@ -10,24 +10,32 @@ Map Reduce Alias -> Map Operator Tree: src_thrift - Filter Operator - predicate: - expr: (lint is not null and not mstringstring is null) - type: Boolean - Select Operator - expressions: - expr: size(lint) - type: int - expr: size(lintstring) - type: int - expr: size(mstringstring) - type: int - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - name: dest1 + Select Operator + expressions: + expr: lint + type: array + expr: lintstring + type: array + expr: mstringstring + type: map + Filter Operator + predicate: + expr: (0 is not null and not 2 is null) + type: boolean + Select Operator + expressions: + expr: size(0) + type: int + expr: size(1) + type: int + expr: size(2) + type: int + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/input6.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input6.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input6.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: key is null - type: Boolean + type: boolean Select Operator expressions: expr: key Index: src/contrib/hive/ql/src/test/results/clientpositive/join7.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join7.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/join7.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: ((key > 15) and (key < 25)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -24,7 +24,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 1 value expressions: expr: 0 @@ -35,7 +37,7 @@ Filter Operator predicate: expr: ((key > 10) and (key < 20)) - type: Boolean + 
type: boolean Select Operator expressions: expr: key @@ -46,7 +48,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 0 value expressions: expr: 0 @@ -57,7 +61,7 @@ Filter Operator predicate: expr: ((key > 20) and (key < 25)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -68,7 +72,9 @@ key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: 2 value expressions: expr: 0 Index: src/contrib/hive/ql/src/test/results/clientpositive/input8.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input8.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input8.q.out (working copy) @@ -12,18 +12,22 @@ src1 Select Operator expressions: - expr: (4 + null) - type: int - expr: (UDFToDouble(key) - null) - type: double - expr: (null + null) - type: Byte - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - name: dest1 + expr: key + type: string + Select Operator + expressions: + expr: (4 + null) + type: int + expr: (UDFToDouble(0) - null) + type: double + expr: (null + null) + type: tinyint + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/join9.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join9.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/join9.q.out (working copy) @@ -14,7 +14,9 @@ key expressions: expr: key type: string - # partition fields: 1 + Map-reduce partition columns: + expr: key + type: string tag: 1 value expressions: expr: key @@ -22,21 +24,29 @@ expr: value type: string src1 - Reduce Output Operator - key expressions: + Select Operator + expressions: expr: key type: string - # partition fields: 1 - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string expr: ds type: string expr: hr type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 0 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string Needs Tagging: Path -> Alias: file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -85,20 +95,20 @@ condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE.0} {VALUE.1} {VALUE.2} {VALUE.3} + 0 {VALUE.0} {VALUE.1} {VALUE.2} 1 {VALUE.0} {VALUE.1} Filter Operator predicate: - expr: ((2 = '2008-04-08') and (3 = '12')) - type: Boolean + expr: ((1 = '2008-04-08') and (2 = '12')) + type: boolean Select Operator expressions: expr: 0 type: string - expr: 5 + expr: 4 type: string File Output Operator - directory: /tmp/hive-njain/465936460/211979279.10000.insclause-0 + directory: /tmp/hive-njain/653311979.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -107,7 +117,6 @@ 
serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -120,7 +129,7 @@ Move Operator tables: replace: - source: /tmp/hive-njain/465936460/211979279.10000.insclause-0 + source: /tmp/hive-njain/653311979.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -129,7 +138,6 @@ serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat Index: src/contrib/hive/ql/src/test/results/clientpositive/union.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/union.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/union.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: (key < 100) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -34,7 +34,7 @@ Filter Operator predicate: expr: (key > 100) - type: Boolean + type: boolean Select Operator expressions: expr: key Index: src/contrib/hive/ql/src/test/results/clientpositive/join11.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join11.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/join11.q.out (revision 0) @@ -0,0 +1,226 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) c1) (TOK_SELEXPR (TOK_COLREF src value) c2)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) c3) (TOK_SELEXPR (TOK_COLREF src value) c4)))) src2) (AND (= (TOK_COLREF src1 c1) (TOK_COLREF src2 c3)) (< (TOK_COLREF src1 c1) 100)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 c1)) (TOK_SELEXPR (TOK_COLREF src2 c4))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src2:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 1 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + src1:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Filter Operator + predicate: + expr: (0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 0 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string + expr: 3 + type: string + File Output Operator + table: + 
input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +10 val_10 +11 val_11 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +17 val_17 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +19 val_19 +2 val_2 +20 val_20 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +27 val_27 +28 val_28 +30 val_30 +33 val_33 +34 val_34 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +4 val_4 +41 val_41 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +43 val_43 +44 val_44 +47 val_47 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +51 val_51 +51 val_51 +51 val_51 +51 val_51 +53 val_53 +54 val_54 +57 val_57 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +64 val_64 +65 val_65 +66 val_66 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +69 val_69 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +72 val_72 +72 val_72 +72 val_72 +72 val_72 +74 val_74 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +77 val_77 +78 val_78 +8 val_8 +80 val_80 +82 val_82 +83 val_83 +83 val_83 +83 val_83 +83 val_83 +84 val_84 +84 val_84 +84 val_84 +84 val_84 +85 val_85 +86 val_86 +87 val_87 +9 val_9 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +92 val_92 +95 val_95 +95 val_95 +95 val_95 +95 val_95 +96 val_96 +97 val_97 +97 val_97 +97 val_97 +97 val_97 +98 val_98 +98 val_98 +98 val_98 +98 val_98 Index: src/contrib/hive/ql/src/test/results/clientpositive/udf4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/udf4.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/udf4.q.out (working copy) @@ -11,43 +11,44 @@ Alias -> Map Operator Tree: dest1 Select Operator - expressions: - expr: round(1.0) - type: bigint - expr: round(1.5) - type: bigint - expr: round(- 1.5) - type: bigint - expr: floor(1.0) - type: bigint - expr: floor(1.5) - type: bigint - expr: floor(- 1.5) - type: bigint - expr: ceiling(1.0) - type: bigint - expr: ceiling(1.5) - type: bigint - expr: ceiling(- 1.5) - type: bigint - expr: ceiling(1.0) - type: bigint - expr: rand(UDFToLong(3)) - type: double - expr: 3 - type: int - expr: - 3 - type: int - expr: (1 + 2) - type: int - expr: (1 + - 2) - type: int - expr: ~ 1 - type: int - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: round(1.0) + type: bigint + expr: round(1.5) + type: bigint + expr: round(- 1.5) + type: bigint + expr: floor(1.0) + type: bigint + expr: floor(1.5) + type: bigint + expr: floor(- 1.5) + type: bigint + expr: ceiling(1.0) + type: bigint + expr: ceiling(1.5) + type: bigint + expr: ceiling(- 1.5) + type: bigint + expr: ceiling(1.0) + type: bigint + expr: rand(UDFToLong(3)) + type: double + expr: 3 + type: int + expr: - 3 + type: int + expr: (1 + 2) + type: int + expr: (1 + - 2) + type: int + expr: ~ 1 + type: int + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/input12.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input12.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input12.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: (key < 100) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -29,7 +29,7 @@ Filter Operator predicate: expr: ((key >= 100) and (key < 200)) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -45,7 +45,7 @@ Filter Operator predicate: expr: (key >= 200) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -272,314 +272,314 @@ 194 val_194 126 val_126 169 val_169 -238 NULL 2008-04-08 12 -311 NULL 2008-04-08 12 -409 NULL 2008-04-08 12 -255 NULL 2008-04-08 12 -278 NULL 2008-04-08 12 -484 NULL 2008-04-08 12 -265 NULL 2008-04-08 12 -401 NULL 2008-04-08 12 -273 NULL 2008-04-08 12 -224 NULL 2008-04-08 12 -369 NULL 2008-04-08 12 -213 NULL 2008-04-08 12 -406 NULL 2008-04-08 12 -429 NULL 2008-04-08 12 -374 NULL 2008-04-08 12 -469 NULL 2008-04-08 12 -495 NULL 2008-04-08 12 -327 NULL 2008-04-08 12 -281 NULL 2008-04-08 12 -277 NULL 2008-04-08 12 -209 NULL 2008-04-08 12 -403 NULL 2008-04-08 12 -417 NULL 2008-04-08 12 -430 NULL 2008-04-08 12 -252 NULL 2008-04-08 12 -292 NULL 2008-04-08 12 -219 NULL 2008-04-08 12 -287 NULL 2008-04-08 12 -338 NULL 2008-04-08 12 -446 NULL 2008-04-08 12 -459 NULL 2008-04-08 12 -394 NULL 2008-04-08 12 -237 NULL 2008-04-08 12 -482 NULL 2008-04-08 12 -413 NULL 2008-04-08 12 -494 NULL 2008-04-08 12 -207 NULL 2008-04-08 12 -466 NULL 2008-04-08 12 -208 NULL 2008-04-08 12 -399 NULL 2008-04-08 12 -396 NULL 2008-04-08 12 -247 NULL 2008-04-08 12 -417 NULL 2008-04-08 12 -489 NULL 2008-04-08 12 -377 NULL 2008-04-08 12 -397 NULL 2008-04-08 12 -309 NULL 2008-04-08 12 -365 NULL 2008-04-08 12 -266 NULL 2008-04-08 12 -439 NULL 2008-04-08 12 -342 NULL 2008-04-08 12 -367 NULL 2008-04-08 12 -325 NULL 2008-04-08 12 -475 NULL 2008-04-08 12 -203 NULL 2008-04-08 12 -339 NULL 2008-04-08 12 -455 NULL 2008-04-08 12 -311 NULL 2008-04-08 12 -316 NULL 2008-04-08 12 -302 NULL 2008-04-08 12 -205 NULL 2008-04-08 12 -438 NULL 2008-04-08 12 -345 NULL 2008-04-08 12 -489 NULL 2008-04-08 12 -378 NULL 2008-04-08 12 -221 NULL 2008-04-08 12 -280 NULL 2008-04-08 12 -427 NULL 2008-04-08 12 -277 NULL 2008-04-08 12 -208 NULL 2008-04-08 12 -356 NULL 2008-04-08 12 -399 NULL 2008-04-08 12 -382 NULL 2008-04-08 12 -498 NULL 2008-04-08 12 -386 NULL 2008-04-08 12 -437 NULL 2008-04-08 12 -469 NULL 2008-04-08 12 -286 NULL 2008-04-08 12 -459 NULL 2008-04-08 12 -239 NULL 2008-04-08 12 -213 NULL 2008-04-08 12 -216 NULL 2008-04-08 12 -430 NULL 2008-04-08 12 -278 NULL 2008-04-08 12 -289 NULL 2008-04-08 12 -221 NULL 2008-04-08 12 -318 NULL 2008-04-08 12 -332 NULL 2008-04-08 12 -311 NULL 2008-04-08 12 -275 NULL 2008-04-08 12 -241 NULL 2008-04-08 12 -333 NULL 2008-04-08 12 -284 NULL 2008-04-08 12 -230 NULL 2008-04-08 12 -260 NULL 2008-04-08 12 -404 NULL 2008-04-08 12 -384 NULL 2008-04-08 12 -489 NULL 2008-04-08 12 -353 NULL 2008-04-08 12 -373 NULL 2008-04-08 12 -272 NULL 2008-04-08 12 -217 NULL 2008-04-08 12 -348 NULL 2008-04-08 12 -466 NULL 2008-04-08 12 -411 NULL 2008-04-08 12 -230 NULL 2008-04-08 12 -208 NULL 2008-04-08 12 -348 NULL 2008-04-08 12 -463 NULL 2008-04-08 12 -431 NULL 2008-04-08 12 -496 NULL 2008-04-08 12 -322 NULL 
2008-04-08 12 -468 NULL 2008-04-08 12 -393 NULL 2008-04-08 12 -454 NULL 2008-04-08 12 -298 NULL 2008-04-08 12 -418 NULL 2008-04-08 12 -327 NULL 2008-04-08 12 -230 NULL 2008-04-08 12 -205 NULL 2008-04-08 12 -404 NULL 2008-04-08 12 -436 NULL 2008-04-08 12 -469 NULL 2008-04-08 12 -468 NULL 2008-04-08 12 -308 NULL 2008-04-08 12 -288 NULL 2008-04-08 12 -481 NULL 2008-04-08 12 -457 NULL 2008-04-08 12 -282 NULL 2008-04-08 12 -318 NULL 2008-04-08 12 -318 NULL 2008-04-08 12 -409 NULL 2008-04-08 12 -470 NULL 2008-04-08 12 -369 NULL 2008-04-08 12 -316 NULL 2008-04-08 12 -413 NULL 2008-04-08 12 -490 NULL 2008-04-08 12 -364 NULL 2008-04-08 12 -395 NULL 2008-04-08 12 -282 NULL 2008-04-08 12 -238 NULL 2008-04-08 12 -419 NULL 2008-04-08 12 -307 NULL 2008-04-08 12 -435 NULL 2008-04-08 12 -277 NULL 2008-04-08 12 -273 NULL 2008-04-08 12 -306 NULL 2008-04-08 12 -224 NULL 2008-04-08 12 -309 NULL 2008-04-08 12 -389 NULL 2008-04-08 12 -327 NULL 2008-04-08 12 -242 NULL 2008-04-08 12 -369 NULL 2008-04-08 12 -392 NULL 2008-04-08 12 -272 NULL 2008-04-08 12 -331 NULL 2008-04-08 12 -401 NULL 2008-04-08 12 -242 NULL 2008-04-08 12 -452 NULL 2008-04-08 12 -226 NULL 2008-04-08 12 -497 NULL 2008-04-08 12 -402 NULL 2008-04-08 12 -396 NULL 2008-04-08 12 -317 NULL 2008-04-08 12 -395 NULL 2008-04-08 12 -336 NULL 2008-04-08 12 -229 NULL 2008-04-08 12 -233 NULL 2008-04-08 12 -472 NULL 2008-04-08 12 -322 NULL 2008-04-08 12 -498 NULL 2008-04-08 12 -321 NULL 2008-04-08 12 -430 NULL 2008-04-08 12 -489 NULL 2008-04-08 12 -458 NULL 2008-04-08 12 -223 NULL 2008-04-08 12 -492 NULL 2008-04-08 12 -449 NULL 2008-04-08 12 -218 NULL 2008-04-08 12 -228 NULL 2008-04-08 12 -453 NULL 2008-04-08 12 -209 NULL 2008-04-08 12 -468 NULL 2008-04-08 12 -342 NULL 2008-04-08 12 -230 NULL 2008-04-08 12 -368 NULL 2008-04-08 12 -296 NULL 2008-04-08 12 -216 NULL 2008-04-08 12 -367 NULL 2008-04-08 12 -344 NULL 2008-04-08 12 -274 NULL 2008-04-08 12 -219 NULL 2008-04-08 12 -239 NULL 2008-04-08 12 -485 NULL 2008-04-08 12 -223 NULL 2008-04-08 12 -256 NULL 2008-04-08 12 -263 NULL 2008-04-08 12 -487 NULL 2008-04-08 12 -480 NULL 2008-04-08 12 -401 NULL 2008-04-08 12 -288 NULL 2008-04-08 12 -244 NULL 2008-04-08 12 -438 NULL 2008-04-08 12 -467 NULL 2008-04-08 12 -432 NULL 2008-04-08 12 -202 NULL 2008-04-08 12 -316 NULL 2008-04-08 12 -229 NULL 2008-04-08 12 -469 NULL 2008-04-08 12 -463 NULL 2008-04-08 12 -280 NULL 2008-04-08 12 -283 NULL 2008-04-08 12 -331 NULL 2008-04-08 12 -235 NULL 2008-04-08 12 -321 NULL 2008-04-08 12 -335 NULL 2008-04-08 12 -466 NULL 2008-04-08 12 -366 NULL 2008-04-08 12 -403 NULL 2008-04-08 12 -483 NULL 2008-04-08 12 -257 NULL 2008-04-08 12 -406 NULL 2008-04-08 12 -409 NULL 2008-04-08 12 -406 NULL 2008-04-08 12 -401 NULL 2008-04-08 12 -258 NULL 2008-04-08 12 -203 NULL 2008-04-08 12 -262 NULL 2008-04-08 12 -348 NULL 2008-04-08 12 -424 NULL 2008-04-08 12 -396 NULL 2008-04-08 12 -201 NULL 2008-04-08 12 -217 NULL 2008-04-08 12 -431 NULL 2008-04-08 12 -454 NULL 2008-04-08 12 -478 NULL 2008-04-08 12 -298 NULL 2008-04-08 12 -431 NULL 2008-04-08 12 -424 NULL 2008-04-08 12 -382 NULL 2008-04-08 12 -397 NULL 2008-04-08 12 -480 NULL 2008-04-08 12 -291 NULL 2008-04-08 12 -351 NULL 2008-04-08 12 -255 NULL 2008-04-08 12 -438 NULL 2008-04-08 12 -414 NULL 2008-04-08 12 -200 NULL 2008-04-08 12 -491 NULL 2008-04-08 12 -237 NULL 2008-04-08 12 -439 NULL 2008-04-08 12 -360 NULL 2008-04-08 12 -248 NULL 2008-04-08 12 -479 NULL 2008-04-08 12 -305 NULL 2008-04-08 12 -417 NULL 2008-04-08 12 -444 NULL 2008-04-08 12 -429 NULL 2008-04-08 12 -443 NULL 2008-04-08 12 -323 NULL 
2008-04-08 12 -325 NULL 2008-04-08 12 -277 NULL 2008-04-08 12 -230 NULL 2008-04-08 12 -478 NULL 2008-04-08 12 -468 NULL 2008-04-08 12 -310 NULL 2008-04-08 12 -317 NULL 2008-04-08 12 -333 NULL 2008-04-08 12 -493 NULL 2008-04-08 12 -460 NULL 2008-04-08 12 -207 NULL 2008-04-08 12 -249 NULL 2008-04-08 12 -265 NULL 2008-04-08 12 -480 NULL 2008-04-08 12 -353 NULL 2008-04-08 12 -214 NULL 2008-04-08 12 -462 NULL 2008-04-08 12 -233 NULL 2008-04-08 12 -406 NULL 2008-04-08 12 -454 NULL 2008-04-08 12 -375 NULL 2008-04-08 12 -401 NULL 2008-04-08 12 -421 NULL 2008-04-08 12 -407 NULL 2008-04-08 12 -384 NULL 2008-04-08 12 -256 NULL 2008-04-08 12 -384 NULL 2008-04-08 12 -379 NULL 2008-04-08 12 -462 NULL 2008-04-08 12 -492 NULL 2008-04-08 12 -298 NULL 2008-04-08 12 -341 NULL 2008-04-08 12 -498 NULL 2008-04-08 12 -458 NULL 2008-04-08 12 -362 NULL 2008-04-08 12 -285 NULL 2008-04-08 12 -348 NULL 2008-04-08 12 -273 NULL 2008-04-08 12 -281 NULL 2008-04-08 12 -344 NULL 2008-04-08 12 -469 NULL 2008-04-08 12 -315 NULL 2008-04-08 12 -448 NULL 2008-04-08 12 -348 NULL 2008-04-08 12 -307 NULL 2008-04-08 12 -414 NULL 2008-04-08 12 -477 NULL 2008-04-08 12 -222 NULL 2008-04-08 12 -403 NULL 2008-04-08 12 -400 NULL 2008-04-08 12 -200 NULL 2008-04-08 12 +238 2008-04-08 12 +311 2008-04-08 12 +409 2008-04-08 12 +255 2008-04-08 12 +278 2008-04-08 12 +484 2008-04-08 12 +265 2008-04-08 12 +401 2008-04-08 12 +273 2008-04-08 12 +224 2008-04-08 12 +369 2008-04-08 12 +213 2008-04-08 12 +406 2008-04-08 12 +429 2008-04-08 12 +374 2008-04-08 12 +469 2008-04-08 12 +495 2008-04-08 12 +327 2008-04-08 12 +281 2008-04-08 12 +277 2008-04-08 12 +209 2008-04-08 12 +403 2008-04-08 12 +417 2008-04-08 12 +430 2008-04-08 12 +252 2008-04-08 12 +292 2008-04-08 12 +219 2008-04-08 12 +287 2008-04-08 12 +338 2008-04-08 12 +446 2008-04-08 12 +459 2008-04-08 12 +394 2008-04-08 12 +237 2008-04-08 12 +482 2008-04-08 12 +413 2008-04-08 12 +494 2008-04-08 12 +207 2008-04-08 12 +466 2008-04-08 12 +208 2008-04-08 12 +399 2008-04-08 12 +396 2008-04-08 12 +247 2008-04-08 12 +417 2008-04-08 12 +489 2008-04-08 12 +377 2008-04-08 12 +397 2008-04-08 12 +309 2008-04-08 12 +365 2008-04-08 12 +266 2008-04-08 12 +439 2008-04-08 12 +342 2008-04-08 12 +367 2008-04-08 12 +325 2008-04-08 12 +475 2008-04-08 12 +203 2008-04-08 12 +339 2008-04-08 12 +455 2008-04-08 12 +311 2008-04-08 12 +316 2008-04-08 12 +302 2008-04-08 12 +205 2008-04-08 12 +438 2008-04-08 12 +345 2008-04-08 12 +489 2008-04-08 12 +378 2008-04-08 12 +221 2008-04-08 12 +280 2008-04-08 12 +427 2008-04-08 12 +277 2008-04-08 12 +208 2008-04-08 12 +356 2008-04-08 12 +399 2008-04-08 12 +382 2008-04-08 12 +498 2008-04-08 12 +386 2008-04-08 12 +437 2008-04-08 12 +469 2008-04-08 12 +286 2008-04-08 12 +459 2008-04-08 12 +239 2008-04-08 12 +213 2008-04-08 12 +216 2008-04-08 12 +430 2008-04-08 12 +278 2008-04-08 12 +289 2008-04-08 12 +221 2008-04-08 12 +318 2008-04-08 12 +332 2008-04-08 12 +311 2008-04-08 12 +275 2008-04-08 12 +241 2008-04-08 12 +333 2008-04-08 12 +284 2008-04-08 12 +230 2008-04-08 12 +260 2008-04-08 12 +404 2008-04-08 12 +384 2008-04-08 12 +489 2008-04-08 12 +353 2008-04-08 12 +373 2008-04-08 12 +272 2008-04-08 12 +217 2008-04-08 12 +348 2008-04-08 12 +466 2008-04-08 12 +411 2008-04-08 12 +230 2008-04-08 12 +208 2008-04-08 12 +348 2008-04-08 12 +463 2008-04-08 12 +431 2008-04-08 12 +496 2008-04-08 12 +322 2008-04-08 12 +468 2008-04-08 12 +393 2008-04-08 12 +454 2008-04-08 12 +298 2008-04-08 12 +418 2008-04-08 12 +327 2008-04-08 12 +230 2008-04-08 12 +205 2008-04-08 12 +404 2008-04-08 12 +436 2008-04-08 
12 +469 2008-04-08 12 +468 2008-04-08 12 +308 2008-04-08 12 +288 2008-04-08 12 +481 2008-04-08 12 +457 2008-04-08 12 +282 2008-04-08 12 +318 2008-04-08 12 +318 2008-04-08 12 +409 2008-04-08 12 +470 2008-04-08 12 +369 2008-04-08 12 +316 2008-04-08 12 +413 2008-04-08 12 +490 2008-04-08 12 +364 2008-04-08 12 +395 2008-04-08 12 +282 2008-04-08 12 +238 2008-04-08 12 +419 2008-04-08 12 +307 2008-04-08 12 +435 2008-04-08 12 +277 2008-04-08 12 +273 2008-04-08 12 +306 2008-04-08 12 +224 2008-04-08 12 +309 2008-04-08 12 +389 2008-04-08 12 +327 2008-04-08 12 +242 2008-04-08 12 +369 2008-04-08 12 +392 2008-04-08 12 +272 2008-04-08 12 +331 2008-04-08 12 +401 2008-04-08 12 +242 2008-04-08 12 +452 2008-04-08 12 +226 2008-04-08 12 +497 2008-04-08 12 +402 2008-04-08 12 +396 2008-04-08 12 +317 2008-04-08 12 +395 2008-04-08 12 +336 2008-04-08 12 +229 2008-04-08 12 +233 2008-04-08 12 +472 2008-04-08 12 +322 2008-04-08 12 +498 2008-04-08 12 +321 2008-04-08 12 +430 2008-04-08 12 +489 2008-04-08 12 +458 2008-04-08 12 +223 2008-04-08 12 +492 2008-04-08 12 +449 2008-04-08 12 +218 2008-04-08 12 +228 2008-04-08 12 +453 2008-04-08 12 +209 2008-04-08 12 +468 2008-04-08 12 +342 2008-04-08 12 +230 2008-04-08 12 +368 2008-04-08 12 +296 2008-04-08 12 +216 2008-04-08 12 +367 2008-04-08 12 +344 2008-04-08 12 +274 2008-04-08 12 +219 2008-04-08 12 +239 2008-04-08 12 +485 2008-04-08 12 +223 2008-04-08 12 +256 2008-04-08 12 +263 2008-04-08 12 +487 2008-04-08 12 +480 2008-04-08 12 +401 2008-04-08 12 +288 2008-04-08 12 +244 2008-04-08 12 +438 2008-04-08 12 +467 2008-04-08 12 +432 2008-04-08 12 +202 2008-04-08 12 +316 2008-04-08 12 +229 2008-04-08 12 +469 2008-04-08 12 +463 2008-04-08 12 +280 2008-04-08 12 +283 2008-04-08 12 +331 2008-04-08 12 +235 2008-04-08 12 +321 2008-04-08 12 +335 2008-04-08 12 +466 2008-04-08 12 +366 2008-04-08 12 +403 2008-04-08 12 +483 2008-04-08 12 +257 2008-04-08 12 +406 2008-04-08 12 +409 2008-04-08 12 +406 2008-04-08 12 +401 2008-04-08 12 +258 2008-04-08 12 +203 2008-04-08 12 +262 2008-04-08 12 +348 2008-04-08 12 +424 2008-04-08 12 +396 2008-04-08 12 +201 2008-04-08 12 +217 2008-04-08 12 +431 2008-04-08 12 +454 2008-04-08 12 +478 2008-04-08 12 +298 2008-04-08 12 +431 2008-04-08 12 +424 2008-04-08 12 +382 2008-04-08 12 +397 2008-04-08 12 +480 2008-04-08 12 +291 2008-04-08 12 +351 2008-04-08 12 +255 2008-04-08 12 +438 2008-04-08 12 +414 2008-04-08 12 +200 2008-04-08 12 +491 2008-04-08 12 +237 2008-04-08 12 +439 2008-04-08 12 +360 2008-04-08 12 +248 2008-04-08 12 +479 2008-04-08 12 +305 2008-04-08 12 +417 2008-04-08 12 +444 2008-04-08 12 +429 2008-04-08 12 +443 2008-04-08 12 +323 2008-04-08 12 +325 2008-04-08 12 +277 2008-04-08 12 +230 2008-04-08 12 +478 2008-04-08 12 +468 2008-04-08 12 +310 2008-04-08 12 +317 2008-04-08 12 +333 2008-04-08 12 +493 2008-04-08 12 +460 2008-04-08 12 +207 2008-04-08 12 +249 2008-04-08 12 +265 2008-04-08 12 +480 2008-04-08 12 +353 2008-04-08 12 +214 2008-04-08 12 +462 2008-04-08 12 +233 2008-04-08 12 +406 2008-04-08 12 +454 2008-04-08 12 +375 2008-04-08 12 +401 2008-04-08 12 +421 2008-04-08 12 +407 2008-04-08 12 +384 2008-04-08 12 +256 2008-04-08 12 +384 2008-04-08 12 +379 2008-04-08 12 +462 2008-04-08 12 +492 2008-04-08 12 +298 2008-04-08 12 +341 2008-04-08 12 +498 2008-04-08 12 +458 2008-04-08 12 +362 2008-04-08 12 +285 2008-04-08 12 +348 2008-04-08 12 +273 2008-04-08 12 +281 2008-04-08 12 +344 2008-04-08 12 +469 2008-04-08 12 +315 2008-04-08 12 +448 2008-04-08 12 +348 2008-04-08 12 +307 2008-04-08 12 +414 2008-04-08 12 +477 2008-04-08 12 +222 2008-04-08 12 +403 2008-04-08 
12 +400 2008-04-08 12 +200 2008-04-08 12 Index: src/contrib/hive/ql/src/test/results/clientpositive/join13.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join13.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/join13.q.out (revision 0) @@ -0,0 +1,301 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) c1) (TOK_SELEXPR (TOK_COLREF src value) c2)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) c3) (TOK_SELEXPR (TOK_COLREF src value) c4)))) src2) (AND (= (TOK_COLREF src1 c1) (TOK_COLREF src2 c3)) (< (TOK_COLREF src1 c1) 100))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) c5) (TOK_SELEXPR (TOK_COLREF src value) c6)))) src3) (AND (= (+ (TOK_COLREF src1 c1) (TOK_COLREF src2 c3)) (TOK_COLREF src3 c5)) (< (TOK_COLREF src3 c5) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 c1)) (TOK_SELEXPR (TOK_COLREF src2 c4))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src2:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 1 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + src1:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Filter Operator + predicate: + expr: (0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: 0 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + File Output Operator + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + $INTNAME + Reduce Output Operator + key expressions: + expr: (UDFToDouble(0) + UDFToDouble(2)) + type: double + Map-reduce partition columns: + expr: (UDFToDouble(0) + UDFToDouble(2)) + type: double + tag: 0 + value expressions: + expr: 2 + type: string + expr: 3 + type: string + expr: 0 + type: string + expr: 1 + type: string + src3:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Filter Operator + predicate: + expr: (0 < 200) + type: boolean + Reduce Output Operator + key expressions: + expr: UDFToDouble(0) + type: double + Map-reduce partition columns: + expr: UDFToDouble(0) + type: double + tag: 1 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} {VALUE.2} {VALUE.3} + 1 {VALUE.0} {VALUE.1} + Select Operator + 
expressions: + expr: 2 + type: string + expr: 1 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +2 val_2 +4 val_4 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +9 val_9 +9 val_9 +10 val_10 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +17 val_17 +27 val_27 +33 val_33 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +41 val_41 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +43 val_43 +57 val_57 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +64 val_64 +64 val_64 +64 val_64 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +69 val_69 +69 val_69 +69 val_69 +69 val_69 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +78 val_78 +80 val_80 +82 val_82 +82 val_82 +83 val_83 +83 val_83 +83 val_83 +83 val_83 +84 val_84 +84 val_84 +84 val_84 +84 val_84 +85 val_85 +86 val_86 +86 val_86 +87 val_87 +87 val_87 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +95 val_95 +95 val_95 +95 val_95 +95 val_95 +96 val_96 +97 val_97 +97 val_97 +97 val_97 +97 val_97 +98 val_98 +98 val_98 +98 val_98 +98 val_98 Index: src/contrib/hive/ql/src/test/results/clientpositive/input14.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input14.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input14.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_COLLIST (TOK_COLREF src key) (TOK_COLREF src value)) (TOK_ALIASLIST tkey tvalue) '/bin/cat'))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100)))) + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100)))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -25,7 +25,9 @@ key expressions: expr: tkey type: string - # partition fields: 1 + Map-reduce partition columns: + expr: tkey + type: string tag: -1 value expressions: expr: tkey @@ -37,7 +39,7 @@ 
Filter Operator predicate: expr: (0 < 100) - type: Boolean + type: boolean Select Operator expressions: expr: 0 Index: src/contrib/hive/ql/src/test/results/clientpositive/join15.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/join15.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/join15.q.out (revision 0) @@ -0,0 +1,1094 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2) (= (TOK_COLREF src1 key) (TOK_COLREF src2 key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src2 + Reduce Output Operator + key expressions: + expr: key + type: string + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + src1 + Reduce Output Operator + key expressions: + expr: key + type: string + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +0 val_0 0 val_0 +0 val_0 0 val_0 +0 val_0 0 val_0 +0 val_0 0 val_0 +0 val_0 0 val_0 +0 val_0 0 val_0 +0 val_0 0 val_0 +0 val_0 0 val_0 +0 val_0 0 val_0 +10 val_10 10 val_10 +100 val_100 100 val_100 +100 val_100 100 val_100 +100 val_100 100 val_100 +100 val_100 100 val_100 +103 val_103 103 val_103 +103 val_103 103 val_103 +103 val_103 103 val_103 +103 val_103 103 val_103 +104 val_104 104 val_104 +104 val_104 104 val_104 +104 val_104 104 val_104 +104 val_104 104 val_104 +105 val_105 105 val_105 +11 val_11 11 val_11 +111 val_111 111 val_111 +113 val_113 113 val_113 +113 val_113 113 val_113 +113 val_113 113 val_113 +113 val_113 113 val_113 +114 val_114 114 val_114 +116 val_116 116 val_116 +118 val_118 118 val_118 +118 val_118 118 val_118 +118 val_118 118 val_118 +118 val_118 118 val_118 +119 val_119 119 val_119 +119 val_119 119 val_119 +119 val_119 119 val_119 +119 val_119 119 val_119 +119 val_119 119 val_119 +119 val_119 119 val_119 +119 val_119 119 val_119 +119 val_119 119 val_119 +119 val_119 119 val_119 +12 val_12 12 val_12 +12 val_12 12 val_12 +12 val_12 12 val_12 +12 val_12 12 val_12 +120 val_120 120 val_120 +120 val_120 120 val_120 +120 val_120 120 val_120 +120 val_120 120 val_120 +125 val_125 125 val_125 +125 val_125 125 val_125 +125 val_125 125 val_125 +125 val_125 125 val_125 +126 val_126 126 val_126 +128 val_128 128 val_128 +128 val_128 128 val_128 +128 val_128 128 val_128 +128 val_128 128 val_128 +128 val_128 128 val_128 +128 val_128 128 val_128 +128 val_128 128 val_128 +128 val_128 128 val_128 +128 val_128 128 val_128 +129 val_129 129 val_129 +129 val_129 129 val_129 +129 val_129 129 val_129 +129 val_129 129 val_129 +131 val_131 131 val_131 +133 val_133 133 val_133 +134 val_134 134 val_134 +134 val_134 134 val_134 +134 val_134 134 val_134 
+134 val_134 134 val_134 +136 val_136 136 val_136 +137 val_137 137 val_137 +137 val_137 137 val_137 +137 val_137 137 val_137 +137 val_137 137 val_137 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +138 val_138 138 val_138 +143 val_143 143 val_143 +145 val_145 145 val_145 +146 val_146 146 val_146 +146 val_146 146 val_146 +146 val_146 146 val_146 +146 val_146 146 val_146 +149 val_149 149 val_149 +149 val_149 149 val_149 +149 val_149 149 val_149 +149 val_149 149 val_149 +15 val_15 15 val_15 +15 val_15 15 val_15 +15 val_15 15 val_15 +15 val_15 15 val_15 +150 val_150 150 val_150 +152 val_152 152 val_152 +152 val_152 152 val_152 +152 val_152 152 val_152 +152 val_152 152 val_152 +153 val_153 153 val_153 +155 val_155 155 val_155 +156 val_156 156 val_156 +157 val_157 157 val_157 +158 val_158 158 val_158 +160 val_160 160 val_160 +162 val_162 162 val_162 +163 val_163 163 val_163 +164 val_164 164 val_164 +164 val_164 164 val_164 +164 val_164 164 val_164 +164 val_164 164 val_164 +165 val_165 165 val_165 +165 val_165 165 val_165 +165 val_165 165 val_165 +165 val_165 165 val_165 +166 val_166 166 val_166 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +168 val_168 168 val_168 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +17 val_17 17 val_17 +170 val_170 170 val_170 +172 val_172 172 val_172 +172 val_172 172 val_172 +172 val_172 172 val_172 +172 val_172 172 val_172 +174 val_174 174 val_174 +174 val_174 174 val_174 +174 val_174 174 val_174 +174 val_174 174 val_174 +175 val_175 175 val_175 +175 val_175 175 val_175 +175 val_175 175 val_175 +175 val_175 175 val_175 +176 val_176 176 val_176 +176 val_176 176 val_176 +176 val_176 176 val_176 +176 val_176 176 val_176 +177 val_177 177 val_177 +178 val_178 178 val_178 +179 val_179 179 val_179 +179 val_179 179 val_179 +179 val_179 179 val_179 +179 val_179 179 val_179 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +180 val_180 180 val_180 +181 val_181 181 val_181 +183 val_183 183 val_183 +186 val_186 186 val_186 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +189 val_189 189 val_189 +19 val_19 19 val_19 +190 val_190 190 val_190 +191 val_191 191 val_191 +191 val_191 191 val_191 +191 val_191 191 val_191 +191 val_191 191 val_191 +192 val_192 192 val_192 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +194 val_194 194 val_194 +195 val_195 195 
val_195 +195 val_195 195 val_195 +195 val_195 195 val_195 +195 val_195 195 val_195 +196 val_196 196 val_196 +197 val_197 197 val_197 +197 val_197 197 val_197 +197 val_197 197 val_197 +197 val_197 197 val_197 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +2 val_2 2 val_2 +20 val_20 20 val_20 +200 val_200 200 val_200 +200 val_200 200 val_200 +200 val_200 200 val_200 +200 val_200 200 val_200 +201 val_201 201 val_201 +202 val_202 202 val_202 +203 val_203 203 val_203 +203 val_203 203 val_203 +203 val_203 203 val_203 +203 val_203 203 val_203 +205 val_205 205 val_205 +205 val_205 205 val_205 +205 val_205 205 val_205 +205 val_205 205 val_205 +207 val_207 207 val_207 +207 val_207 207 val_207 +207 val_207 207 val_207 +207 val_207 207 val_207 +208 val_208 208 val_208 +208 val_208 208 val_208 +208 val_208 208 val_208 +208 val_208 208 val_208 +208 val_208 208 val_208 +208 val_208 208 val_208 +208 val_208 208 val_208 +208 val_208 208 val_208 +208 val_208 208 val_208 +209 val_209 209 val_209 +209 val_209 209 val_209 +209 val_209 209 val_209 +209 val_209 209 val_209 +213 val_213 213 val_213 +213 val_213 213 val_213 +213 val_213 213 val_213 +213 val_213 213 val_213 +214 val_214 214 val_214 +216 val_216 216 val_216 +216 val_216 216 val_216 +216 val_216 216 val_216 +216 val_216 216 val_216 +217 val_217 217 val_217 +217 val_217 217 val_217 +217 val_217 217 val_217 +217 val_217 217 val_217 +218 val_218 218 val_218 +219 val_219 219 val_219 +219 val_219 219 val_219 +219 val_219 219 val_219 +219 val_219 219 val_219 +221 val_221 221 val_221 +221 val_221 221 val_221 +221 val_221 221 val_221 +221 val_221 221 val_221 +222 val_222 222 val_222 +223 val_223 223 val_223 +223 val_223 223 val_223 +223 val_223 223 val_223 +223 val_223 223 val_223 +224 val_224 224 val_224 +224 val_224 224 val_224 +224 val_224 224 val_224 +224 val_224 224 val_224 +226 val_226 226 val_226 +228 val_228 228 val_228 +229 val_229 229 val_229 +229 val_229 229 val_229 +229 val_229 229 val_229 +229 val_229 229 val_229 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +230 val_230 230 val_230 +233 val_233 233 val_233 +233 val_233 233 val_233 +233 val_233 233 val_233 +233 val_233 233 val_233 +235 val_235 235 val_235 +237 val_237 237 val_237 +237 val_237 237 val_237 +237 val_237 237 val_237 +237 val_237 237 val_237 +238 val_238 238 val_238 +238 val_238 238 val_238 +238 val_238 238 val_238 +238 val_238 238 val_238 +239 val_239 239 val_239 +239 val_239 239 val_239 +239 val_239 239 val_239 +239 val_239 239 val_239 +24 val_24 24 val_24 +24 val_24 24 val_24 +24 val_24 24 val_24 +24 val_24 24 val_24 +241 val_241 241 val_241 +242 val_242 242 val_242 +242 val_242 242 val_242 +242 val_242 242 val_242 +242 val_242 242 val_242 +244 val_244 244 val_244 +247 val_247 247 val_247 +248 val_248 248 val_248 +249 val_249 249 val_249 +252 val_252 252 val_252 
+255 val_255 255 val_255 +255 val_255 255 val_255 +255 val_255 255 val_255 +255 val_255 255 val_255 +256 val_256 256 val_256 +256 val_256 256 val_256 +256 val_256 256 val_256 +256 val_256 256 val_256 +257 val_257 257 val_257 +258 val_258 258 val_258 +26 val_26 26 val_26 +26 val_26 26 val_26 +26 val_26 26 val_26 +26 val_26 26 val_26 +260 val_260 260 val_260 +262 val_262 262 val_262 +263 val_263 263 val_263 +265 val_265 265 val_265 +265 val_265 265 val_265 +265 val_265 265 val_265 +265 val_265 265 val_265 +266 val_266 266 val_266 +27 val_27 27 val_27 +272 val_272 272 val_272 +272 val_272 272 val_272 +272 val_272 272 val_272 +272 val_272 272 val_272 +273 val_273 273 val_273 +273 val_273 273 val_273 +273 val_273 273 val_273 +273 val_273 273 val_273 +273 val_273 273 val_273 +273 val_273 273 val_273 +273 val_273 273 val_273 +273 val_273 273 val_273 +273 val_273 273 val_273 +274 val_274 274 val_274 +275 val_275 275 val_275 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +277 val_277 277 val_277 +278 val_278 278 val_278 +278 val_278 278 val_278 +278 val_278 278 val_278 +278 val_278 278 val_278 +28 val_28 28 val_28 +280 val_280 280 val_280 +280 val_280 280 val_280 +280 val_280 280 val_280 +280 val_280 280 val_280 +281 val_281 281 val_281 +281 val_281 281 val_281 +281 val_281 281 val_281 +281 val_281 281 val_281 +282 val_282 282 val_282 +282 val_282 282 val_282 +282 val_282 282 val_282 +282 val_282 282 val_282 +283 val_283 283 val_283 +284 val_284 284 val_284 +285 val_285 285 val_285 +286 val_286 286 val_286 +287 val_287 287 val_287 +288 val_288 288 val_288 +288 val_288 288 val_288 +288 val_288 288 val_288 +288 val_288 288 val_288 +289 val_289 289 val_289 +291 val_291 291 val_291 +292 val_292 292 val_292 +296 val_296 296 val_296 +298 val_298 298 val_298 +298 val_298 298 val_298 +298 val_298 298 val_298 +298 val_298 298 val_298 +298 val_298 298 val_298 +298 val_298 298 val_298 +298 val_298 298 val_298 +298 val_298 298 val_298 +298 val_298 298 val_298 +30 val_30 30 val_30 +302 val_302 302 val_302 +305 val_305 305 val_305 +306 val_306 306 val_306 +307 val_307 307 val_307 +307 val_307 307 val_307 +307 val_307 307 val_307 +307 val_307 307 val_307 +308 val_308 308 val_308 +309 val_309 309 val_309 +309 val_309 309 val_309 +309 val_309 309 val_309 +309 val_309 309 val_309 +310 val_310 310 val_310 +311 val_311 311 val_311 +311 val_311 311 val_311 +311 val_311 311 val_311 +311 val_311 311 val_311 +311 val_311 311 val_311 +311 val_311 311 val_311 +311 val_311 311 val_311 +311 val_311 311 val_311 +311 val_311 311 val_311 +315 val_315 315 val_315 +316 val_316 316 val_316 +316 val_316 316 val_316 +316 val_316 316 val_316 +316 val_316 316 val_316 +316 val_316 316 val_316 +316 val_316 316 val_316 +316 val_316 316 val_316 +316 val_316 316 val_316 +316 val_316 316 val_316 +317 val_317 317 val_317 +317 val_317 317 val_317 +317 val_317 317 val_317 +317 val_317 317 val_317 +318 val_318 318 val_318 +318 val_318 318 val_318 +318 val_318 318 val_318 +318 val_318 318 val_318 +318 val_318 318 val_318 +318 val_318 318 val_318 +318 val_318 318 val_318 +318 val_318 318 val_318 +318 val_318 318 val_318 +321 val_321 321 val_321 +321 val_321 321 val_321 +321 val_321 321 val_321 +321 val_321 321 val_321 +322 
val_322 322 val_322 +322 val_322 322 val_322 +322 val_322 322 val_322 +322 val_322 322 val_322 +323 val_323 323 val_323 +325 val_325 325 val_325 +325 val_325 325 val_325 +325 val_325 325 val_325 +325 val_325 325 val_325 +327 val_327 327 val_327 +327 val_327 327 val_327 +327 val_327 327 val_327 +327 val_327 327 val_327 +327 val_327 327 val_327 +327 val_327 327 val_327 +327 val_327 327 val_327 +327 val_327 327 val_327 +327 val_327 327 val_327 +33 val_33 33 val_33 +331 val_331 331 val_331 +331 val_331 331 val_331 +331 val_331 331 val_331 +331 val_331 331 val_331 +332 val_332 332 val_332 +333 val_333 333 val_333 +333 val_333 333 val_333 +333 val_333 333 val_333 +333 val_333 333 val_333 +335 val_335 335 val_335 +336 val_336 336 val_336 +338 val_338 338 val_338 +339 val_339 339 val_339 +34 val_34 34 val_34 +341 val_341 341 val_341 +342 val_342 342 val_342 +342 val_342 342 val_342 +342 val_342 342 val_342 +342 val_342 342 val_342 +344 val_344 344 val_344 +344 val_344 344 val_344 +344 val_344 344 val_344 +344 val_344 344 val_344 +345 val_345 345 val_345 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +348 val_348 348 val_348 +35 val_35 35 val_35 +35 val_35 35 val_35 +35 val_35 35 val_35 +35 val_35 35 val_35 +35 val_35 35 val_35 +35 val_35 35 val_35 +35 val_35 35 val_35 +35 val_35 35 val_35 +35 val_35 35 val_35 +351 val_351 351 val_351 +353 val_353 353 val_353 +353 val_353 353 val_353 +353 val_353 353 val_353 +353 val_353 353 val_353 +356 val_356 356 val_356 +360 val_360 360 val_360 +362 val_362 362 val_362 +364 val_364 364 val_364 +365 val_365 365 val_365 +366 val_366 366 val_366 +367 val_367 367 val_367 +367 val_367 367 val_367 +367 val_367 367 val_367 +367 val_367 367 val_367 +368 val_368 368 val_368 +369 val_369 369 val_369 +369 val_369 369 val_369 +369 val_369 369 val_369 +369 val_369 369 val_369 +369 val_369 369 val_369 +369 val_369 369 val_369 +369 val_369 369 val_369 +369 val_369 369 val_369 +369 val_369 369 val_369 +37 val_37 37 val_37 +37 val_37 37 val_37 +37 val_37 37 val_37 +37 val_37 37 val_37 +373 val_373 373 val_373 +374 val_374 374 val_374 +375 val_375 375 val_375 +377 val_377 377 val_377 +378 val_378 378 val_378 +379 val_379 379 val_379 +382 val_382 382 val_382 +382 val_382 382 val_382 +382 val_382 382 val_382 +382 val_382 382 val_382 +384 val_384 384 val_384 +384 val_384 384 val_384 +384 val_384 384 val_384 +384 val_384 384 val_384 +384 val_384 384 val_384 +384 val_384 384 val_384 +384 val_384 384 val_384 +384 val_384 384 val_384 +384 val_384 384 val_384 +386 val_386 386 val_386 +389 val_389 389 val_389 +392 val_392 392 val_392 +393 val_393 393 val_393 +394 val_394 394 val_394 +395 val_395 395 val_395 +395 val_395 395 val_395 +395 val_395 395 val_395 +395 val_395 395 val_395 +396 val_396 396 val_396 +396 val_396 396 val_396 +396 val_396 396 val_396 +396 val_396 396 val_396 +396 val_396 396 val_396 +396 val_396 396 val_396 +396 val_396 396 val_396 +396 val_396 396 val_396 +396 val_396 396 val_396 +397 val_397 397 val_397 +397 val_397 397 
val_397 +397 val_397 397 val_397 +397 val_397 397 val_397 +399 val_399 399 val_399 +399 val_399 399 val_399 +399 val_399 399 val_399 +399 val_399 399 val_399 +4 val_4 4 val_4 +400 val_400 400 val_400 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +401 val_401 401 val_401 +402 val_402 402 val_402 +403 val_403 403 val_403 +403 val_403 403 val_403 +403 val_403 403 val_403 +403 val_403 403 val_403 +403 val_403 403 val_403 +403 val_403 403 val_403 +403 val_403 403 val_403 +403 val_403 403 val_403 +403 val_403 403 val_403 +404 val_404 404 val_404 +404 val_404 404 val_404 +404 val_404 404 val_404 +404 val_404 404 val_404 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +406 val_406 406 val_406 +407 val_407 407 val_407 +409 val_409 409 val_409 +409 val_409 409 val_409 +409 val_409 409 val_409 +409 val_409 409 val_409 +409 val_409 409 val_409 +409 val_409 409 val_409 +409 val_409 409 val_409 +409 val_409 409 val_409 +409 val_409 409 val_409 +41 val_41 41 val_41 +411 val_411 411 val_411 +413 val_413 413 val_413 +413 val_413 413 val_413 +413 val_413 413 val_413 +413 val_413 413 val_413 +414 val_414 414 val_414 +414 val_414 414 val_414 +414 val_414 414 val_414 +414 val_414 414 val_414 +417 val_417 417 val_417 +417 val_417 417 val_417 +417 val_417 417 val_417 +417 val_417 417 val_417 +417 val_417 417 val_417 +417 val_417 417 val_417 +417 val_417 417 val_417 +417 val_417 417 val_417 +417 val_417 417 val_417 +418 val_418 418 val_418 +419 val_419 419 val_419 +42 val_42 42 val_42 +42 val_42 42 val_42 +42 val_42 42 val_42 +42 val_42 42 val_42 +421 val_421 421 val_421 +424 val_424 424 val_424 +424 val_424 424 val_424 +424 val_424 424 val_424 +424 val_424 424 val_424 +427 val_427 427 val_427 +429 val_429 429 val_429 +429 val_429 429 val_429 +429 val_429 429 val_429 +429 val_429 429 val_429 +43 val_43 43 val_43 +430 val_430 430 val_430 +430 val_430 430 val_430 +430 val_430 430 val_430 +430 val_430 430 val_430 +430 val_430 430 val_430 +430 val_430 430 val_430 +430 val_430 430 val_430 +430 val_430 430 val_430 +430 val_430 430 val_430 +431 val_431 431 val_431 +431 val_431 431 val_431 +431 val_431 431 val_431 +431 val_431 431 val_431 +431 val_431 431 val_431 +431 val_431 431 val_431 +431 val_431 431 val_431 +431 val_431 431 val_431 +431 val_431 431 val_431 +432 val_432 432 val_432 +435 val_435 435 val_435 +436 val_436 436 val_436 +437 val_437 437 val_437 +438 val_438 438 val_438 +438 val_438 438 val_438 +438 val_438 438 val_438 +438 val_438 438 val_438 +438 val_438 438 val_438 +438 val_438 438 val_438 +438 val_438 438 val_438 +438 val_438 438 val_438 +438 val_438 438 val_438 +439 val_439 439 val_439 +439 val_439 439 val_439 +439 val_439 439 val_439 
+439 val_439 439 val_439 +44 val_44 44 val_44 +443 val_443 443 val_443 +444 val_444 444 val_444 +446 val_446 446 val_446 +448 val_448 448 val_448 +449 val_449 449 val_449 +452 val_452 452 val_452 +453 val_453 453 val_453 +454 val_454 454 val_454 +454 val_454 454 val_454 +454 val_454 454 val_454 +454 val_454 454 val_454 +454 val_454 454 val_454 +454 val_454 454 val_454 +454 val_454 454 val_454 +454 val_454 454 val_454 +454 val_454 454 val_454 +455 val_455 455 val_455 +457 val_457 457 val_457 +458 val_458 458 val_458 +458 val_458 458 val_458 +458 val_458 458 val_458 +458 val_458 458 val_458 +459 val_459 459 val_459 +459 val_459 459 val_459 +459 val_459 459 val_459 +459 val_459 459 val_459 +460 val_460 460 val_460 +462 val_462 462 val_462 +462 val_462 462 val_462 +462 val_462 462 val_462 +462 val_462 462 val_462 +463 val_463 463 val_463 +463 val_463 463 val_463 +463 val_463 463 val_463 +463 val_463 463 val_463 +466 val_466 466 val_466 +466 val_466 466 val_466 +466 val_466 466 val_466 +466 val_466 466 val_466 +466 val_466 466 val_466 +466 val_466 466 val_466 +466 val_466 466 val_466 +466 val_466 466 val_466 +466 val_466 466 val_466 +467 val_467 467 val_467 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +468 val_468 468 val_468 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +469 val_469 469 val_469 +47 val_47 47 val_47 +470 val_470 470 val_470 +472 val_472 472 val_472 +475 val_475 475 val_475 +477 val_477 477 val_477 +478 val_478 478 val_478 +478 val_478 478 val_478 +478 val_478 478 val_478 +478 val_478 478 val_478 +479 val_479 479 val_479 +480 val_480 480 val_480 +480 val_480 480 val_480 +480 val_480 480 val_480 +480 val_480 480 val_480 +480 val_480 480 val_480 +480 val_480 480 val_480 +480 val_480 480 val_480 +480 val_480 480 val_480 +480 val_480 480 val_480 +481 val_481 481 val_481 +482 val_482 482 val_482 +483 val_483 483 val_483 +484 val_484 484 val_484 +485 val_485 485 val_485 +487 val_487 487 val_487 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +489 val_489 489 val_489 +490 val_490 490 val_490 +491 val_491 491 val_491 +492 val_492 492 val_492 +492 val_492 492 val_492 +492 val_492 492 val_492 +492 val_492 492 val_492 +493 val_493 493 val_493 +494 val_494 494 val_494 +495 val_495 495 val_495 +496 val_496 496 val_496 +497 val_497 497 val_497 +498 val_498 498 val_498 +498 val_498 498 val_498 +498 val_498 
498 val_498 +498 val_498 498 val_498 +498 val_498 498 val_498 +498 val_498 498 val_498 +498 val_498 498 val_498 +498 val_498 498 val_498 +498 val_498 498 val_498 +5 val_5 5 val_5 +5 val_5 5 val_5 +5 val_5 5 val_5 +5 val_5 5 val_5 +5 val_5 5 val_5 +5 val_5 5 val_5 +5 val_5 5 val_5 +5 val_5 5 val_5 +5 val_5 5 val_5 +51 val_51 51 val_51 +51 val_51 51 val_51 +51 val_51 51 val_51 +51 val_51 51 val_51 +53 val_53 53 val_53 +54 val_54 54 val_54 +57 val_57 57 val_57 +58 val_58 58 val_58 +58 val_58 58 val_58 +58 val_58 58 val_58 +58 val_58 58 val_58 +64 val_64 64 val_64 +65 val_65 65 val_65 +66 val_66 66 val_66 +67 val_67 67 val_67 +67 val_67 67 val_67 +67 val_67 67 val_67 +67 val_67 67 val_67 +69 val_69 69 val_69 +70 val_70 70 val_70 +70 val_70 70 val_70 +70 val_70 70 val_70 +70 val_70 70 val_70 +70 val_70 70 val_70 +70 val_70 70 val_70 +70 val_70 70 val_70 +70 val_70 70 val_70 +70 val_70 70 val_70 +72 val_72 72 val_72 +72 val_72 72 val_72 +72 val_72 72 val_72 +72 val_72 72 val_72 +74 val_74 74 val_74 +76 val_76 76 val_76 +76 val_76 76 val_76 +76 val_76 76 val_76 +76 val_76 76 val_76 +77 val_77 77 val_77 +78 val_78 78 val_78 +8 val_8 8 val_8 +80 val_80 80 val_80 +82 val_82 82 val_82 +83 val_83 83 val_83 +83 val_83 83 val_83 +83 val_83 83 val_83 +83 val_83 83 val_83 +84 val_84 84 val_84 +84 val_84 84 val_84 +84 val_84 84 val_84 +84 val_84 84 val_84 +85 val_85 85 val_85 +86 val_86 86 val_86 +87 val_87 87 val_87 +9 val_9 9 val_9 +90 val_90 90 val_90 +90 val_90 90 val_90 +90 val_90 90 val_90 +90 val_90 90 val_90 +90 val_90 90 val_90 +90 val_90 90 val_90 +90 val_90 90 val_90 +90 val_90 90 val_90 +90 val_90 90 val_90 +92 val_92 92 val_92 +95 val_95 95 val_95 +95 val_95 95 val_95 +95 val_95 95 val_95 +95 val_95 95 val_95 +96 val_96 96 val_96 +97 val_97 97 val_97 +97 val_97 97 val_97 +97 val_97 97 val_97 +97 val_97 97 val_97 +98 val_98 98 val_98 +98 val_98 98 val_98 +98 val_98 98 val_98 +98 val_98 98 val_98 Index: src/contrib/hive/ql/src/test/results/clientpositive/input_part1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input_part1.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input_part1.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: (((key < 100) and (ds = '2008-04-08')) and (hr = '12')) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -25,7 +25,7 @@ expr: ds type: string File Output Operator - directory: /tmp/hive-njain/13222008/293248514.10000.insclause-0 + directory: /tmp/hive-njain/482622140/1009180747.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -34,7 +34,6 @@ serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 columns key,value,hr,ds - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -72,7 +71,7 @@ Move Operator tables: replace: - source: /tmp/hive-njain/13222008/293248514.10000.insclause-0 + source: /tmp/hive-njain/482622140/1009180747.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -81,7 +80,6 @@ serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 columns key,value,hr,ds - SORTBUCKETCOLSPREFIX 
TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat Index: src/contrib/hive/ql/src/test/results/clientpositive/input18.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input18.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/input18.q.out (revision 0) @@ -0,0 +1,154 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value) (+ 1 2) (+ 3 4)) '/bin/cat'))) (TOK_CLUSTERBY key))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap key)) (TOK_SELEXPR (TOK_FUNCTION regexp_replace (TOK_COLREF tmap value) '\t' '+'))) (TOK_WHERE (< (TOK_COLREF tmap key) 100)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + tmap:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: (1 + 2) + type: int + expr: (3 + 4) + type: int + Transform Operator + command: /bin/cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + Reduce Output Operator + key expressions: + expr: key + type: string + Map-reduce partition columns: + expr: key + type: string + tag: -1 + value expressions: + expr: key + type: string + expr: value + type: string + Reduce Operator Tree: + Extract + Filter Operator + predicate: + expr: (0 < 100) + type: boolean + Select Operator + expressions: + expr: 0 + type: string + expr: regexp_replace(1, ' ', '+') + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +0 val_0+3+7 +0 val_0+3+7 +0 val_0+3+7 +10 val_10+3+7 +11 val_11+3+7 +12 val_12+3+7 +12 val_12+3+7 +15 val_15+3+7 +15 val_15+3+7 +17 val_17+3+7 +18 val_18+3+7 +18 val_18+3+7 +19 val_19+3+7 +2 val_2+3+7 +20 val_20+3+7 +24 val_24+3+7 +24 val_24+3+7 +26 val_26+3+7 +26 val_26+3+7 +27 val_27+3+7 +28 val_28+3+7 +30 val_30+3+7 +33 val_33+3+7 +34 val_34+3+7 +35 val_35+3+7 +35 val_35+3+7 +35 val_35+3+7 +37 val_37+3+7 +37 val_37+3+7 +4 val_4+3+7 +41 val_41+3+7 +42 val_42+3+7 +42 val_42+3+7 +43 val_43+3+7 +44 val_44+3+7 +47 val_47+3+7 +5 val_5+3+7 +5 val_5+3+7 +5 val_5+3+7 +51 val_51+3+7 +51 val_51+3+7 +53 val_53+3+7 +54 val_54+3+7 +57 val_57+3+7 +58 val_58+3+7 +58 val_58+3+7 +64 val_64+3+7 +65 val_65+3+7 +66 val_66+3+7 +67 val_67+3+7 +67 val_67+3+7 +69 val_69+3+7 +70 val_70+3+7 +70 val_70+3+7 +70 val_70+3+7 +72 val_72+3+7 +72 val_72+3+7 +74 val_74+3+7 +76 val_76+3+7 +76 val_76+3+7 +77 val_77+3+7 +78 val_78+3+7 +8 val_8+3+7 +80 val_80+3+7 +82 val_82+3+7 +83 val_83+3+7 +83 val_83+3+7 +84 val_84+3+7 +84 val_84+3+7 +85 val_85+3+7 +86 val_86+3+7 +87 val_87+3+7 +9 val_9+3+7 +90 val_90+3+7 +90 
val_90+3+7 +90 val_90+3+7 +92 val_92+3+7 +95 val_95+3+7 +95 val_95+3+7 +96 val_96+3+7 +97 val_97+3+7 +97 val_97+3+7 +98 val_98+3+7 +98 val_98+3+7 Index: src/contrib/hive/ql/src/test/results/clientpositive/input_part3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input_part3.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/input_part3.q.out (revision 0) @@ -0,0 +1,512 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (and (= (TOK_COLREF x ds) '2008-04-08') (= (TOK_COLREF x hr) 11))))) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + + +238 val_238 2008-04-08 11 +86 val_86 2008-04-08 11 +311 val_311 2008-04-08 11 +27 val_27 2008-04-08 11 +165 val_165 2008-04-08 11 +409 val_409 2008-04-08 11 +255 val_255 2008-04-08 11 +278 val_278 2008-04-08 11 +98 val_98 2008-04-08 11 +484 val_484 2008-04-08 11 +265 val_265 2008-04-08 11 +193 val_193 2008-04-08 11 +401 val_401 2008-04-08 11 +150 val_150 2008-04-08 11 +273 val_273 2008-04-08 11 +224 val_224 2008-04-08 11 +369 val_369 2008-04-08 11 +66 val_66 2008-04-08 11 +128 val_128 2008-04-08 11 +213 val_213 2008-04-08 11 +146 val_146 2008-04-08 11 +406 val_406 2008-04-08 11 +429 val_429 2008-04-08 11 +374 val_374 2008-04-08 11 +152 val_152 2008-04-08 11 +469 val_469 2008-04-08 11 +145 val_145 2008-04-08 11 +495 val_495 2008-04-08 11 +37 val_37 2008-04-08 11 +327 val_327 2008-04-08 11 +281 val_281 2008-04-08 11 +277 val_277 2008-04-08 11 +209 val_209 2008-04-08 11 +15 val_15 2008-04-08 11 +82 val_82 2008-04-08 11 +403 val_403 2008-04-08 11 +166 val_166 2008-04-08 11 +417 val_417 2008-04-08 11 +430 val_430 2008-04-08 11 +252 val_252 2008-04-08 11 +292 val_292 2008-04-08 11 +219 val_219 2008-04-08 11 +287 val_287 2008-04-08 11 +153 val_153 2008-04-08 11 +193 val_193 2008-04-08 11 +338 val_338 2008-04-08 11 +446 val_446 2008-04-08 11 +459 val_459 2008-04-08 11 +394 val_394 2008-04-08 11 +237 val_237 2008-04-08 11 +482 val_482 2008-04-08 11 +174 val_174 2008-04-08 11 +413 val_413 2008-04-08 11 +494 val_494 2008-04-08 11 +207 val_207 2008-04-08 11 +199 val_199 2008-04-08 11 +466 val_466 2008-04-08 11 +208 val_208 2008-04-08 11 +174 val_174 2008-04-08 11 +399 val_399 2008-04-08 11 +396 val_396 2008-04-08 11 +247 val_247 2008-04-08 11 +417 val_417 2008-04-08 11 +489 val_489 2008-04-08 11 +162 val_162 2008-04-08 11 +377 val_377 2008-04-08 11 +397 val_397 2008-04-08 11 +309 val_309 2008-04-08 11 +365 val_365 2008-04-08 11 +266 val_266 2008-04-08 11 +439 val_439 2008-04-08 11 +342 val_342 2008-04-08 11 +367 val_367 2008-04-08 11 +325 val_325 2008-04-08 11 +167 val_167 2008-04-08 11 +195 val_195 2008-04-08 11 +475 val_475 2008-04-08 11 +17 val_17 2008-04-08 11 +113 val_113 2008-04-08 11 +155 val_155 2008-04-08 11 +203 val_203 2008-04-08 11 +339 val_339 2008-04-08 11 +0 val_0 2008-04-08 11 +455 val_455 2008-04-08 11 +128 val_128 2008-04-08 11 +311 val_311 2008-04-08 11 +316 val_316 2008-04-08 11 +57 val_57 2008-04-08 11 +302 val_302 2008-04-08 11 +205 val_205 2008-04-08 11 +149 val_149 2008-04-08 11 +438 val_438 2008-04-08 11 +345 val_345 2008-04-08 11 +129 val_129 2008-04-08 11 +170 val_170 2008-04-08 11 +20 val_20 2008-04-08 11 +489 val_489 2008-04-08 11 +157 val_157 2008-04-08 11 +378 val_378 2008-04-08 11 +221 val_221 2008-04-08 11 +92 val_92 2008-04-08 11 +111 
val_111 2008-04-08 11 +47 val_47 2008-04-08 11 +72 val_72 2008-04-08 11 +4 val_4 2008-04-08 11 +280 val_280 2008-04-08 11 +35 val_35 2008-04-08 11 +427 val_427 2008-04-08 11 +277 val_277 2008-04-08 11 +208 val_208 2008-04-08 11 +356 val_356 2008-04-08 11 +399 val_399 2008-04-08 11 +169 val_169 2008-04-08 11 +382 val_382 2008-04-08 11 +498 val_498 2008-04-08 11 +125 val_125 2008-04-08 11 +386 val_386 2008-04-08 11 +437 val_437 2008-04-08 11 +469 val_469 2008-04-08 11 +192 val_192 2008-04-08 11 +286 val_286 2008-04-08 11 +187 val_187 2008-04-08 11 +176 val_176 2008-04-08 11 +54 val_54 2008-04-08 11 +459 val_459 2008-04-08 11 +51 val_51 2008-04-08 11 +138 val_138 2008-04-08 11 +103 val_103 2008-04-08 11 +239 val_239 2008-04-08 11 +213 val_213 2008-04-08 11 +216 val_216 2008-04-08 11 +430 val_430 2008-04-08 11 +278 val_278 2008-04-08 11 +176 val_176 2008-04-08 11 +289 val_289 2008-04-08 11 +221 val_221 2008-04-08 11 +65 val_65 2008-04-08 11 +318 val_318 2008-04-08 11 +332 val_332 2008-04-08 11 +311 val_311 2008-04-08 11 +275 val_275 2008-04-08 11 +137 val_137 2008-04-08 11 +241 val_241 2008-04-08 11 +83 val_83 2008-04-08 11 +333 val_333 2008-04-08 11 +180 val_180 2008-04-08 11 +284 val_284 2008-04-08 11 +12 val_12 2008-04-08 11 +230 val_230 2008-04-08 11 +181 val_181 2008-04-08 11 +67 val_67 2008-04-08 11 +260 val_260 2008-04-08 11 +404 val_404 2008-04-08 11 +384 val_384 2008-04-08 11 +489 val_489 2008-04-08 11 +353 val_353 2008-04-08 11 +373 val_373 2008-04-08 11 +272 val_272 2008-04-08 11 +138 val_138 2008-04-08 11 +217 val_217 2008-04-08 11 +84 val_84 2008-04-08 11 +348 val_348 2008-04-08 11 +466 val_466 2008-04-08 11 +58 val_58 2008-04-08 11 +8 val_8 2008-04-08 11 +411 val_411 2008-04-08 11 +230 val_230 2008-04-08 11 +208 val_208 2008-04-08 11 +348 val_348 2008-04-08 11 +24 val_24 2008-04-08 11 +463 val_463 2008-04-08 11 +431 val_431 2008-04-08 11 +179 val_179 2008-04-08 11 +172 val_172 2008-04-08 11 +42 val_42 2008-04-08 11 +129 val_129 2008-04-08 11 +158 val_158 2008-04-08 11 +119 val_119 2008-04-08 11 +496 val_496 2008-04-08 11 +0 val_0 2008-04-08 11 +322 val_322 2008-04-08 11 +197 val_197 2008-04-08 11 +468 val_468 2008-04-08 11 +393 val_393 2008-04-08 11 +454 val_454 2008-04-08 11 +100 val_100 2008-04-08 11 +298 val_298 2008-04-08 11 +199 val_199 2008-04-08 11 +191 val_191 2008-04-08 11 +418 val_418 2008-04-08 11 +96 val_96 2008-04-08 11 +26 val_26 2008-04-08 11 +165 val_165 2008-04-08 11 +327 val_327 2008-04-08 11 +230 val_230 2008-04-08 11 +205 val_205 2008-04-08 11 +120 val_120 2008-04-08 11 +131 val_131 2008-04-08 11 +51 val_51 2008-04-08 11 +404 val_404 2008-04-08 11 +43 val_43 2008-04-08 11 +436 val_436 2008-04-08 11 +156 val_156 2008-04-08 11 +469 val_469 2008-04-08 11 +468 val_468 2008-04-08 11 +308 val_308 2008-04-08 11 +95 val_95 2008-04-08 11 +196 val_196 2008-04-08 11 +288 val_288 2008-04-08 11 +481 val_481 2008-04-08 11 +457 val_457 2008-04-08 11 +98 val_98 2008-04-08 11 +282 val_282 2008-04-08 11 +197 val_197 2008-04-08 11 +187 val_187 2008-04-08 11 +318 val_318 2008-04-08 11 +318 val_318 2008-04-08 11 +409 val_409 2008-04-08 11 +470 val_470 2008-04-08 11 +137 val_137 2008-04-08 11 +369 val_369 2008-04-08 11 +316 val_316 2008-04-08 11 +169 val_169 2008-04-08 11 +413 val_413 2008-04-08 11 +85 val_85 2008-04-08 11 +77 val_77 2008-04-08 11 +0 val_0 2008-04-08 11 +490 val_490 2008-04-08 11 +87 val_87 2008-04-08 11 +364 val_364 2008-04-08 11 +179 val_179 2008-04-08 11 +118 val_118 2008-04-08 11 +134 val_134 2008-04-08 11 +395 val_395 2008-04-08 11 +282 val_282 2008-04-08 11 
+138 val_138 2008-04-08 11 +238 val_238 2008-04-08 11 +419 val_419 2008-04-08 11 +15 val_15 2008-04-08 11 +118 val_118 2008-04-08 11 +72 val_72 2008-04-08 11 +90 val_90 2008-04-08 11 +307 val_307 2008-04-08 11 +19 val_19 2008-04-08 11 +435 val_435 2008-04-08 11 +10 val_10 2008-04-08 11 +277 val_277 2008-04-08 11 +273 val_273 2008-04-08 11 +306 val_306 2008-04-08 11 +224 val_224 2008-04-08 11 +309 val_309 2008-04-08 11 +389 val_389 2008-04-08 11 +327 val_327 2008-04-08 11 +242 val_242 2008-04-08 11 +369 val_369 2008-04-08 11 +392 val_392 2008-04-08 11 +272 val_272 2008-04-08 11 +331 val_331 2008-04-08 11 +401 val_401 2008-04-08 11 +242 val_242 2008-04-08 11 +452 val_452 2008-04-08 11 +177 val_177 2008-04-08 11 +226 val_226 2008-04-08 11 +5 val_5 2008-04-08 11 +497 val_497 2008-04-08 11 +402 val_402 2008-04-08 11 +396 val_396 2008-04-08 11 +317 val_317 2008-04-08 11 +395 val_395 2008-04-08 11 +58 val_58 2008-04-08 11 +35 val_35 2008-04-08 11 +336 val_336 2008-04-08 11 +95 val_95 2008-04-08 11 +11 val_11 2008-04-08 11 +168 val_168 2008-04-08 11 +34 val_34 2008-04-08 11 +229 val_229 2008-04-08 11 +233 val_233 2008-04-08 11 +143 val_143 2008-04-08 11 +472 val_472 2008-04-08 11 +322 val_322 2008-04-08 11 +498 val_498 2008-04-08 11 +160 val_160 2008-04-08 11 +195 val_195 2008-04-08 11 +42 val_42 2008-04-08 11 +321 val_321 2008-04-08 11 +430 val_430 2008-04-08 11 +119 val_119 2008-04-08 11 +489 val_489 2008-04-08 11 +458 val_458 2008-04-08 11 +78 val_78 2008-04-08 11 +76 val_76 2008-04-08 11 +41 val_41 2008-04-08 11 +223 val_223 2008-04-08 11 +492 val_492 2008-04-08 11 +149 val_149 2008-04-08 11 +449 val_449 2008-04-08 11 +218 val_218 2008-04-08 11 +228 val_228 2008-04-08 11 +138 val_138 2008-04-08 11 +453 val_453 2008-04-08 11 +30 val_30 2008-04-08 11 +209 val_209 2008-04-08 11 +64 val_64 2008-04-08 11 +468 val_468 2008-04-08 11 +76 val_76 2008-04-08 11 +74 val_74 2008-04-08 11 +342 val_342 2008-04-08 11 +69 val_69 2008-04-08 11 +230 val_230 2008-04-08 11 +33 val_33 2008-04-08 11 +368 val_368 2008-04-08 11 +103 val_103 2008-04-08 11 +296 val_296 2008-04-08 11 +113 val_113 2008-04-08 11 +216 val_216 2008-04-08 11 +367 val_367 2008-04-08 11 +344 val_344 2008-04-08 11 +167 val_167 2008-04-08 11 +274 val_274 2008-04-08 11 +219 val_219 2008-04-08 11 +239 val_239 2008-04-08 11 +485 val_485 2008-04-08 11 +116 val_116 2008-04-08 11 +223 val_223 2008-04-08 11 +256 val_256 2008-04-08 11 +263 val_263 2008-04-08 11 +70 val_70 2008-04-08 11 +487 val_487 2008-04-08 11 +480 val_480 2008-04-08 11 +401 val_401 2008-04-08 11 +288 val_288 2008-04-08 11 +191 val_191 2008-04-08 11 +5 val_5 2008-04-08 11 +244 val_244 2008-04-08 11 +438 val_438 2008-04-08 11 +128 val_128 2008-04-08 11 +467 val_467 2008-04-08 11 +432 val_432 2008-04-08 11 +202 val_202 2008-04-08 11 +316 val_316 2008-04-08 11 +229 val_229 2008-04-08 11 +469 val_469 2008-04-08 11 +463 val_463 2008-04-08 11 +280 val_280 2008-04-08 11 +2 val_2 2008-04-08 11 +35 val_35 2008-04-08 11 +283 val_283 2008-04-08 11 +331 val_331 2008-04-08 11 +235 val_235 2008-04-08 11 +80 val_80 2008-04-08 11 +44 val_44 2008-04-08 11 +193 val_193 2008-04-08 11 +321 val_321 2008-04-08 11 +335 val_335 2008-04-08 11 +104 val_104 2008-04-08 11 +466 val_466 2008-04-08 11 +366 val_366 2008-04-08 11 +175 val_175 2008-04-08 11 +403 val_403 2008-04-08 11 +483 val_483 2008-04-08 11 +53 val_53 2008-04-08 11 +105 val_105 2008-04-08 11 +257 val_257 2008-04-08 11 +406 val_406 2008-04-08 11 +409 val_409 2008-04-08 11 +190 val_190 2008-04-08 11 +406 val_406 2008-04-08 11 +401 val_401 2008-04-08 
11 +114 val_114 2008-04-08 11 +258 val_258 2008-04-08 11 +90 val_90 2008-04-08 11 +203 val_203 2008-04-08 11 +262 val_262 2008-04-08 11 +348 val_348 2008-04-08 11 +424 val_424 2008-04-08 11 +12 val_12 2008-04-08 11 +396 val_396 2008-04-08 11 +201 val_201 2008-04-08 11 +217 val_217 2008-04-08 11 +164 val_164 2008-04-08 11 +431 val_431 2008-04-08 11 +454 val_454 2008-04-08 11 +478 val_478 2008-04-08 11 +298 val_298 2008-04-08 11 +125 val_125 2008-04-08 11 +431 val_431 2008-04-08 11 +164 val_164 2008-04-08 11 +424 val_424 2008-04-08 11 +187 val_187 2008-04-08 11 +382 val_382 2008-04-08 11 +5 val_5 2008-04-08 11 +70 val_70 2008-04-08 11 +397 val_397 2008-04-08 11 +480 val_480 2008-04-08 11 +291 val_291 2008-04-08 11 +24 val_24 2008-04-08 11 +351 val_351 2008-04-08 11 +255 val_255 2008-04-08 11 +104 val_104 2008-04-08 11 +70 val_70 2008-04-08 11 +163 val_163 2008-04-08 11 +438 val_438 2008-04-08 11 +119 val_119 2008-04-08 11 +414 val_414 2008-04-08 11 +200 val_200 2008-04-08 11 +491 val_491 2008-04-08 11 +237 val_237 2008-04-08 11 +439 val_439 2008-04-08 11 +360 val_360 2008-04-08 11 +248 val_248 2008-04-08 11 +479 val_479 2008-04-08 11 +305 val_305 2008-04-08 11 +417 val_417 2008-04-08 11 +199 val_199 2008-04-08 11 +444 val_444 2008-04-08 11 +120 val_120 2008-04-08 11 +429 val_429 2008-04-08 11 +169 val_169 2008-04-08 11 +443 val_443 2008-04-08 11 +323 val_323 2008-04-08 11 +325 val_325 2008-04-08 11 +277 val_277 2008-04-08 11 +230 val_230 2008-04-08 11 +478 val_478 2008-04-08 11 +178 val_178 2008-04-08 11 +468 val_468 2008-04-08 11 +310 val_310 2008-04-08 11 +317 val_317 2008-04-08 11 +333 val_333 2008-04-08 11 +493 val_493 2008-04-08 11 +460 val_460 2008-04-08 11 +207 val_207 2008-04-08 11 +249 val_249 2008-04-08 11 +265 val_265 2008-04-08 11 +480 val_480 2008-04-08 11 +83 val_83 2008-04-08 11 +136 val_136 2008-04-08 11 +353 val_353 2008-04-08 11 +172 val_172 2008-04-08 11 +214 val_214 2008-04-08 11 +462 val_462 2008-04-08 11 +233 val_233 2008-04-08 11 +406 val_406 2008-04-08 11 +133 val_133 2008-04-08 11 +175 val_175 2008-04-08 11 +189 val_189 2008-04-08 11 +454 val_454 2008-04-08 11 +375 val_375 2008-04-08 11 +401 val_401 2008-04-08 11 +421 val_421 2008-04-08 11 +407 val_407 2008-04-08 11 +384 val_384 2008-04-08 11 +256 val_256 2008-04-08 11 +26 val_26 2008-04-08 11 +134 val_134 2008-04-08 11 +67 val_67 2008-04-08 11 +384 val_384 2008-04-08 11 +379 val_379 2008-04-08 11 +18 val_18 2008-04-08 11 +462 val_462 2008-04-08 11 +492 val_492 2008-04-08 11 +100 val_100 2008-04-08 11 +298 val_298 2008-04-08 11 +9 val_9 2008-04-08 11 +341 val_341 2008-04-08 11 +498 val_498 2008-04-08 11 +146 val_146 2008-04-08 11 +458 val_458 2008-04-08 11 +362 val_362 2008-04-08 11 +186 val_186 2008-04-08 11 +285 val_285 2008-04-08 11 +348 val_348 2008-04-08 11 +167 val_167 2008-04-08 11 +18 val_18 2008-04-08 11 +273 val_273 2008-04-08 11 +183 val_183 2008-04-08 11 +281 val_281 2008-04-08 11 +344 val_344 2008-04-08 11 +97 val_97 2008-04-08 11 +469 val_469 2008-04-08 11 +315 val_315 2008-04-08 11 +84 val_84 2008-04-08 11 +28 val_28 2008-04-08 11 +37 val_37 2008-04-08 11 +448 val_448 2008-04-08 11 +152 val_152 2008-04-08 11 +348 val_348 2008-04-08 11 +307 val_307 2008-04-08 11 +194 val_194 2008-04-08 11 +414 val_414 2008-04-08 11 +477 val_477 2008-04-08 11 +222 val_222 2008-04-08 11 +126 val_126 2008-04-08 11 +90 val_90 2008-04-08 11 +169 val_169 2008-04-08 11 +403 val_403 2008-04-08 11 +400 val_400 2008-04-08 11 +200 val_200 2008-04-08 11 +97 val_97 2008-04-08 11 Index: 
src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out (working copy) @@ -17,37 +17,44 @@ type: string expr: substr(value, 4) type: string - # partition fields: 2147483647 + Map-reduce partition columns: + expr: substr(key, 0, 1) + type: string + expr: substr(value, 4) + type: string tag: -1 Reduce Operator Tree: Group By Operator expr: count(DISTINCT KEY.1) - expr: sum(KEY.1) + expr: sum(UDFToDouble(KEY.1)) keys: expr: KEY.0 type: string mode: partial1 File Output Operator table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-njain/307368091/808162418.10001 + /tmp/hive-njain/282197599/271187968.10001 Reduce Output Operator key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: -1 value expressions: expr: 1 - type: string + type: bigint expr: 2 - type: string + type: double Reduce Operator Tree: Group By Operator @@ -56,15 +63,15 @@ keys: expr: KEY.0 type: string - mode: partial2 + mode: unknown Select Operator expressions: expr: 0 type: string expr: 1 + type: bigint + expr: concat(0, UDFToString(2)) type: string - expr: concat(0, 2) - type: string File Output Operator table: input format: org.apache.hadoop.mapred.TextInputFormat Index: src/contrib/hive/ql/src/test/results/clientpositive/input_part5.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input_part5.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/input_part5.q.out (revision 0) @@ -0,0 +1,204 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (and (= (TOK_COLREF x ds) '2008-04-08') (< (TOK_COLREF x key) 100))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + x + Filter Operator + predicate: + expr: ((ds = '2008-04-08') and (key < 100)) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +86 val_86 2008-04-08 11 +27 val_27 2008-04-08 11 +98 val_98 2008-04-08 11 +66 val_66 2008-04-08 11 +37 val_37 2008-04-08 11 +15 val_15 2008-04-08 11 +82 val_82 2008-04-08 11 +17 val_17 2008-04-08 11 +0 val_0 2008-04-08 11 +57 val_57 2008-04-08 11 +20 val_20 2008-04-08 11 +92 val_92 2008-04-08 11 +47 val_47 2008-04-08 11 +72 val_72 2008-04-08 11 +4 val_4 2008-04-08 11 +35 val_35 2008-04-08 11 +54 val_54 2008-04-08 11 +51 val_51 2008-04-08 11 +65 val_65 2008-04-08 11 +83 val_83 2008-04-08 11 +12 val_12 2008-04-08 11 +67 val_67 2008-04-08 11 +84 val_84 2008-04-08 11 +58 val_58 2008-04-08 11 +8 val_8 
2008-04-08 11 +24 val_24 2008-04-08 11 +42 val_42 2008-04-08 11 +0 val_0 2008-04-08 11 +96 val_96 2008-04-08 11 +26 val_26 2008-04-08 11 +51 val_51 2008-04-08 11 +43 val_43 2008-04-08 11 +95 val_95 2008-04-08 11 +98 val_98 2008-04-08 11 +85 val_85 2008-04-08 11 +77 val_77 2008-04-08 11 +0 val_0 2008-04-08 11 +87 val_87 2008-04-08 11 +15 val_15 2008-04-08 11 +72 val_72 2008-04-08 11 +90 val_90 2008-04-08 11 +19 val_19 2008-04-08 11 +10 val_10 2008-04-08 11 +5 val_5 2008-04-08 11 +58 val_58 2008-04-08 11 +35 val_35 2008-04-08 11 +95 val_95 2008-04-08 11 +11 val_11 2008-04-08 11 +34 val_34 2008-04-08 11 +42 val_42 2008-04-08 11 +78 val_78 2008-04-08 11 +76 val_76 2008-04-08 11 +41 val_41 2008-04-08 11 +30 val_30 2008-04-08 11 +64 val_64 2008-04-08 11 +76 val_76 2008-04-08 11 +74 val_74 2008-04-08 11 +69 val_69 2008-04-08 11 +33 val_33 2008-04-08 11 +70 val_70 2008-04-08 11 +5 val_5 2008-04-08 11 +2 val_2 2008-04-08 11 +35 val_35 2008-04-08 11 +80 val_80 2008-04-08 11 +44 val_44 2008-04-08 11 +53 val_53 2008-04-08 11 +90 val_90 2008-04-08 11 +12 val_12 2008-04-08 11 +5 val_5 2008-04-08 11 +70 val_70 2008-04-08 11 +24 val_24 2008-04-08 11 +70 val_70 2008-04-08 11 +83 val_83 2008-04-08 11 +26 val_26 2008-04-08 11 +67 val_67 2008-04-08 11 +18 val_18 2008-04-08 11 +9 val_9 2008-04-08 11 +18 val_18 2008-04-08 11 +97 val_97 2008-04-08 11 +84 val_84 2008-04-08 11 +28 val_28 2008-04-08 11 +37 val_37 2008-04-08 11 +90 val_90 2008-04-08 11 +97 val_97 2008-04-08 11 +86 val_86 2008-04-08 12 +27 val_27 2008-04-08 12 +98 val_98 2008-04-08 12 +66 val_66 2008-04-08 12 +37 val_37 2008-04-08 12 +15 val_15 2008-04-08 12 +82 val_82 2008-04-08 12 +17 val_17 2008-04-08 12 +0 val_0 2008-04-08 12 +57 val_57 2008-04-08 12 +20 val_20 2008-04-08 12 +92 val_92 2008-04-08 12 +47 val_47 2008-04-08 12 +72 val_72 2008-04-08 12 +4 val_4 2008-04-08 12 +35 val_35 2008-04-08 12 +54 val_54 2008-04-08 12 +51 val_51 2008-04-08 12 +65 val_65 2008-04-08 12 +83 val_83 2008-04-08 12 +12 val_12 2008-04-08 12 +67 val_67 2008-04-08 12 +84 val_84 2008-04-08 12 +58 val_58 2008-04-08 12 +8 val_8 2008-04-08 12 +24 val_24 2008-04-08 12 +42 val_42 2008-04-08 12 +0 val_0 2008-04-08 12 +96 val_96 2008-04-08 12 +26 val_26 2008-04-08 12 +51 val_51 2008-04-08 12 +43 val_43 2008-04-08 12 +95 val_95 2008-04-08 12 +98 val_98 2008-04-08 12 +85 val_85 2008-04-08 12 +77 val_77 2008-04-08 12 +0 val_0 2008-04-08 12 +87 val_87 2008-04-08 12 +15 val_15 2008-04-08 12 +72 val_72 2008-04-08 12 +90 val_90 2008-04-08 12 +19 val_19 2008-04-08 12 +10 val_10 2008-04-08 12 +5 val_5 2008-04-08 12 +58 val_58 2008-04-08 12 +35 val_35 2008-04-08 12 +95 val_95 2008-04-08 12 +11 val_11 2008-04-08 12 +34 val_34 2008-04-08 12 +42 val_42 2008-04-08 12 +78 val_78 2008-04-08 12 +76 val_76 2008-04-08 12 +41 val_41 2008-04-08 12 +30 val_30 2008-04-08 12 +64 val_64 2008-04-08 12 +76 val_76 2008-04-08 12 +74 val_74 2008-04-08 12 +69 val_69 2008-04-08 12 +33 val_33 2008-04-08 12 +70 val_70 2008-04-08 12 +5 val_5 2008-04-08 12 +2 val_2 2008-04-08 12 +35 val_35 2008-04-08 12 +80 val_80 2008-04-08 12 +44 val_44 2008-04-08 12 +53 val_53 2008-04-08 12 +90 val_90 2008-04-08 12 +12 val_12 2008-04-08 12 +5 val_5 2008-04-08 12 +70 val_70 2008-04-08 12 +24 val_24 2008-04-08 12 +70 val_70 2008-04-08 12 +83 val_83 2008-04-08 12 +26 val_26 2008-04-08 12 +67 val_67 2008-04-08 12 +18 val_18 2008-04-08 12 +9 val_9 2008-04-08 12 +18 val_18 2008-04-08 12 +97 val_97 2008-04-08 12 +84 val_84 2008-04-08 12 +28 val_28 2008-04-08 12 +37 val_37 2008-04-08 12 +90 val_90 2008-04-08 12 +97 val_97 2008-04-08 12 
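The .q source for input_part5 is not included in this patch; reconstructed from the abstract syntax tree above, the test query is approximately:

SELECT x.* FROM srcpart x WHERE x.ds = '2008-04-08' AND x.key < 100

The WHERE clause mixes a partition column (ds) with a regular column (key), which is why the plan above keeps a Map Reduce stage for the key filter and why the expected output spans both hr partitions (11 and 12) of ds=2008-04-08.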
Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out (working copy) @@ -11,12 +11,18 @@ Map Reduce Alias -> Map Operator Tree: src - Reduce Output Operator - key expressions: - expr: substr(key, 0, 1) + Select Operator + expressions: + expr: key type: string - # partition fields: -1 - tag: -1 + Reduce Output Operator + key expressions: + expr: substr(0, 0, 1) + type: string + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 Reduce Operator Tree: Group By Operator keys: @@ -25,25 +31,28 @@ mode: partial1 File Output Operator table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-njain/1561965178/525265780.10001 + /tmp/hive-njain/213545057/773995409.10001 Reduce Output Operator key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: -1 Reduce Operator Tree: Group By Operator keys: expr: KEY.0 type: string - mode: partial2 + mode: unknown Select Operator expressions: expr: 0 Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out (working copy) @@ -11,12 +11,18 @@ Map Reduce Alias -> Map Operator Tree: src - Reduce Output Operator - key expressions: - expr: substr(value, 4, 1) + Select Operator + expressions: + expr: value type: string - # partition fields: -1 - tag: -1 + Reduce Output Operator + key expressions: + expr: substr(0, 4, 1) + type: string + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 Reduce Operator Tree: Group By Operator keys: @@ -25,25 +31,28 @@ mode: partial1 File Output Operator table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-njain/256745338/35530060.10001 + /tmp/hive-njain/911936039/66288606.10001 Reduce Output Operator key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: -1 Reduce Operator Tree: Group By Operator keys: expr: KEY.0 type: string - mode: partial2 + mode: unknown Select Operator expressions: expr: 0 Index: src/contrib/hive/ql/src/test/results/clientpositive/input1_limit.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input1_limit.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input1_limit.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: (key < 100) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -30,7 +30,7 @@ Filter Operator predicate: 
expr: (key < 100) - type: Boolean + type: boolean Select Operator expressions: expr: key Index: src/contrib/hive/ql/src/test/results/clientpositive/input2_limit.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/input2_limit.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/input2_limit.q.out (working copy) @@ -13,7 +13,7 @@ Filter Operator predicate: expr: (key < 300) - type: Boolean + type: boolean Select Operator expressions: expr: key Index: src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out (working copy) @@ -10,22 +10,28 @@ Map Reduce Alias -> Map Operator Tree: src_thrift - Filter Operator - predicate: - expr: (lint[0] > 0) - type: Boolean - Select Operator - expressions: - expr: lint[1] - type: int - expr: lintstring[0].MYSTRING - type: string - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - name: dest1 + Select Operator + expressions: + expr: lint + type: array + expr: lintstring + type: array + Filter Operator + predicate: + expr: (0[0] > 0) + type: boolean + Select Operator + expressions: + expr: 0[1] + type: int + expr: 1[0].MYSTRING + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator Index: src/contrib/hive/ql/src/test/results/clientpositive/scriptfile1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/scriptfile1.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/scriptfile1.q.out (revision 0) @@ -0,0 +1,9 @@ +10 val_10 +100 val_100 +100 val_100 +103 val_103 +103 val_103 +104 val_104 +104 val_104 +105 val_105 +310 val_310 Index: src/contrib/hive/ql/src/test/results/clientpositive/sample1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/sample1.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/sample1.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart (TOK_TABLESAMPLE 1 1) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (and (= (TOK_COLREF s ds) '2008-04-08') (= (TOK_COLREF s hr) '11'))))) + (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart (TOK_TABLESAMPLE 1 1 (TOK_FUNCTION rand)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (and (= (TOK_COLREF s ds) '2008-04-08') (= (TOK_COLREF s hr) '11'))))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -12,36 +12,39 @@ s Filter Operator predicate: - expr: ((ds = '2008-04-08') and (hr = '11')) - type: Boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - File Output Operator - directory: 
/tmp/hive-njain/1799446246/1449752317.10000.insclause-0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - properties: - name dest1 - serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} - serialization.format 1 - columns key,value,dt,hr - SORTBUCKETCOLSPREFIX TRUE - bucket_count -1 - serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - name: dest1 + expr: (((default_sample_hashfn(rand()) & 2147483647) % 1) = 0) + type: boolean + Filter Operator + predicate: + expr: ((ds = '2008-04-08') and (hr = '11')) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + File Output Operator + directory: /tmp/hive-njain/804625404/371878036.10000.insclause-0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name dest1 + serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} + serialization.format 1 + columns key,value,dt,hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Needs Tagging: Path -> Alias: file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 @@ -72,7 +75,7 @@ Move Operator tables: replace: - source: /tmp/hive-njain/1799446246/1449752317.10000.insclause-0 + source: /tmp/hive-njain/804625404/371878036.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -81,7 +84,6 @@ serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 columns key,value,dt,hr - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat Index: src/contrib/hive/ql/src/test/results/clientpositive/inputddl2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/inputddl2.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/inputddl2.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_CREATETABLE INPUTDDL2 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEPARTCOLS (TOK_TABCOLLIST (TOK_TABCOL ds TOK_DATETIME) (TOK_TABCOL country TOK_STRING)))) + (TOK_CREATETABLE INPUTDDL2 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEPARTCOLS (TOK_TABCOLLIST (TOK_TABCOL ds TOK_DATETIME) (TOK_TABCOL country TOK_STRING))) TOK_TBLTEXTFILE) STAGE DEPENDENCIES: Stage-0 is a root stage Index: 
src/contrib/hive/ql/src/test/results/clientpositive/groupby1_map.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/groupby1_map.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/groupby1_map.q.out (revision 0) @@ -0,0 +1,401 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4)))) (TOK_GROUPBY (TOK_COLREF src key)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Group By Operator + + expr: sum(UDFToDouble(substr(value, 4))) + keys: + expr: key + type: string + mode: hash + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: 1 + type: double + Reduce Operator Tree: + Group By Operator + + expr: sum(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: partial2 + File Output Operator + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/126291708/5299613.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: -1 + value expressions: + expr: 1 + type: double + Reduce Operator Tree: + Group By Operator + + expr: sum(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: unknown + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: double + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +0 0.0 +10 10.0 +100 200.0 +103 206.0 +104 208.0 +105 105.0 +11 11.0 +111 111.0 +113 226.0 +114 114.0 +116 116.0 +118 236.0 +119 357.0 +12 24.0 +120 240.0 +125 250.0 +126 126.0 +128 384.0 +129 258.0 +131 131.0 +133 133.0 +134 268.0 +136 136.0 +137 274.0 +138 552.0 +143 143.0 +145 145.0 +146 292.0 +149 298.0 +15 30.0 +150 150.0 +152 304.0 +153 153.0 +155 155.0 +156 156.0 +157 157.0 +158 158.0 +160 160.0 +162 162.0 +163 163.0 +164 328.0 +165 330.0 +166 166.0 +167 501.0 +168 168.0 +169 676.0 +17 17.0 +170 170.0 +172 344.0 +174 348.0 +175 350.0 +176 352.0 +177 177.0 +178 178.0 +179 358.0 +18 36.0 +180 180.0 +181 181.0 +183 183.0 +186 186.0 +187 561.0 +189 189.0 +19 19.0 +190 190.0 +191 382.0 +192 192.0 +193 579.0 +194 194.0 +195 390.0 +196 196.0 +197 394.0 +199 597.0 +2 2.0 +20 20.0 +200 400.0 +201 201.0 +202 202.0 +203 406.0 +205 410.0 +207 414.0 +208 624.0 +209 418.0 +213 426.0 +214 214.0 +216 432.0 +217 434.0 +218 218.0 +219 438.0 +221 442.0 +222 222.0 +223 446.0 +224 448.0 +226 226.0 +228 228.0 +229 458.0 +230 1150.0 +233 466.0 +235 235.0 +237 474.0 +238 476.0 +239 478.0 +24 48.0 +241 241.0 +242 484.0 +244 
244.0 +247 247.0 +248 248.0 +249 249.0 +252 252.0 +255 510.0 +256 512.0 +257 257.0 +258 258.0 +26 52.0 +260 260.0 +262 262.0 +263 263.0 +265 530.0 +266 266.0 +27 27.0 +272 544.0 +273 819.0 +274 274.0 +275 275.0 +277 1108.0 +278 556.0 +28 28.0 +280 560.0 +281 562.0 +282 564.0 +283 283.0 +284 284.0 +285 285.0 +286 286.0 +287 287.0 +288 576.0 +289 289.0 +291 291.0 +292 292.0 +296 296.0 +298 894.0 +30 30.0 +302 302.0 +305 305.0 +306 306.0 +307 614.0 +308 308.0 +309 618.0 +310 310.0 +311 933.0 +315 315.0 +316 948.0 +317 634.0 +318 954.0 +321 642.0 +322 644.0 +323 323.0 +325 650.0 +327 981.0 +33 33.0 +331 662.0 +332 332.0 +333 666.0 +335 335.0 +336 336.0 +338 338.0 +339 339.0 +34 34.0 +341 341.0 +342 684.0 +344 688.0 +345 345.0 +348 1740.0 +35 105.0 +351 351.0 +353 706.0 +356 356.0 +360 360.0 +362 362.0 +364 364.0 +365 365.0 +366 366.0 +367 734.0 +368 368.0 +369 1107.0 +37 74.0 +373 373.0 +374 374.0 +375 375.0 +377 377.0 +378 378.0 +379 379.0 +382 764.0 +384 1152.0 +386 386.0 +389 389.0 +392 392.0 +393 393.0 +394 394.0 +395 790.0 +396 1188.0 +397 794.0 +399 798.0 +4 4.0 +400 400.0 +401 2005.0 +402 402.0 +403 1209.0 +404 808.0 +406 1624.0 +407 407.0 +409 1227.0 +41 41.0 +411 411.0 +413 826.0 +414 828.0 +417 1251.0 +418 418.0 +419 419.0 +42 84.0 +421 421.0 +424 848.0 +427 427.0 +429 858.0 +43 43.0 +430 1290.0 +431 1293.0 +432 432.0 +435 435.0 +436 436.0 +437 437.0 +438 1314.0 +439 878.0 +44 44.0 +443 443.0 +444 444.0 +446 446.0 +448 448.0 +449 449.0 +452 452.0 +453 453.0 +454 1362.0 +455 455.0 +457 457.0 +458 916.0 +459 918.0 +460 460.0 +462 924.0 +463 926.0 +466 1398.0 +467 467.0 +468 1872.0 +469 2345.0 +47 47.0 +470 470.0 +472 472.0 +475 475.0 +477 477.0 +478 956.0 +479 479.0 +480 1440.0 +481 481.0 +482 482.0 +483 483.0 +484 484.0 +485 485.0 +487 487.0 +489 1956.0 +490 490.0 +491 491.0 +492 984.0 +493 493.0 +494 494.0 +495 495.0 +496 496.0 +497 497.0 +498 1494.0 +5 15.0 +51 102.0 +53 53.0 +54 54.0 +57 57.0 +58 116.0 +64 64.0 +65 65.0 +66 66.0 +67 134.0 +69 69.0 +70 210.0 +72 144.0 +74 74.0 +76 152.0 +77 77.0 +78 78.0 +8 8.0 +80 80.0 +82 82.0 +83 166.0 +84 168.0 +85 85.0 +86 86.0 +87 87.0 +9 9.0 +90 270.0 +92 92.0 +95 190.0 +96 96.0 +97 194.0 +98 196.0 Index: src/contrib/hive/ql/src/test/results/clientpositive/sample5.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/sample5.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/sample5.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 5 key) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) + (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 5 (TOK_COLREF key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -13,7 +13,7 @@ Filter Operator predicate: expr: (((default_sample_hashfn(key) & 2147483647) % 5) = 0) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -21,7 +21,7 @@ expr: value type: string File Output Operator - directory: /tmp/hive-njain/535229973/587370170.10000.insclause-0 + directory: /tmp/hive-njain/126424010/37680496.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -30,7 +30,6 @@ serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - 
SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -49,9 +48,10 @@ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name srcbucket + bucket_field_name key serialization.ddl struct srcbucket { string key, string value} - serialization.format 1 columns key,value + serialization.format 1 bucket_count 2 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -64,7 +64,7 @@ Move Operator tables: replace: - source: /tmp/hive-njain/535229973/587370170.10000.insclause-0 + source: /tmp/hive-njain/126424010/37680496.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -73,7 +73,6 @@ serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out (working copy) @@ -15,7 +15,9 @@ key expressions: expr: key type: string - # partition fields: -1 + Map-reduce partition columns: + expr: rand() + type: double tag: -1 value expressions: expr: substr(value, 4) @@ -23,29 +25,32 @@ Reduce Operator Tree: Group By Operator - expr: sum(VALUE.0) + expr: sum(UDFToDouble(VALUE.0)) keys: expr: KEY.0 type: string mode: partial1 File Output Operator table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-njain/7427260/341902671.10001 + /tmp/hive-njain/1162348581/1600132674.10001 Reduce Output Operator key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: -1 value expressions: expr: 1 - type: string + type: double Reduce Operator Tree: Group By Operator @@ -53,13 +58,13 @@ keys: expr: KEY.0 type: string - mode: partial2 + mode: unknown Select Operator expressions: expr: 0 type: string expr: 1 - type: string + type: double Limit File Output Operator table: Index: src/contrib/hive/ql/src/test/results/clientpositive/inputddl4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/inputddl4.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/inputddl4.q.out (working copy) @@ -17,4 +17,4 @@ ds datetime country string Detailed Table Information: -Table(tableName:inputddl4,dbName:default,owner:njain,createTime:1224285030,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:viewtime,type:datetime,comment:null), FieldSchema(name:userid,type:int,comment:null), FieldSchema(name:page_url,type:string,comment:null), FieldSchema(name:referrer_url,type:string,comment:null), 
FieldSchema(name:friends,type:array,comment:null), FieldSchema(name:properties,type:map,comment:null), FieldSchema(name:ip,type:string,comment:'IP Address of the User')],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/inputddl4,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:32,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[userid],sortCols:[Order(col:viewtime,order:1)],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null), FieldSchema(name:country,type:string,comment:null)],parameters:{comment='This is the page view table'}) +Table(tableName:inputddl4,dbName:default,owner:njain,createTime:1225993821,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:viewtime,type:datetime,comment:null), FieldSchema(name:userid,type:int,comment:null), FieldSchema(name:page_url,type:string,comment:null), FieldSchema(name:referrer_url,type:string,comment:null), FieldSchema(name:friends,type:array,comment:null), FieldSchema(name:properties,type:map,comment:null), FieldSchema(name:ip,type:string,comment:IP Address of the User)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/inputddl4,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:32,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[userid],sortCols:[Order(col:viewtime,order:1)],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null), FieldSchema(name:country,type:string,comment:null)],parameters:{comment=This is the page view table}) Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out (working copy) @@ -15,7 +15,9 @@ key expressions: expr: key type: string - # partition fields: -1 + Map-reduce partition columns: + expr: rand() + type: double tag: -1 value expressions: expr: substr(value, 4) @@ -23,29 +25,32 @@ Reduce Operator Tree: Group By Operator - expr: sum(VALUE.0) + expr: sum(UDFToDouble(VALUE.0)) keys: expr: KEY.0 type: string mode: partial1 File Output Operator table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-njain/606295988/175965730.10002 + /tmp/hive-njain/4007501/112626006.10002 Reduce Output Operator key expressions: expr: 0 type: string - # partition fields: 1 + Map-reduce partition columns: + expr: 0 + type: string tag: -1 value expressions: expr: 1 - type: string + type: double Reduce Operator Tree: Group By Operator @@ -53,13 +58,13 @@ keys: expr: KEY.0 type: string - mode: partial2 + mode: unknown Select Operator expressions: expr: 0 type: string expr: 1 - type: string + type: double Limit File Output Operator table: Index: 
src/contrib/hive/ql/src/test/results/clientpositive/groupby3_map.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/groupby3_map.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/groupby3_map.q.out (revision 0) @@ -0,0 +1,119 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION avg (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTIONDI avg (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION max (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION min (TOK_FUNCTION substr (TOK_COLREF src value) 4)))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Select Operator + expressions: + expr: value + type: string + Group By Operator + + expr: avg(DISTINCT UDFToDouble(substr(0, 4))) + expr: sum(UDFToDouble(substr(0, 4))) + expr: avg(UDFToDouble(substr(0, 4))) + expr: min(UDFToDouble(substr(0, 4))) + expr: max(UDFToDouble(substr(0, 4))) + keys: + expr: substr(0, 4) + type: string + mode: hash + Reduce Output Operator + key expressions: + expr: 0 + type: string + Map-reduce partition columns: + expr: 0 + type: string + tag: -1 + value expressions: + expr: 1 + type: string + expr: 2 + type: double + expr: 3 + type: string + expr: 4 + type: double + expr: 5 + type: double + Reduce Operator Tree: + Group By Operator + + expr: avg(DISTINCT UDFToDouble(KEY.0)) + expr: sum(VALUE.1) + expr: avg(VALUE.2) + expr: min(VALUE.3) + expr: max(VALUE.4) + mode: partial2 + File Output Operator + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + name: binary_table + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/130240402/83496104.10001 + Reduce Output Operator + tag: -1 + value expressions: + expr: 0 + type: string + expr: 1 + type: double + expr: 2 + type: string + expr: 3 + type: double + expr: 4 + type: double + Reduce Operator Tree: + Group By Operator + + expr: avg(VALUE.0) + expr: sum(VALUE.1) + expr: avg(VALUE.2) + expr: min(VALUE.3) + expr: max(VALUE.4) + mode: unknown + Select Operator + expressions: + expr: 1 + type: double + expr: 2 + type: string + expr: 0 + type: string + expr: 4 + type: double + expr: 3 + type: double + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +130091.0 260.182 256.10355987055016 498.0 0.0 Index: src/contrib/hive/ql/src/test/results/clientpositive/sample7.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/sample7.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/sample7.q.out (working 
copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 4 key) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (> (TOK_COLREF s key) 100)))) + (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 4 (TOK_COLREF key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (> (TOK_COLREF s key) 100)))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -13,11 +13,11 @@ Filter Operator predicate: expr: (((default_sample_hashfn(key) & 2147483647) % 4) = 0) - type: Boolean + type: boolean Filter Operator predicate: expr: (key > 100) - type: Boolean + type: boolean Select Operator expressions: expr: key @@ -25,7 +25,7 @@ expr: value type: string File Output Operator - directory: /tmp/hive-njain/1265167991/144531187.10000.insclause-0 + directory: /tmp/hive-njain/682589217/559576387.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -34,7 +34,6 @@ serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -44,18 +43,19 @@ name: dest1 Needs Tagging: Path -> Alias: - file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt Path -> Partition: - file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt Partition input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name srcbucket + bucket_field_name key serialization.ddl struct srcbucket { string key, string value} - serialization.format 1 columns key,value + serialization.format 1 bucket_count 2 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -68,7 +68,7 @@ Move Operator tables: replace: - source: /tmp/hive-njain/1265167991/144531187.10000.insclause-0 + source: /tmp/hive-njain/682589217/559576387.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -77,7 +77,6 @@ serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value - SORTBUCKETCOLSPREFIX TRUE bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat @@ -181,113 +180,3 @@ 477 val_477 169 val_169 400 val_400 -271 val_272 -217 val_218 -455 val_456 -231 val_232 -448 val_449 -246 val_247 -440 val_441 -147 val_148 -356 val_357 -217 val_218 -110 val_111 -275 val_276 -293 val_294 -286 val_287 -408 val_409 -477 val_478 -455 val_456 -367 val_368 -488 val_489 -349 val_350 -161 val_162 -224 val_225 -206 val_207 -114 val_115 -239 val_240 -389 val_390 -114 val_115 -235 val_236 -165 val_166 -264 val_265 -129 val_130 -257 val_258 -323 val_324 -411 val_412 -338 val_339 -147 val_148 -473 val_474 -213 val_214 -426 val_427 -132 val_133 -121 val_122 -129 val_130 -246 val_247 -491 
val_492 -352 val_353 -349 val_350 -480 val_481 -480 val_481 -183 val_184 -392 val_393 -275 val_276 -114 val_115 -161 val_162 -264 val_265 -484 val_485 -305 val_306 -367 val_368 -260 val_261 -349 val_350 -363 val_364 -118 val_119 -121 val_122 -239 val_240 -143 val_144 -341 val_342 -363 val_364 -293 val_294 -206 val_207 -381 val_382 -404 val_405 -206 val_207 -385 val_386 -260 val_261 -404 val_405 -451 val_452 -132 val_133 -356 val_357 -352 val_353 -330 val_331 -257 val_258 -374 val_375 -437 val_438 -334 val_335 -404 val_405 -268 val_269 -491 val_492 -392 val_393 -341 val_342 -118 val_119 -172 val_173 -136 val_137 -349 val_350 -462 val_463 -408 val_409 -378 val_379 -495 val_496 -385 val_386 -389 val_390 -125 val_126 -415 val_416 -228 val_229 -363 val_364 -459 val_460 -341 val_342 -440 val_441 -309 val_310 -161 val_162 -161 val_162 -118 val_119 -242 val_243 Index: src/contrib/hive/ql/src/test/results/clientpositive/inputddl6.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/inputddl6.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/clientpositive/inputddl6.q.out (working copy) @@ -2,7 +2,7 @@ value string ds datetime Detailed Table Information: -Table(tableName:inputddl6,dbName:default,owner:njain,createTime:1224285279,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/inputddl6,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null)],parameters:{SORTBUCKETCOLSPREFIX=TRUE}) +Table(tableName:inputddl6,dbName:default,owner:njain,createTime:1225994083,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/inputddl6,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null)],parameters:{}) key string value string ds datetime Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby5_map.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/clientpositive/groupby5_map.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/clientpositive/groupby5_map.q.out (revision 0) @@ -0,0 +1,56 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_COLREF src key)))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Select Operator + expressions: + expr: key + type: string + 
Index: src/contrib/hive/ql/src/test/results/clientpositive/groupby5_map.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/clientpositive/groupby5_map.q.out (revision 0)
+++ src/contrib/hive/ql/src/test/results/clientpositive/groupby5_map.q.out (revision 0)
@@ -0,0 +1,56 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_COLREF src key))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+            Group By Operator
+
+                    expr: sum(UDFToDouble(0))
+              mode: hash
+              Reduce Output Operator
+                Map-reduce partition columns:
+                      expr: rand()
+                      type: double
+                tag: -1
+                value expressions:
+                      expr: 0
+                      type: double
+      Reduce Operator Tree:
+        Group By Operator
+
+              expr: sum(VALUE.0)
+          mode: unknown
+          Select Operator
+            expressions:
+                  expr: 0
+                  type: double
+            File Output Operator
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace:
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+              name: dest1
+
+
+130091.0
Index: src/contrib/hive/ql/src/test/results/compiler/parse/cast1.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/parse/cast1.q.out (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/parse/cast1.q.out (working copy)
@@ -1 +1 @@
-(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (+ 3 2)) (TOK_SELEXPR (+ 3.0 2)) (TOK_SELEXPR (+ 3 2.0)) (TOK_SELEXPR (+ 3.0 2.0)) (TOK_SELEXPR (+ 3 (TOK_FUNCTION TOK_INT 2.0))) (TOK_SELEXPR (TOK_FUNCTION TOK_BOOLEAN 1)) (TOK_SELEXPR (TOK_FUNCTION TOK_INT TRUE))) (TOK_WHERE (= (TOK_COLREF src key) 86)))) null
\ No newline at end of file
+(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ 3 2)) (TOK_SELEXPR (+ 3.0 2)) (TOK_SELEXPR (+ 3 2.0)) (TOK_SELEXPR (+ 3.0 2.0)) (TOK_SELEXPR (+ 3 (TOK_FUNCTION TOK_INT 2.0))) (TOK_SELEXPR (TOK_FUNCTION TOK_BOOLEAN 1)) (TOK_SELEXPR (TOK_FUNCTION TOK_INT TRUE))) (TOK_WHERE (= (TOK_COLREF src key) 86)))) null
\ No newline at end of file
Index: src/contrib/hive/ql/src/test/results/compiler/parse/input2.q.out
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/parse/input2.q.out (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/parse/input2.q.out (working copy)
@@ -1 +1 @@
-(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (< (TOK_COLREF src key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest2)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_COLREF src value))) (TOK_WHERE (and (>= (TOK_COLREF src key) 100) (< (TOK_COLREF src key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest3 (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key))) (TOK_WHERE (>= (TOK_COLREF src key) 200)))) null
\ No newline at end of file
+(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (< (TOK_COLREF src key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest2)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_COLREF src value))) (TOK_WHERE (and (>= (TOK_COLREF src key) 100) (< (TOK_COLREF src key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest3 (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR 2)) (TOK_WHERE (>= (TOK_COLREF src key)
200)))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/input3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/input3.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/input3.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (< (TOK_COLREF src key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest2)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_COLREF src value))) (TOK_WHERE (and (>= (TOK_COLREF src key) 100) (< (TOK_COLREF src key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest3 (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key))) (TOK_WHERE (and (>= (TOK_COLREF src key) 200) (< (TOK_COLREF src key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../../../../build/contrib/hive/ql/test/data/warehouse/dest4.out')) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src value))) (TOK_WHERE (>= (TOK_COLREF src key) 300)))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (< (TOK_COLREF src key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest2)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_COLREF src value))) (TOK_WHERE (and (>= (TOK_COLREF src key) 100) (< (TOK_COLREF src key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest3 (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR 2)) (TOK_WHERE (and (>= (TOK_COLREF src key) 200) (< (TOK_COLREF src key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../../../../build/contrib/hive/ql/test/data/warehouse/dest4.out')) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src value))) (TOK_WHERE (>= (TOK_COLREF src key) 300)))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/join4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/join4.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/join4.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) c1) (TOK_SELEXPR (TOK_COLREF src1 value) c2)) (TOK_WHERE (and (> (TOK_COLREF src1 key) 10) (< (TOK_COLREF src1 key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src2 key) c3) (TOK_SELEXPR (TOK_COLREF src2 value) c4)) (TOK_WHERE (and (> (TOK_COLREF src2 key) 15) (< (TOK_COLREF src2 key) 25))))) b) (= (TOK_COLREF a c1) (TOK_COLREF b c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a c1) c1) (TOK_SELEXPR (TOK_COLREF a c2) c2) (TOK_SELEXPR (TOK_COLREF b c3) c3) (TOK_SELEXPR (TOK_COLREF b c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c c1)) (TOK_SELEXPR (TOK_COLREF c c2)) (TOK_SELEXPR (TOK_COLREF c c3)) (TOK_SELEXPR (TOK_COLREF c c4))))) null \ No newline at end of file +(TOK_QUERY 
(TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) c1) (TOK_SELEXPR (TOK_COLREF src1 value) c2)) (TOK_WHERE (and (> (TOK_COLREF src1 key) 10) (< (TOK_COLREF src1 key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src2 key) c3) (TOK_SELEXPR (TOK_COLREF src2 value) c4)) (TOK_WHERE (and (> (TOK_COLREF src2 key) 15) (< (TOK_COLREF src2 key) 25))))) b) (= (TOK_COLREF a c1) (TOK_COLREF b c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a c1) c1) (TOK_SELEXPR (TOK_COLREF a c2) c2) (TOK_SELEXPR (TOK_COLREF b c3) c3) (TOK_SELEXPR (TOK_COLREF b c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c c1)) (TOK_SELEXPR (TOK_COLREF c c2)) (TOK_SELEXPR (TOK_COLREF c c3)) (TOK_SELEXPR (TOK_COLREF c c4))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/input4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/input4.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/input4.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_COLLIST (TOK_COLREF src key) (TOK_COLREF src value)) (TOK_ALIASLIST tkey tvalue) '/bin/cat'))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100)))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100)))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/join5.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/join5.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/join5.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) c1) (TOK_SELEXPR (TOK_COLREF src1 value) c2)) (TOK_WHERE (and (> (TOK_COLREF src1 key) 10) (< (TOK_COLREF src1 key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src2 key) c3) (TOK_SELEXPR (TOK_COLREF src2 value) c4)) (TOK_WHERE (and (> (TOK_COLREF src2 key) 15) (< (TOK_COLREF src2 key) 25))))) b) (= (TOK_COLREF a c1) (TOK_COLREF b c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 
(TOK_COLREF a c1) c1) (TOK_SELEXPR (TOK_COLREF a c2) c2) (TOK_SELEXPR (TOK_COLREF b c3) c3) (TOK_SELEXPR (TOK_COLREF b c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c c1)) (TOK_SELEXPR (TOK_COLREF c c2)) (TOK_SELEXPR (TOK_COLREF c c3)) (TOK_SELEXPR (TOK_COLREF c c4))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) c1) (TOK_SELEXPR (TOK_COLREF src1 value) c2)) (TOK_WHERE (and (> (TOK_COLREF src1 key) 10) (< (TOK_COLREF src1 key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src2 key) c3) (TOK_SELEXPR (TOK_COLREF src2 value) c4)) (TOK_WHERE (and (> (TOK_COLREF src2 key) 15) (< (TOK_COLREF src2 key) 25))))) b) (= (TOK_COLREF a c1) (TOK_COLREF b c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a c1) c1) (TOK_SELEXPR (TOK_COLREF a c2) c2) (TOK_SELEXPR (TOK_COLREF b c3) c3) (TOK_SELEXPR (TOK_COLREF b c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c c1)) (TOK_SELEXPR (TOK_COLREF c c2)) (TOK_SELEXPR (TOK_COLREF c c3)) (TOK_SELEXPR (TOK_COLREF c c4))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/input5.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/input5.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/input5.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_COLLIST (TOK_COLREF src_thrift lint) (TOK_COLREF src_thrift lintstring)) (TOK_ALIASLIST tkey tvalue) '/bin/cat'))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src_thrift lint) (TOK_COLREF src_thrift lintstring)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/join6.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/join6.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/join6.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) c1) (TOK_SELEXPR (TOK_COLREF src1 value) c2)) (TOK_WHERE (and (> (TOK_COLREF src1 key) 10) (< (TOK_COLREF src1 key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT 
(TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src2 key) c3) (TOK_SELEXPR (TOK_COLREF src2 value) c4)) (TOK_WHERE (and (> (TOK_COLREF src2 key) 15) (< (TOK_COLREF src2 key) 25))))) b) (= (TOK_COLREF a c1) (TOK_COLREF b c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a c1) c1) (TOK_SELEXPR (TOK_COLREF a c2) c2) (TOK_SELEXPR (TOK_COLREF b c3) c3) (TOK_SELEXPR (TOK_COLREF b c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c c1)) (TOK_SELEXPR (TOK_COLREF c c2)) (TOK_SELEXPR (TOK_COLREF c c3)) (TOK_SELEXPR (TOK_COLREF c c4))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) c1) (TOK_SELEXPR (TOK_COLREF src1 value) c2)) (TOK_WHERE (and (> (TOK_COLREF src1 key) 10) (< (TOK_COLREF src1 key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src2 key) c3) (TOK_SELEXPR (TOK_COLREF src2 value) c4)) (TOK_WHERE (and (> (TOK_COLREF src2 key) 15) (< (TOK_COLREF src2 key) 25))))) b) (= (TOK_COLREF a c1) (TOK_COLREF b c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a c1) c1) (TOK_SELEXPR (TOK_COLREF a c2) c2) (TOK_SELEXPR (TOK_COLREF b c3) c3) (TOK_SELEXPR (TOK_COLREF b c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c c1)) (TOK_SELEXPR (TOK_COLREF c c2)) (TOK_SELEXPR (TOK_COLREF c c3)) (TOK_SELEXPR (TOK_COLREF c c4))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/input_testxpath2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/input_testxpath2.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/input_testxpath2.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift lint))) (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift lintstring))) (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift mstringstring)))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNOTNULL (TOK_COLREF src_thrift lint)) (NOT (TOK_FUNCTION TOK_ISNULL (TOK_COLREF src_thrift mstringstring))))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift lint))) (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift lintstring))) (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift mstringstring)))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNOTNULL (TOK_COLREF src_thrift lint)) (NOT (TOK_FUNCTION TOK_ISNULL (TOK_COLREF src_thrift mstringstring))))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/join7.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/join7.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/join7.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM 
(TOK_LEFTOUTERJOIN (TOK_FULLOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) c1) (TOK_SELEXPR (TOK_COLREF src1 value) c2)) (TOK_WHERE (and (> (TOK_COLREF src1 key) 10) (< (TOK_COLREF src1 key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src2 key) c3) (TOK_SELEXPR (TOK_COLREF src2 value) c4)) (TOK_WHERE (and (> (TOK_COLREF src2 key) 15) (< (TOK_COLREF src2 key) 25))))) b) (= (TOK_COLREF a c1) (TOK_COLREF b c3))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src3 key) c5) (TOK_SELEXPR (TOK_COLREF src3 value) c6)) (TOK_WHERE (and (> (TOK_COLREF src3 key) 20) (< (TOK_COLREF src3 key) 25))))) c) (= (TOK_COLREF a c1) (TOK_COLREF c c5)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a c1) c1) (TOK_SELEXPR (TOK_COLREF a c2) c2) (TOK_SELEXPR (TOK_COLREF b c3) c3) (TOK_SELEXPR (TOK_COLREF b c4) c4) (TOK_SELEXPR (TOK_COLREF c c5) c5) (TOK_SELEXPR (TOK_COLREF c c6) c6)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c c1)) (TOK_SELEXPR (TOK_COLREF c c2)) (TOK_SELEXPR (TOK_COLREF c c3)) (TOK_SELEXPR (TOK_COLREF c c4)) (TOK_SELEXPR (TOK_COLREF c c5)) (TOK_SELEXPR (TOK_COLREF c c6))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_FULLOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) c1) (TOK_SELEXPR (TOK_COLREF src1 value) c2)) (TOK_WHERE (and (> (TOK_COLREF src1 key) 10) (< (TOK_COLREF src1 key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src2 key) c3) (TOK_SELEXPR (TOK_COLREF src2 value) c4)) (TOK_WHERE (and (> (TOK_COLREF src2 key) 15) (< (TOK_COLREF src2 key) 25))))) b) (= (TOK_COLREF a c1) (TOK_COLREF b c3))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src3 key) c5) (TOK_SELEXPR (TOK_COLREF src3 value) c6)) (TOK_WHERE (and (> (TOK_COLREF src3 key) 20) (< (TOK_COLREF src3 key) 25))))) c) (= (TOK_COLREF a c1) (TOK_COLREF c c5)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a c1) c1) (TOK_SELEXPR (TOK_COLREF a c2) c2) (TOK_SELEXPR (TOK_COLREF b c3) c3) (TOK_SELEXPR (TOK_COLREF b c4) c4) (TOK_SELEXPR (TOK_COLREF c c5) c5) (TOK_SELEXPR (TOK_COLREF c c6) c6)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c c1)) (TOK_SELEXPR (TOK_COLREF c c2)) (TOK_SELEXPR (TOK_COLREF c c3)) (TOK_SELEXPR (TOK_COLREF c c4)) (TOK_SELEXPR (TOK_COLREF c c5)) (TOK_SELEXPR (TOK_COLREF c c6))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/input8.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/input8.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/input8.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB 
dest1)) (TOK_SELECT (TOK_SELEXPR (+ 4 TOK_NULL)) (TOK_SELEXPR (- (TOK_COLREF src1 key) TOK_NULL)) (TOK_SELEXPR (+ TOK_NULL TOK_NULL))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ 4 TOK_NULL)) (TOK_SELEXPR (- (TOK_COLREF src1 key) TOK_NULL)) (TOK_SELEXPR (+ TOK_NULL TOK_NULL))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/join8.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/join8.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/join8.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) c1) (TOK_SELEXPR (TOK_COLREF src1 value) c2)) (TOK_WHERE (and (> (TOK_COLREF src1 key) 10) (< (TOK_COLREF src1 key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src2 key) c3) (TOK_SELEXPR (TOK_COLREF src2 value) c4)) (TOK_WHERE (and (> (TOK_COLREF src2 key) 15) (< (TOK_COLREF src2 key) 25))))) b) (= (TOK_COLREF a c1) (TOK_COLREF b c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a c1) c1) (TOK_SELEXPR (TOK_COLREF a c2) c2) (TOK_SELEXPR (TOK_COLREF b c3) c3) (TOK_SELEXPR (TOK_COLREF b c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c c1)) (TOK_SELEXPR (TOK_COLREF c c2)) (TOK_SELEXPR (TOK_COLREF c c3)) (TOK_SELEXPR (TOK_COLREF c c4))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNULL (TOK_COLREF c c3)) (TOK_FUNCTION TOK_ISNOTNULL (TOK_COLREF c c1)))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) c1) (TOK_SELEXPR (TOK_COLREF src1 value) c2)) (TOK_WHERE (and (> (TOK_COLREF src1 key) 10) (< (TOK_COLREF src1 key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src2 key) c3) (TOK_SELEXPR (TOK_COLREF src2 value) c4)) (TOK_WHERE (and (> (TOK_COLREF src2 key) 15) (< (TOK_COLREF src2 key) 25))))) b) (= (TOK_COLREF a c1) (TOK_COLREF b c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a c1) c1) (TOK_SELEXPR (TOK_COLREF a c2) c2) (TOK_SELEXPR (TOK_COLREF b c3) c3) (TOK_SELEXPR (TOK_COLREF b c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF c c1)) (TOK_SELEXPR (TOK_COLREF c c2)) (TOK_SELEXPR (TOK_COLREF c c3)) (TOK_SELEXPR (TOK_COLREF c c4))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNULL (TOK_COLREF c c3)) (TOK_FUNCTION TOK_ISNOTNULL (TOK_COLREF c c1)))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/udf1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/udf1.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/udf1.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM 
(TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (LIKE 'a' '%a%')) (TOK_SELEXPR (LIKE 'b' '%a%')) (TOK_SELEXPR (LIKE 'ab' '%a%')) (TOK_SELEXPR (LIKE 'ab' '%a_')) (TOK_SELEXPR (LIKE '%_' '\%\_')) (TOK_SELEXPR (LIKE 'ab' '\%\_')) (TOK_SELEXPR (LIKE 'ab' '_a%')) (TOK_SELEXPR (LIKE 'ab' 'a')) (TOK_SELEXPR (RLIKE '' '.*')) (TOK_SELEXPR (RLIKE 'a' '[ab]')) (TOK_SELEXPR (RLIKE '' '[ab]')) (TOK_SELEXPR (RLIKE 'hadoop' '[a-z]*')) (TOK_SELEXPR (RLIKE 'hadoop' 'o*')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'abc' 'b' 'c')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'abc' 'z' 'a')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'abbbb' 'bb' 'b')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'hadoop' '(.)[a-z]*' '$1ive'))) (TOK_WHERE (= (TOK_COLREF src key) 86)))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (LIKE 'a' '%a%')) (TOK_SELEXPR (LIKE 'b' '%a%')) (TOK_SELEXPR (LIKE 'ab' '%a%')) (TOK_SELEXPR (LIKE 'ab' '%a_')) (TOK_SELEXPR (LIKE '%_' '\%\_')) (TOK_SELEXPR (LIKE 'ab' '\%\_')) (TOK_SELEXPR (LIKE 'ab' '_a%')) (TOK_SELEXPR (LIKE 'ab' 'a')) (TOK_SELEXPR (RLIKE '' '.*')) (TOK_SELEXPR (RLIKE 'a' '[ab]')) (TOK_SELEXPR (RLIKE '' '[ab]')) (TOK_SELEXPR (RLIKE 'hadoop' '[a-z]*')) (TOK_SELEXPR (RLIKE 'hadoop' 'o*')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'abc' 'b' 'c')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'abc' 'z' 'a')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'abbbb' 'bb' 'b')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'hadoop' '(.)[a-z]*' '$1ive'))) (TOK_WHERE (= (TOK_COLREF src key) 86)))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/input_testxpath.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/input_testxpath.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/input_testxpath.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_COLREF src_thrift lint) 1)) (TOK_SELEXPR (. ([ (TOK_COLREF src_thrift lintstring) 0) mystring)) (TOK_SELEXPR ([ (TOK_COLREF src_thrift mstringstring) 'key_2'))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_COLREF src_thrift lint) 1)) (TOK_SELEXPR (. 
([ (TOK_COLREF src_thrift lintstring) 0) mystring)) (TOK_SELEXPR ([ (TOK_COLREF src_thrift mstringstring) 'key_2'))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/input_part1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/input_part1.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/input_part1.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF srcpart key)) (TOK_SELEXPR (TOK_COLREF srcpart value)) (TOK_SELEXPR (TOK_COLREF srcpart hr)) (TOK_SELEXPR (TOK_COLREF srcpart ds))) (TOK_WHERE (and (and (< (TOK_COLREF srcpart key) 100) (= (TOK_COLREF srcpart ds) '2008-04-08')) (= (TOK_COLREF srcpart hr) '12'))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF srcpart key)) (TOK_SELEXPR (TOK_COLREF srcpart value)) (TOK_SELEXPR (TOK_COLREF srcpart hr)) (TOK_SELEXPR (TOK_COLREF srcpart ds))) (TOK_WHERE (and (and (< (TOK_COLREF srcpart key) 100) (= (TOK_COLREF srcpart ds) '2008-04-08')) (= (TOK_COLREF srcpart hr) '12'))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/groupby2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/groupby2.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/groupby2.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (TOK_COLREF src key) 0 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (TOK_COLREF src key) 0 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4))))) (TOK_GROUPBY (TOK_FUNCTION substr (TOK_COLREF src key) 0 1)))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (TOK_COLREF src key) 0 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (TOK_COLREF src key) 0 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4))))) (TOK_GROUPBY (TOK_FUNCTION substr (TOK_COLREF src key) 0 1)))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/groupby3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/groupby3.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/groupby3.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION avg (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTIONDI avg (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION max (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION min (TOK_FUNCTION substr (TOK_COLREF src value) 4)))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM 
(TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION avg (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTIONDI avg (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION max (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION min (TOK_FUNCTION substr (TOK_COLREF src value) 4)))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/groupby4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/groupby4.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/groupby4.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (TOK_COLREF src key) 0 1))) (TOK_GROUPBY (TOK_FUNCTION substr (TOK_COLREF src key) 0 1)))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (TOK_COLREF src key) 0 1))) (TOK_GROUPBY (TOK_FUNCTION substr (TOK_COLREF src key) 0 1)))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/groupby5.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/groupby5.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/groupby5.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4)))) (TOK_GROUPBY (TOK_COLREF src key)))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4)))) (TOK_GROUPBY (TOK_COLREF src key)))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/groupby6.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/groupby6.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/groupby6.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECTDI (TOK_SELEXPR (TOK_FUNCTION substr (TOK_COLREF src value) 4 1))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECTDI (TOK_SELEXPR (TOK_FUNCTION substr (TOK_COLREF src value) 4 1))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/sample1.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/sample1.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/sample1.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF srcpart (TOK_TABLESAMPLE 1 1) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (and (= (TOK_COLREF s ds) '2008-04-08') (= 
(TOK_COLREF s hr) '11'))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF srcpart (TOK_TABLESAMPLE 1 1 (TOK_FUNCTION rand)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (and (= (TOK_COLREF s ds) '2008-04-08') (= (TOK_COLREF s hr) '11'))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/sample3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/sample3.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/sample3.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 2 key value) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 2 (TOK_COLREF key) (TOK_COLREF value)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/sample4.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/sample4.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/sample4.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 2 key) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 2 (TOK_COLREF key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/sample5.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/sample5.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/sample5.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 5 key) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 5 (TOK_COLREF key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/sample6.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/sample6.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/parse/sample6.q.out (working copy) @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 4 key) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 4 (TOK_COLREF key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))))) null \ No newline at end of file Index: src/contrib/hive/ql/src/test/results/compiler/parse/sample7.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/parse/sample7.q.out 
(revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/parse/sample7.q.out (working copy)
@@ -1 +1 @@
-(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 4 key) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (> (TOK_COLREF s key) 100)))) null
\ No newline at end of file
+(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 4 (TOK_COLREF key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (> (TOK_COLREF s key) 100)))) null
\ No newline at end of file
Index: src/contrib/hive/ql/src/test/results/compiler/plan/join2.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/join2.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/join2.q.xml (working copy)
[Hunks garbled in extraction: the XMLEncoder markup of this plan diff was stripped, leaving only text nodes, so the hunks cannot be reconstructed verbatim. The surviving text records three kinds of changes: the insert-clause temporary path moves from /tmp/hive-zshao/1036144012/1033953173.10000.insclause-0 to /tmp/hive-njain/303286777.10000.insclause-0, warehouse locations move from file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/... to file:/home/njain/workspace/hadoop-0.17/..., and the intermediate map-reduce descriptors switch from org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe over TextInputFormat/IgnoreKeyTextOutputFormat to org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe over SequenceFileInputFormat/SequenceFileOutputFormat, with shuffle keys described by a binary_sortable_table schema (serialization.format org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol) and shuffle values by a binary_table schema (serialization.format com.facebook.thrift.protocol.TBinaryProtocol).]
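These plan-file diffs all record the switch of Hive's intermediate (shuffle) data to binary serialization: TBinarySortableProtocol writes reduce keys so that comparing the raw bytes gives the same ordering as comparing the typed values, which lets the framework sort shuffle keys without deserializing them, while plain TBinaryProtocol suffices for the values. As a rough reconstruction (an assumption based on the src1/src2/src3 aliases and the double-typed joinkey0 in the surviving text; the actual join2.q query file is not part of this patch), the join2 test compiles a query of this shape:

    -- src1.key + src2.key is arithmetic on string columns, so the second
    -- shuffle key becomes a double, matching the surviving schema text
    -- "binary_sortable_table { double joinkey0}" above.
    FROM src src1
    JOIN src src2 ON (src1.key = src2.key)
    JOIN src src3 ON (src1.key + src2.key = src3.key)
    INSERT OVERWRITE TABLE dest1
    SELECT src1.key, src3.value;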
+1595,16 @@ - - - - - - - - - /tmp/hive-zshao/1036144012/1033953173.10001 - - - - - - - - - - - - - - 0 - - - - - - - - - - 1 - - - - - - - - - - 2 - - - - - - - - - - 3 - - - - - - - - - - - - - + + + + + + - - - - - - - - 1 - - - - - - - - - 0 - - - - - VALUE.0 - - - - - - - - - - VALUE.1 - - - - - - - - - - 1 - - - - - VALUE.0 - - - - - - - - - - VALUE.1 - - - - - - - - - - - + + - - - Index: src/contrib/hive/ql/src/test/results/compiler/plan/input2.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/input2.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/input2.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/20345754/201293413.10000.insclause-0 + /tmp/hive-njain/96064841/341873759.10000.insclause-0 @@ -80,7 +80,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -100,7 +100,7 @@ true - /tmp/hive-zshao/20345754/201293413.10001.insclause-1 + /tmp/hive-njain/96064841/341873759.10001.insclause-1 @@ -149,7 +149,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest2 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest2 @@ -178,7 +178,7 @@ true - /tmp/hive-zshao/20345754/201293413.10002.insclause-2 + /tmp/hive-njain/96064841/341873759.10002.insclause-2 @@ -231,7 +231,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest3 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest3 @@ -259,15 +259,15 @@ src - + - + - + @@ -275,13 +275,20 @@ - /tmp/hive-zshao/20345754/201293413.10000.insclause-0 + /tmp/hive-njain/96064841/341873759.10000.insclause-0 + + + + + + + @@ -344,8 +351,18 @@ + + true + + + + + + + + @@ -416,6 +433,13 @@ + + + + + + + @@ -447,11 +471,11 @@ - + - + @@ -459,13 +483,20 @@ - /tmp/hive-zshao/20345754/201293413.10001.insclause-1 + /tmp/hive-njain/96064841/341873759.10001.insclause-1 + + + + + + + @@ -526,6 +557,13 @@ + + + + + + + @@ -662,6 +700,13 @@ + + + + + + + @@ -672,11 +717,11 @@ - + - + @@ -684,13 +729,20 @@ - /tmp/hive-zshao/20345754/201293413.10002.insclause-2 + /tmp/hive-njain/96064841/341873759.10002.insclause-2 + + + + + + + @@ -705,6 +757,16 @@ + + + + 1 + + + + + + @@ -727,10 +789,27 @@ + + + + + + + 2 + + + + + + + + + + @@ -793,6 +872,13 @@ + + + + + + + @@ -818,7 +904,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -830,7 +916,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -882,7 +968,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -895,6 +981,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/join3.q.xml =================================================================== --- 
src/contrib/hive/ql/src/test/results/compiler/plan/join3.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/join3.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/1679400869/64637247.10000.insclause-0 + /tmp/hive-njain/1136932164.10000.insclause-0 @@ -80,7 +80,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -108,66 +108,150 @@ src2 - + - - - - - - - - - key + + + + + + + + + + + + + 0 + + + + + java.lang.String + + + + + + - - - - java.lang.String + + + + org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.mapred.SequenceFileOutputFormat + + + + + name + binary_sortable_table + + + serialization.ddl + struct binary_sortable_table { string joinkey0} + + + serialization.format + org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol + + + + + -1 + + + + + + 1 + + + + + + + 0 + + + + + + + + + + + + org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.mapred.SequenceFileOutputFormat + + + + + name + binary_table + + + serialization.ddl + struct binary_table { string reducesinkvalue0} + + + serialization.format + com.facebook.thrift.protocol.TBinaryProtocol + + + + + - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + + + + - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - 0 + + + + + + + + VALUE.0 + + + + + + + - - serialization.format - 1 - - - 1 - - - -1 - - - 1 - - + + + + + @@ -179,42 +263,14 @@ - - - - value - - - - - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - 0,1 - - - serialization.format - 1 - - - - + + + + + + @@ -225,23 +281,13 @@ - VALUE.0 + 0 - - - - VALUE.1 - - - - - - @@ -282,15 +328,15 @@ src3 - + - + - + @@ -306,34 +352,38 @@ - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe - org.apache.hadoop.mapred.TextInputFormat + org.apache.hadoop.mapred.SequenceFileInputFormat - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + org.apache.hadoop.mapred.SequenceFileOutputFormat - columns - 0 + name + binary_sortable_table + serialization.ddl + struct binary_sortable_table { string joinkey0} + + serialization.format - 1 + org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol - - 1 - -1 + + + 2 @@ -362,25 +412,29 @@ - + - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe - org.apache.hadoop.mapred.TextInputFormat + org.apache.hadoop.mapred.SequenceFileInputFormat - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + org.apache.hadoop.mapred.SequenceFileOutputFormat - columns - 0,1 + name + binary_table + serialization.ddl + struct binary_table { string reducesinkvalue0, string reducesinkvalue1} + + serialization.format - 1 + com.facebook.thrift.protocol.TBinaryProtocol @@ -388,6 +442,13 @@ + + + + + + + @@ -452,59 +513,143 @@ src1 - + - - - - - - - - - key + + + + + + + + + + + + + 0 + + + + + + + - - + + + + org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + + + 
org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.mapred.SequenceFileOutputFormat + + + + + name + binary_sortable_table + + + serialization.ddl + struct binary_sortable_table { string joinkey0} + + + serialization.format + org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol + + + + + + -1 + + + + + + + + + + 0 + + + + + + + + + + + + org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.mapred.SequenceFileOutputFormat + + + + + name + binary_table + + + serialization.ddl + struct binary_table { string reducesinkvalue0} + + + serialization.format + com.facebook.thrift.protocol.TBinaryProtocol + + + + + - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + + + + - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - 0 + + + + + + + + VALUE.0 + + + + + + + - - serialization.format - 1 - - - 1 - - - -1 - - + + + + + @@ -516,42 +661,14 @@ - - - - value - - - - - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - 0,1 - - - serialization.format - 1 - - - - + + + + + + @@ -562,23 +679,13 @@ - VALUE.0 + 0 - - - - VALUE.1 - - - - - - @@ -619,13 +726,16 @@ + + + true - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src2 @@ -643,7 +753,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -695,7 +805,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -709,11 +819,11 @@ - + - + @@ -721,13 +831,20 @@ - /tmp/hive-zshao/1679400869/64637247.10000.insclause-0 + /tmp/hive-njain/1136932164.10000.insclause-0 + + + + + + + @@ -777,7 +894,7 @@ - 5 + 3 @@ -788,6 +905,13 @@ + + + + + + + @@ -834,16 +958,6 @@ - - - - VALUE.1 - - - - - - @@ -859,16 +973,6 @@ - - - - VALUE.1 - - - - - - @@ -900,6 +1004,19 @@ + + + + + + + + + + + + + @@ -944,32 +1061,25 @@ - - - - 4 - - - - - - - - - - 5 - - - - - - + + + + + + + + + + + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/input3.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/input3.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/input3.q.xml (working copy) @@ -28,7 +28,7 @@ true - /tmp/hive-njain/356693511/91381909.10003.insclause-3 + /tmp/hive-njain/346624347/1822685820.10003.insclause-3 ../../../../build/contrib/hive/ql/test/data/warehouse/dest4.out @@ -48,7 +48,7 @@ true - /tmp/hive-njain/356693511/91381909.10000.insclause-0 + /tmp/hive-njain/346624347/1822685820.10000.insclause-0 @@ -117,7 +117,7 @@ true - /tmp/hive-njain/356693511/91381909.10001.insclause-1 + /tmp/hive-njain/346624347/1822685820.10001.insclause-1 @@ -195,7 +195,7 @@ true - /tmp/hive-njain/356693511/91381909.10002.insclause-2 + /tmp/hive-njain/346624347/1822685820.10002.insclause-2 @@ -276,15 +276,15 @@ src - + - + - + @@ -292,13 
Index: src/contrib/hive/ql/src/test/results/compiler/plan/join4.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/join4.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/join4.q.xml (working copy)
[Regenerated plan. The old Stage-1 block (a file sink into /tmp/hive-zshao/257975023/155084340.10000.insclause-0 carrying the full dest1 table descriptor: MetadataTypedColumnsetSerDe, TextInputFormat, IgnoreKeyTextOutputFormat, columns key,value, location file:/data/users/zshao/.../warehouse/dest1) is deleted, so the plan now begins at Stage-2. For both join inputs (c:b:src2 with tag 1, c:a:src1 with tag 0) the reduce-sink key and value descriptors switch from MetadataTypedColumnsetSerDe over text formats with numeric column names to DynamicSerDe over SequenceFile formats: keys as binary_sortable_table ("struct binary_sortable_table { string joinkey0}", TBinarySortableProtocol) and values as binary_table ("struct binary_table { string reducesinkvalue0, string reducesinkvalue1}", TBinaryProtocol).]
[join4.q.xml, continued: warehouse/src locations move to file:/home/njain/workspace/hadoop-0.17/...; the final file sink becomes /tmp/hive-njain/170014316/124791502.10001.insclause-0 with an inline MetadataTypedColumnsetSerDe descriptor (columns c1,c2,c3,c4, serialization.format 1).]

Index: src/contrib/hive/ql/src/test/results/compiler/plan/input4.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/input4.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/input4.q.xml (working copy)
[Regenerated plan: the scratch path /tmp/hive-zshao/1042895489/270462051.10000.insclause-0 becomes /tmp/hive-njain/1463239211/50419269.10000.insclause-0; warehouse locations move to file:/home/njain/workspace/hadoop-0.17/...; the tmap:src reduce sink adopts the same DynamicSerDe descriptors, with keys "struct binary_sortable_table { string reducesinkkey0}" (TBinarySortableProtocol) and values "struct binary_table { string reducesinkvalue0, string reducesinkvalue1}" (TBinaryProtocol). A sketch of these descriptors follows.]
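The binary_sortable_table / binary_table pair recurs in every regenerated join and map-reduce plan in this patch. As a rough illustration (not code from the patch), the two schemas could be assembled as plain java.util.Properties tables; the property keys and values below are copied from the plans, while the comment about DynamicSerDe initialization is an assumption based on the serde2 Deserializer API rather than anything shown here.

import java.util.Properties;

public class BinaryTableDescriptors {
    public static Properties keyTable() {
        Properties schema = new Properties();
        schema.setProperty("name", "binary_sortable_table");
        // Keys use the binary-sortable protocol so that raw byte comparison
        // during the shuffle agrees with the logical sort order.
        schema.setProperty("serialization.ddl",
            "struct binary_sortable_table { string joinkey0}");
        schema.setProperty("serialization.format",
            "org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol");
        return schema;
    }

    public static Properties valueTable() {
        Properties schema = new Properties();
        schema.setProperty("name", "binary_table");
        // Values only need a compact encoding, so plain TBinaryProtocol is used.
        schema.setProperty("serialization.ddl",
            "struct binary_table { string reducesinkvalue0, string reducesinkvalue1}");
        schema.setProperty("serialization.format",
            "com.facebook.thrift.protocol.TBinaryProtocol");
        return schema;
    }
    // A DynamicSerDe instance would then presumably be initialized with one of
    // these tables via initialize(Configuration, Properties) (assumed API).
}

Splitting the key and value sides this way lets the expensive sortable encoding be paid only where byte-order matters.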
Index: src/contrib/hive/ql/src/test/results/compiler/plan/join5.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/join5.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/join5.q.xml (working copy)
[Same rewrite as join4.q.xml: the old Stage-1 (file sink into /tmp/hive-zshao/936351131/313796179.10000.insclause-0 with the dest1 descriptor) is dropped; the reduce sinks for c:b:src2 (tag 1) and c:a:src1 (tag 0) move to DynamicSerDe over SequenceFile formats with binary_sortable_table keys and binary_table values; warehouse/src locations move to file:/home/njain/workspace/hadoop-0.17/...; the final sink becomes /tmp/hive-njain/198286603/204778341.10001.insclause-0 with an inline MetadataTypedColumnsetSerDe descriptor (columns c1,c2,c3,c4, serialization.format 1).]
Index: src/contrib/hive/ql/src/test/results/compiler/plan/input5.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/input5.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/input5.q.xml (working copy)
[Regenerated plan for the script-transform query over src_thrift. The scratch path /tmp/hive-zshao/641253778/15454385.10000.insclause-0 becomes /tmp/hive-njain/37361661.10000.insclause-0 and warehouse locations move to file:/home/njain/workspace/hadoop-0.17/.... The tkey/tvalue reduce sink adopts DynamicSerDe descriptors (keys "struct binary_sortable_table { string reducesinkkey0}" with TBinarySortableProtocol, values "struct binary_table { string reducesinkvalue0, string reducesinkvalue1}" with TBinaryProtocol). The /bin/cat script operator keeps MetadataTypedColumnsetSerDe on both sides, with serialization.format 9 and output columns tkey,tvalue; the selects over positional columns 0 (java.lang.Integer) and 1 are reshuffled accordingly. A sketch of the script pipe follows.]
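The transform step above pipes rows through /bin/cat with serialization.format 9, which reads as the ASCII code of TAB, so rows cross the pipe as TAB-separated fields and come back as columns tkey,tvalue. A minimal stand-alone sketch of that byte-level contract (the class name and sample row are hypothetical, not from the patch):

import java.io.*;
import java.nio.charset.StandardCharsets;

public class ScriptPipeSketch {
    public static void main(String[] args) throws Exception {
        char delim = (char) 9; // serialization.format "9" -> TAB

        // /bin/cat is the identity script used by the test query.
        Process p = new ProcessBuilder("/bin/cat").start();
        try (OutputStream out = p.getOutputStream()) {
            String row = "key1" + delim + "value1";
            out.write((row + "\n").getBytes(StandardCharsets.UTF_8));
        }
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(p.getInputStream(), StandardCharsets.UTF_8))) {
            String line = in.readLine();
            // The script's output is re-split on the same delimiter into tkey,tvalue.
            String[] cols = line.split(String.valueOf(delim));
            System.out.println("tkey=" + cols[0] + " tvalue=" + cols[1]);
        }
        p.waitFor();
    }
}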
Index: src/contrib/hive/ql/src/test/results/compiler/plan/join6.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/join6.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/join6.q.xml (working copy)
[Same rewrite as join4/join5: the old Stage-1 (sink into /tmp/hive-zshao/161126421/564619381.10000.insclause-0 with the dest1 descriptor) is deleted; the reduce sinks for c:b:src2 (tag 1) and c:a:src1 (tag 0) switch to DynamicSerDe binary_sortable_table/binary_table descriptors; warehouse/src locations move to file:/home/njain/workspace/hadoop-0.17/...; the final sink becomes /tmp/hive-njain/113474329/1108575385.10001.insclause-0 with an inline MetadataTypedColumnsetSerDe descriptor (columns c1,c2,c3,c4, serialization.format 1).]
Index: src/contrib/hive/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/input_testxpath2.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/input_testxpath2.q.xml (working copy)
[Regenerated plan over src_thrift. The old Stage-1 (sink into /tmp/hive-zshao/421781212/46322741.10000.insclause-0 with the dest1 descriptor) is deleted; the select now writes to /tmp/hive-njain/709347576.10001.insclause-0 with an inline descriptor (MetadataTypedColumnsetSerDe, columns _c0,_c1,_c2, serialization.format 1). The three output expressions call org.apache.hadoop.hive.ql.udf.UDFSize via evaluate(java.util.List) on lint and lintstring and via evaluate(java.util.Map) on mstringstring (value type java.lang.String), now referenced through positional columns 0..2. The filter is UDFOPAnd(UDFOPNotNull(lint), UDFOPNot(UDFOPNull(mstringstring))), returning java.lang.Boolean. The source column list (aint, astring, lint, lstring, lintstring, mstringstring) is re-emitted, and warehouse/src_thrift locations move to file:/home/njain/workspace/hadoop-0.17/.... A sketch of the size() calls and the guard follows.]
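A hypothetical pure-Java restatement (not the Hive source) of the size() overloads and the guard predicate just summarized; returning -1 for a null container is an assumption about UDFSize, not something shown in the patch.

import java.util.*;

public class SizeSketch {
    static int size(List<?> l)   { return l == null ? -1 : l.size(); }   // evaluate(java.util.List)
    static int size(Map<?, ?> m) { return m == null ? -1 : m.size(); }   // evaluate(java.util.Map)

    public static void main(String[] args) {
        List<Integer> lint = Arrays.asList(1, 2, 3);
        Map<String, String> mstringstring = new HashMap<>();
        mstringstring.put("k", "v");

        // Guard from the plan: lint IS NOT NULL AND NOT (mstringstring IS NULL).
        if (lint != null && !(mstringstring == null)) {
            System.out.println(size(lint) + " " + size(mstringstring));  // prints: 3 1
        }
    }
}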
Index: src/contrib/hive/ql/src/test/results/compiler/plan/input6.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/input6.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/input6.q.xml (working copy)
[Regenerated plan: the scratch path /tmp/hive-zshao/301573619/12587577.10000.insclause-0 becomes /tmp/hive-njain/958229575/254397981.10000.insclause-0; dest1 and src1 warehouse locations move to file:/home/njain/workspace/hadoop-0.17/....]

Index: src/contrib/hive/ql/src/test/results/compiler/plan/join7.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/join7.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/join7.q.xml (working copy)
[Three-way join, same rewrite as the other join plans: the old Stage-1 (sink into /tmp/hive-zshao/61032244/444662007.10000.insclause-0 with the dest1 descriptor) is deleted; the reduce sinks for c:b:src2 (tag 1), c:a:src1 (tag 0) and c:c:src3 (tag 2) all switch to DynamicSerDe over SequenceFile formats with binary_sortable_table keys ("struct binary_sortable_table { string joinkey0}", TBinarySortableProtocol) and binary_table values ("struct binary_table { string reducesinkvalue0, string reducesinkvalue1}", TBinaryProtocol).]
[join7.q.xml, continued: warehouse/src locations move to file:/home/njain/workspace/hadoop-0.17/...; the final sink becomes /tmp/hive-njain/604172747/190204720.10001.insclause-0 with an inline MetadataTypedColumnsetSerDe descriptor (columns c1,c2,c3,c4,c5,c6, serialization.format 1).]

Index: src/contrib/hive/ql/src/test/results/compiler/plan/input7.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/input7.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/input7.q.xml (working copy)
[Regenerated plan: the scratch path /tmp/hive-zshao/182607732/631207979.10000.insclause-0 becomes /tmp/hive-njain/505144482.10000.insclause-0; dest1/src1 locations move to file:/home/njain/workspace/hadoop-0.17/.... The select over src1 re-emits its two expressions as positional columns 0 (typed java.lang.Void) and 1 (java.lang.String), with the key column now referenced through column 0.]
Index: src/contrib/hive/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (working copy)
[Regenerated plan: the scratch path /tmp/hive-zshao/1185059341/85219314.10000.insclause-0 becomes /tmp/hive-njain/317501800/36487063.10000.insclause-0; dest4_sequencefile and src locations move to file:/home/njain/workspace/hadoop-0.17/....]

Index: src/contrib/hive/ql/src/test/results/compiler/plan/input8.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/input8.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/input8.q.xml (working copy)
[Regenerated plan: the old Stage-1 (sink into /tmp/hive-zshao/243084011/767326882.10000.insclause-0 with the dest1 descriptor) is deleted; the select over src1 now writes to /tmp/hive-njain/568957411.10001.insclause-0 (MetadataTypedColumnsetSerDe, columns _c0,_c1,_c2, serialization.format 1). The three output expressions are typed java.lang.Integer, java.lang.Double and java.lang.Byte: a UDFOPPlus via evaluate(Integer, Integer) fed by the constant 4, a UDFOPMinus via evaluate(Double, Double) whose operand is UDFToDouble(java.lang.String) applied to the key column, and a UDFOPPlus via evaluate(Byte, Byte); src1 locations move to file:/home/njain/workspace/hadoop-0.17/.... A sketch of these expression shapes follows.]
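A hypothetical pure-Java restatement of the three arithmetic shapes named above. The operand values, null propagation, and null-on-parse-failure behaviour are assumptions for illustration, not taken from the patch.

public class ArithmeticSketch {
    static Integer plus(Integer a, Integer b) { return (a == null || b == null) ? null : a + b; }
    static Double minus(Double a, Double b)   { return (a == null || b == null) ? null : a - b; }
    static Byte plus(Byte a, Byte b)          { return (a == null || b == null) ? null : (byte) (a + b); }

    // UDFToDouble-like conversion: null when the string is not numeric (assumed).
    static Double toDouble(String s) {
        if (s == null) return null;
        try { return Double.valueOf(s); } catch (NumberFormatException e) { return null; }
    }

    public static void main(String[] args) {
        String key = "128";
        System.out.println(plus(4, (Integer) null));    // null: a null operand propagates
        System.out.println(minus(toDouble(key), 28.0)); // 100.0
        System.out.println(plus((byte) 1, (byte) 2));   // 3
    }
}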
Index: src/contrib/hive/ql/src/test/results/compiler/plan/join8.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/join8.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/join8.q.xml (working copy)
[Same rewrite as the other join plans: the old Stage-1 (sink into /tmp/hive-zshao/616083721/271603255.10000.insclause-0 with the dest1 descriptor) is deleted; the reduce sinks for c:b:src2 (tag 1) and c:a:src1 (tag 0) switch to DynamicSerDe binary_sortable_table/binary_table descriptors; warehouse/src locations move to file:/home/njain/workspace/hadoop-0.17/....]
[join8.q.xml, continued: the final sink becomes /tmp/hive-njain/378159414/850135187.10001.insclause-0 with an inline MetadataTypedColumnsetSerDe descriptor (columns c1,c2,c3,c4, serialization.format 1).]

Index: src/contrib/hive/ql/src/test/results/compiler/plan/union.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/union.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/union.q.xml (working copy)
[Regenerated plan: the scratch path /tmp/hive-njain/635791418/469963157.10000.insclause-0 becomes /tmp/hive-njain/14629339/1349764315.10000.insclause-0, and a "columns"/"key,value" property is added to the sink descriptor. The two union branches (null-subquery1 and null-subquery2 over src) are re-serialized: each selects key,value and filters with UDFOPGreaterThan via evaluate(java.lang.String, java.lang.Number) against the java.lang.Integer constant 100, returning java.lang.Boolean; a "true" forwarding flag is added, and the second subquery's operator tree is folded into references to shared nodes while its alias null-subquery2:unioninput-subquery2:src is retained (a sketch of this string-to-number comparison follows below).]

Index: src/contrib/hive/ql/src/test/results/compiler/plan/input9.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/input9.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/input9.q.xml (working copy)
[Regenerated plan: the scratch path /tmp/hive-zshao/1031500531/592905830.10000.insclause-0 becomes /tmp/hive-njain/510164179.10000.insclause-0; dest1/src1 locations move to file:/home/njain/workspace/hadoop-0.17/.... The select re-emits positional columns 0 (java.lang.Void) and 1 (java.lang.String); the filter is UDFOPEqual via evaluate(java.lang.String, java.lang.String) over the key column.]
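Referring back to the union.q filter above: it compares a string key with the integer constant 100 through evaluate(java.lang.String, java.lang.Number). A hedged sketch of one plausible semantics (coercing the string to a double; this is an assumption, not Hive's UDFOPGreaterThan source):

public class GreaterThanSketch {
    static Boolean greaterThan(String a, Number b) {
        if (a == null || b == null) return null;
        try {
            return Double.parseDouble(a) > b.doubleValue();
        } catch (NumberFormatException e) {
            return null; // non-numeric keys drop out of the filter (assumed)
        }
    }

    public static void main(String[] args) {
        System.out.println(greaterThan("238", 100)); // true
        System.out.println(greaterThan("86", 100));  // false
    }
}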
- 1 + + + 0 - - - - java.lang.String - - + + @@ -167,39 +207,95 @@ + + + + + + + + + + + + + + - - - - - - - - + + + + + org.apache.hadoop.hive.ql.udf.UDFOPEqual + + + + evaluate + + + java.lang.String + + + java.lang.String + + - - - - key + + + + + + + + - - + + + + + + + + + + java.lang.Boolean + + + + + + + + + + - + + + + + 0 + + + + + + + @@ -208,79 +304,34 @@ - - - - - org.apache.hadoop.hive.ql.udf.UDFOPEqual - - - - evaluate - - - java.lang.String - - - java.lang.String - - - - - - - - - - - - + + + + + + + key - - - - - - + + - - - - java.lang.Boolean - - - + + + + + + + - - - - - key - - - - - - - - - - value - - - - - - - + @@ -291,7 +342,28 @@ - + + + + + key + + + + + + + + + + value + + + + + + + @@ -302,7 +374,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 src1 @@ -314,7 +386,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 @@ -366,7 +438,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 @@ -379,6 +451,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/udf1.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/udf1.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/udf1.q.xml (working copy) @@ -1,104 +1,6 @@ - - - - - - - Stage-1 - - - - - - - - - - - - - - - - - - - - - - true - - - /tmp/hive-zshao/247949494/314100641.10000.insclause-0 - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - name - dest1 - - - serialization.ddl - struct dest1 { string key, string value} - - - serialization.format - 1 - - - columns - key,value - - - bucket_count - -1 - - - serialization.lib - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - file.inputformat - org.apache.hadoop.mapred.TextInputFormat - - - file.outputformat - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - - - - - - - - - - - + Stage-2 @@ -108,1172 +10,1245 @@ src - + - + - + - - - - - /tmp/hive-zshao/247949494/314100641.10000.insclause-0 - - - - - - - - - - - - - - 0 + + + + + + + + + /tmp/hive-njain/530183626.10001.insclause-0 - - - - java.lang.Boolean + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + columns + _c0,_c1,_c2,_c3,_c4,_c5,_c6,_c7,_c8,_c9,_c10,_c11,_c12,_c13,_c14,_c15,_c16 + + + serialization.format + 1 + + + - - - - 1 + + + + - - - - - - - 2 + + + + + + + + 0 + + + + + java.lang.Boolean + + + + + + + + + 1 + + + + + + + + + + 2 + + + + + + + + + + 3 + + + + + + + + + + 4 + + + + + + + + + + 5 + + + + + + + + + + 6 + + + + + + + + + + 7 + + + + + + + + + + 8 + + + + + + + + + + 9 + + + + + 
+ + + + + 10 + + + + + + + + + + 11 + + + + + + + + + + 12 + + + + + + + + + + 13 + + + + + java.lang.String + + + + + + + + + 14 + + + + + + + + + + 15 + + + + + + + + + + 16 + + + + + + + - - - + + + + + + + + - - - 3 + + + org.apache.hadoop.hive.ql.udf.UDFLike - - + + + evaluate + + + java.lang.String + + + java.lang.String + + + - - - - - - 4 + + + + + + + + + a + + + + + + + + + + %a% + + + + - + - - - 5 + + + org.apache.hadoop.hive.ql.udf.UDFLike - - + + + evaluate + + + java.lang.String + + + java.lang.String + + + - - - - - - 6 + + + + + + + + + b + + + + + + + + + + %a% + + + + - + - - - 7 + + + org.apache.hadoop.hive.ql.udf.UDFLike - - + + + evaluate + + + java.lang.String + + + java.lang.String + + + - - - - - - 8 + + + + + + + + + ab + + + + + + + + + + %a% + + + + - + - - - 9 + + + org.apache.hadoop.hive.ql.udf.UDFLike - - + + + evaluate + + + java.lang.String + + + java.lang.String + + + - - - - - - 10 + + + + + + + + + ab + + + + + + + + + + %a_ + + + + - + - - - 11 + + + org.apache.hadoop.hive.ql.udf.UDFLike - - + + + evaluate + + + java.lang.String + + + java.lang.String + + + - - - - - - 12 + + + + + + + + + %_ + + + + + + + + + + \%\_ + + + + - + - - - 13 + + + org.apache.hadoop.hive.ql.udf.UDFLike - - - - java.lang.String + + + evaluate + + + java.lang.String + + + java.lang.String + + + + + + + + + + + + + ab + + + + + + + + + \%\_ + + + - - - - - - 14 + + - - - - - - 15 + + + org.apache.hadoop.hive.ql.udf.UDFLike - - + + + evaluate + + + java.lang.String + + + java.lang.String + + + - - - - - - 16 + + + + + + + + + ab + + + + + + + + + + _a% + + + + - - - - - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - - evaluate - - - java.lang.String - - - java.lang.String - - - - - - - - - + - - a - - - - + + + org.apache.hadoop.hive.ql.udf.UDFLike - - %a% + + + evaluate + + + java.lang.String + + + java.lang.String + + + - - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - - evaluate - - - java.lang.String - - - java.lang.String - - - - - - - - - - + + + + + + + + + ab + + + + + + + + + + a + + + + - - b - - - - - - + - - %a% - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - - evaluate - - - java.lang.String - - - java.lang.String - - - - - - - - - + + + org.apache.hadoop.hive.ql.udf.UDFRegExp - - ab + + + evaluate + + + java.lang.String + + + java.lang.String + + + - - - - - - + + + + + + + + + + + + + + + + + + + .* + + + + - - %a% - - - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - - evaluate - - - java.lang.String - - - java.lang.String - - - - - - - - - + - - ab - - - - + + + org.apache.hadoop.hive.ql.udf.UDFRegExp - - %a_ + + + evaluate + + + java.lang.String + + + java.lang.String + + + - - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - - evaluate - - - java.lang.String - - - java.lang.String - - - - - - - - - - + + + + + + + + + a + + + + + + + + + + [ab] + + + + - - %_ - - - - - - + - - \%\_ - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - - evaluate - - - java.lang.String - - - java.lang.String - - - - - - - - - + + + org.apache.hadoop.hive.ql.udf.UDFRegExp - - ab + + + evaluate + + + java.lang.String + + + java.lang.String + + + - - - - - - + + + + + + + + + + + + + + + + + + + [ab] + + + + - - \%\_ - - - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - - evaluate - - - java.lang.String - - - java.lang.String - - - - - - - - - + - - ab - - - - + + + org.apache.hadoop.hive.ql.udf.UDFRegExp - - _a% + + + evaluate + + + 
[udf1.q.xml, continued:]
 * the remaining UDFRegExp calls pair 'hadoop' against the patterns '[a-z]*' and 'o*'
 * UDFRegExpReplace, with evaluate(java.lang.String, java.lang.String, java.lang.String), is exercised on ('abc','b','c'), ('abc','z','a'), ('abbbb','bb','b') and ('hadoop','(.)[a-z]*','$1ive')
 * the filter predicate is re-serialized as UDFOPEqual with evaluate(java.lang.String, java.lang.Number), comparing column "key" against the java.lang.Integer constant 86
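For reference, these replace calls behave like java.util.regex replaceAll (the equivalence to UDFRegExpReplace is assumed for illustration):

    public class RegexReplaceDemo {
        public static void main(String[] args) {
            System.out.println("abc".replaceAll("b", "c"));                // acc
            System.out.println("abc".replaceAll("z", "a"));                // abc (no match)
            System.out.println("abbbb".replaceAll("bb", "b"));             // abb
            System.out.println("hadoop".replaceAll("(.)[a-z]*", "$1ive")); // hive
        }
    }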
[udf1.q.xml, continued:]
 * the select of columns key and value is re-serialized, the three src table location entries change from file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src to file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src, and a short block of empty trailing elements is appended

Index: src/contrib/hive/ql/src/test/results/compiler/plan/udf4.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/udf4.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/udf4.q.xml (working copy)
[regenerated golden plan; readable hunk content:]
 * the file sink moves from /tmp/hive-njain/463574005/202732636.10001.insclause-0 to /tmp/hive-njain/126141838.10001.insclause-0, again via MetadataTypedColumnsetSerDe with TextInputFormat / IgnoreKeyTextOutputFormat, generated columns _c0,...,_c15 and serialization.format 1
 * the sink schema types are now explicit: position 0 java.lang.Long, position 10 java.lang.Double, position 11 java.lang.Integer, the remaining positions untyped placeholders
 * the expression list is re-serialized: UDFRound over the java.lang.Double constants 1.0 and 1.5 and over UDFOPNegative(1.5); UDFFloor over the same three inputs; UDFCeil over the same three inputs
 * then UDFRand seeded through UDFToLong over the java.lang.Integer constant 3, i.e. rand(3); UDFOPNegative over java.lang.Integer 3; UDFOPPlus(1, 2); UDFOPPlus(1, UDFOPNegative(2)); and UDFOPBitNot over the constant 1
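The constant-folded results these expressions encode match plain java.lang.Math semantics, assuming the round/floor/ceil UDFs delegate to them as the java.lang.Double signatures above suggest:

    public class Udf4MathDemo {
        public static void main(String[] args) {
            System.out.println(Math.round(1.0));   // 1
            System.out.println(Math.round(1.5));   // 2
            System.out.println(Math.round(-1.5));  // -1  (half rounds up)
            System.out.println(Math.floor(-1.5));  // -2.0
            System.out.println(Math.ceil(-1.5));   // -1.0
            System.out.println(1 + (-2));          // -1
            System.out.println(~1);                // -2  (bitwise NOT)
            // rand(3): a fixed seed makes the sequence reproducible.
            System.out.println(new java.util.Random(3L).nextDouble());
        }
    }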
[udf4.q.xml, continued:]
 * the tail of the operator list (schema placeholders and empty trailing elements) is re-serialized to the new layout

Index: src/contrib/hive/ql/src/test/results/compiler/plan/input_testxpath.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/input_testxpath.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/input_testxpath.q.xml (working copy)
[regenerated golden plan; readable hunk content:]
 * the leading Stage-1 block (move task loading /tmp/hive-zshao/780638111/964614278.10000.insclause-0 into dest1, with the full dest1 property list) is deleted; the plan starts at Stage-2 over table src_thrift
 * the file sink becomes /tmp/hive-njain/493664790.10001.insclause-0 with columns _c0,mystring,_c2 and serialization.format 1; the sink schema types position 0 java.lang.Integer and position 1 java.lang.String
 * the select expressions are re-serialized: an index lookup with constant 1 into column lint; the field "mystring" (flag false) of the element at index 0 of column lintstring; and a map lookup with key "key_2" into column mstringstring
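A sketch of the column shapes this plan navigates, with the nested types assumed from the field names (lint as a list of ints, lintstring as a list of structs carrying mystring, mstringstring as a string-to-string map):

    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;

    public class XPathSketch {
        // Assumed element shape of the lintstring column.
        static class IntString { int myint; String mystring; }

        // SELECT lint[1], lintstring[0].mystring, mstringstring['key_2']
        static List<Object> select(List<Integer> lint,
                                   List<IntString> lintstring,
                                   Map<String, String> mstringstring) {
            return Arrays.asList(lint.get(1),
                                 lintstring.get(0).mystring,
                                 mstringstring.get("key_2"));
        }
    }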
[input_testxpath.q.xml, continued:]
 * the downstream column list is rebuilt around the same three source columns (lint, lintstring, mstringstring) with select positions 0, 1 and 2, and the three src_thrift location entries change from file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift to file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift

Index: src/contrib/hive/ql/src/test/results/compiler/plan/input_part1.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/input_part1.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/input_part1.q.xml (working copy)
[regenerated golden plan; readable hunk content:]
 * the leading Stage-1 block (move task loading /tmp/hive-zshao/587528285/609899583.10000.insclause-0 into dest1) is deleted; the plan starts at Stage-2 over the partitioned table srcpart
 * the file sink becomes /tmp/hive-njain/133993466/535789358.10001.insclause-0 via MetadataTypedColumnsetSerDe, with columns key,value,hr,ds and serialization.format 1
 * the partition paths change from file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 to file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12, the table location to the matching .../warehouse/srcpart path, and a few empty trailing elements are added
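Note that the sink column list includes hr and ds even though the data files hold only key and value; the partition columns come from the ds=.../hr=... path segments. A sketch of that recovery (assumed mechanism):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class PartitionSpecSketch {
        // Extracts {ds=2008-04-08, hr=12} from a path like
        // .../warehouse/srcpart/ds=2008-04-08/hr=12
        static Map<String, String> partitionSpec(String path) {
            Map<String, String> spec = new LinkedHashMap<>();
            for (String segment : path.split("/")) {
                int eq = segment.indexOf('=');
                if (eq > 0) {
                    spec.put(segment.substring(0, eq), segment.substring(eq + 1));
                }
            }
            return spec;
        }
    }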
Index: src/contrib/hive/ql/src/test/results/compiler/plan/groupby1.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/groupby1.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/groupby1.q.xml (working copy)
[regenerated golden plan; readable hunk content:]
 * the final sink moves from /tmp/hive-zshao/1211327466/29184745.10000.insclause-0 to /tmp/hive-njain/255951081/1610738340.10000.insclause-0, and the dest1 location from the zshao warehouse path to file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1
 * the intermediate file moves to /tmp/hive-njain/255951081/1610738340.10001, and its descriptors switch from MetadataTypedColumnsetSerDe with TextInputFormat / IgnoreKeyTextOutputFormat to org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe with SequenceFileInputFormat / SequenceFileOutputFormat
 * the reduce-sink key is now described as name binary_sortable_table, serialization.ddl "struct binary_sortable_table { string reducesinkkey0}", serialization.format org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol; the value side as name binary_table, serialization.ddl "struct binary_table { double reducesinkvalue0}", serialization.format com.facebook.thrift.protocol.TBinaryProtocol, with the value column typed java.lang.Double
 * a whole second map-reduce stage is added; its readable pieces are: a read of /tmp/hive-njain/255951081/1610738340.10001 as "struct binary_table { string temporarycol0, double temporarycol1}" (TBinaryProtocol); a group-by running UDAFSum over UDFToDouble(VALUE.0) against KEY.0 in mode PARTIAL1; a select of key; a reduce sink keyed on "key" through another binary_sortable_table { string reducesinkkey0}; a first-stage sink partitioned by UDFRand; and a map-side UDFSubstr over column value with the java.lang.Integer constant 4
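These table descriptors are plain name / serialization.ddl / serialization.format property triples. A sketch of assembling one in code, with the Properties-based configuration assumed from the plan text (the DynamicSerDe initialization API itself is not shown in this patch):

    import java.util.Properties;

    public class ReduceKeyTableSketch {
        public static Properties reduceKeySchema() {
            Properties p = new Properties();
            // Property values below are verbatim from the regenerated plan.
            p.setProperty("name", "binary_sortable_table");
            p.setProperty("serialization.ddl",
                "struct binary_sortable_table { string reducesinkkey0}");
            p.setProperty("serialization.format",
                "org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol");
            return p;
        }
    }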
[groupby1.q.xml, continued:]
 * in the second stage the group-by mode flips from PARTIAL2 to FINAL, VALUE.0 is typed java.lang.Double, and the old plain-text descriptors of the intermediate file (MetadataTypedColumnsetSerDe, columns 0,1, serialization.format 1) are removed
 * the legacy single-stage blocks are deleted outright: the old map-side section with UDFSubstr over value and constant 4, the old reduce-sink descriptors, and the trailing duplicate aggregation block (UDAFSum over VALUE.0, keyed KEY.0, mode PARTIAL1) that used to close the file
 * the three src table locations change from the zshao warehouse path to file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src
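The net effect is a two-pass GROUP BY: pass one spreads rows across reducers by rand() and emits PARTIAL1 sums, pass two re-partitions those partials by the real key and merges them in FINAL mode. A compact sketch of those semantics (assumed; the real operators stream Thrift-serialized rows):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class TwoStageGroupBySketch {
        // Pass 1 (PARTIAL1): each reducer sums whatever rows rand() sent it.
        static Map<String, Double> partial(List<String[]> rows) {
            Map<String, Double> sums = new HashMap<>();
            for (String[] kv : rows) {
                // kv[0] = group key, kv[1] = numeric string (UDFToDouble above)
                sums.merge(kv[0], Double.parseDouble(kv[1]), Double::sum);
            }
            return sums;
        }

        // Pass 2 (FINAL): partials for the same key now meet on one reducer.
        static Map<String, Double> merge(List<Map<String, Double>> partials) {
            Map<String, Double> out = new HashMap<>();
            for (Map<String, Double> p : partials) {
                p.forEach((k, v) -> out.merge(k, v, Double::sum));
            }
            return out;
        }
    }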
Index: src/contrib/hive/ql/src/test/results/compiler/plan/groupby2.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/groupby2.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/groupby2.q.xml (working copy)
[regenerated golden plan; readable hunk content:]
 * the leading Stage-1 block (move task loading /tmp/hive-zshao/62629504/152527911.10000.insclause-0 into dest1, with the full dest1 property list including its serialization.ddl "struct dest1 { string key, string value}") is deleted; the plan now starts at Stage-3
 * the intermediate file moves to /tmp/hive-njain/16836316/818176241.10002 and switches to DynamicSerDe over SequenceFile formats; the reduce-sink key is "struct binary_sortable_table { string reducesinkkey0}" (TBinarySortableProtocol), the value "struct binary_table { i64 reducesinkvalue0, double reducesinkvalue1}" (TBinaryProtocol), with the value positions typed java.lang.Long and java.lang.Double
 * the added second stage reads the intermediate file as "struct binary_table { string temporarycol0, i64 temporarycol1, double temporarycol2}" and runs the aggregations in mode PARTIAL1: UDAFCount with the distinct flag set, over KEY.1, and UDAFSum over UDFToDouble(KEY.1)
 * the map side computes UDFSubstr over column key with the constants 0 and 1, and UDFSubstr over column value with the constant 4
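count(DISTINCT ...) works here by placing the distinct expression into the reduce key (reducesinkkey1 below), so each reducer sees one group's values in sorted order and can count transitions instead of buffering a set. A sketch of that counting idea (assumed semantics):

    import java.util.List;

    public class DistinctCountSketch {
        // Rows arrive sorted by (groupKey, distinctValue); count a value only
        // when it differs from the previous one.
        static long countDistinctSorted(List<String> sortedValues) {
            long count = 0;
            String prev = null;
            for (String v : sortedValues) {
                if (prev == null || !v.equals(prev)) {
                    count++;
                }
                prev = v;
            }
            return count;
        }
    }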
[groupby2.q.xml, continued:]
 * the second-stage reduce sink sorts on both columns: key "struct binary_sortable_table { string reducesinkkey0, string reducesinkkey1}" (TBinarySortableProtocol) with the empty value descriptor "struct binary_table { }" (TBinaryProtocol)
 * the final sink becomes /tmp/hive-njain/16836316/818176241.10001.insclause-0 with generated columns _c0,_c1,_c2; the count result is typed java.lang.Long, and the sum is passed through UDFToString over java.lang.Double before the sink
 * the reduce-side group-by mode flips from PARTIAL2 to FINAL, and the legacy single-stage blocks are deleted (the old map-side UDFSubstr expressions, the MetadataTypedColumnsetSerDe descriptors, and the trailing duplicate UDAFCount/UDAFSum aggregation block)
 * the three src table locations change from the zshao warehouse path to file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src

Index: src/contrib/hive/ql/src/test/results/compiler/plan/subq.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/subq.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/subq.q.xml (working copy)
[regenerated golden plan; readable hunk content:]
 * the sink writing to ../../../../build/contrib/hive/ql/test/data/warehouse/union.out moves from /tmp/hive-njain/560916151/1453534343.10000.insclause-0 to /tmp/hive-njain/361852244/1393208672.10000.insclause-0
 * the sink descriptor under unioninput:src gains an explicit "columns" property with value key,value, two boolean flags serialized as true, and a few empty trailing elements

Index: src/contrib/hive/ql/src/test/results/compiler/plan/groupby3.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/groupby3.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/groupby3.q.xml (working copy)
[regenerated golden plan; readable hunk content:]
 * the leading Stage-1 block (move task loading /tmp/hive-zshao/168733871/318815163.10000.insclause-0 into dest1) is deleted; the plan starts at Stage-3
 * the intermediate file moves to /tmp/hive-njain/6206243/1572612461.10002 with DynamicSerDe over SequenceFile formats; the reduce-sink key is the empty "struct binary_sortable_table { }" (this query has no GROUP BY key), and the value is "struct binary_table { string reducesinkvalue0, double reducesinkvalue1, string reducesinkvalue2, double reducesinkvalue3, double reducesinkvalue4}" (TBinaryProtocol), with positions 1, 3 and 4 typed java.lang.Double
 * the added second stage reads the intermediate file as "struct binary_table { string temporarycol0, double temporarycol1, string temporarycol2, double temporarycol3, double temporarycol4}" and runs five aggregations in mode PARTIAL1, each over UDFToDouble(KEY.0): UDAFAvg with the distinct flag set, UDAFSum, UDAFAvg, UDAFMin and UDAFMax
 * the map side computes UDFSubstr with the constant 4, and the second reduce sink is keyed by "struct binary_sortable_table { string reducesinkkey0}" (TBinarySortableProtocol)
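avg cannot be rebuilt from partial averages alone, which is why the plan ships several typed columns between the stages; the usual trick is to carry a (sum, count) pair through PARTIAL1 and divide only in FINAL. A minimal sketch of that representation (assumed; the actual UDAFAvg layout is not visible in this patch):

    public class PartialAvgSketch {
        double sum;
        long count;

        void iterate(double v) { sum += v; count++; }                     // PARTIAL1
        void merge(PartialAvgSketch o) { sum += o.sum; count += o.count; } // FINAL
        Double terminate() { return count == 0 ? null : sum / count; }
    }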
[groupby3.q.xml, continued:]
 * in the second stage the group-by mode flips from PARTIAL2 to FINAL, the final sink becomes /tmp/hive-njain/453676475.10001.insclause-0 with generated columns _c0,_c1,_c2,_c3,_c4, and the avg/min/max value positions are typed java.lang.Double
 * the legacy single-stage blocks are deleted (the old map-side UDFSubstr over value with constant 4, the MetadataTypedColumnsetSerDe descriptors, and the trailing duplicate block with the five UDAF entries over KEY.0 in mode PARTIAL1), and the three src locations change to file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src

Index: src/contrib/hive/ql/src/test/results/compiler/plan/groupby4.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/groupby4.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/groupby4.q.xml (working copy)
[regenerated golden plan; readable hunk content:]
 * the leading Stage-1 block (move task loading /tmp/hive-zshao/843671827/38838214.10000.insclause-0 into dest1) is deleted; the plan starts at Stage-3
 * the intermediate file moves to /tmp/hive-njain/887282678/418909568.10002 with DynamicSerDe over SequenceFile formats; key "struct binary_sortable_table { string reducesinkkey0}" (TBinarySortableProtocol), value the empty "struct binary_table { }" (TBinaryProtocol), since this query groups without aggregating
 * the added second stage reads the intermediate file as "struct binary_table { string temporarycol0}" and runs a group-by on KEY.0 in mode PARTIAL1 with no aggregation functions; the map side computes UDFSubstr over column key with the constants 0 and 1, and the first reduce sink is partitioned by UDFRand (typed java.lang.Double)
 * the final sink becomes /tmp/hive-njain/261034330.10001.insclause-0 with the single generated column _c0; the reduce-side mode flips from PARTIAL2 to FINAL; the legacy single-stage blocks are deleted and the three src locations change to the njain warehouse path

Index: src/contrib/hive/ql/src/test/results/compiler/plan/groupby5.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/groupby5.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/groupby5.q.xml (working copy)
[regenerated golden plan; readable hunk content:]
 * the leading Stage-1 block (move task loading /tmp/hive-zshao/218070299/172646370.10000.insclause-0 into dest1) is deleted; the plan starts at Stage-3
 * the intermediate file moves to /tmp/hive-njain/261145182/229076034.10002; key "struct binary_sortable_table { string reducesinkkey0}" (TBinarySortableProtocol), value "struct binary_table { double reducesinkvalue0}" (TBinaryProtocol) with the value typed java.lang.Double
 * the added second stage reads the intermediate file as "struct binary_table { string temporarycol0, double temporarycol1}", runs UDAFSum over UDFToDouble(VALUE.0) keyed by KEY.0 in mode PARTIAL1, selects key, and re-sinks keyed on "key"; the first reduce sink is partitioned by UDFRand, and the map side computes UDFSubstr over column value with the constant 4
 * the final sink becomes /tmp/hive-njain/261145182/229076034.10001.insclause-0 with columns key,_c1; the reduce-side mode flips from PARTIAL2 to FINAL; the legacy single-stage blocks are deleted and the three src locations change to the njain warehouse path

Index: src/contrib/hive/ql/src/test/results/compiler/plan/groupby6.q.xml
===================================================================
--- src/contrib/hive/ql/src/test/results/compiler/plan/groupby6.q.xml (revision 712243)
+++ src/contrib/hive/ql/src/test/results/compiler/plan/groupby6.q.xml (working copy)
[regenerated golden plan; readable hunk content:]
 * the leading Stage-1 block (move task loading /tmp/hive-zshao/20627718/64699543.10000.insclause-0 into dest1) is deleted; the plan starts at Stage-3
 * the intermediate file moves to /tmp/hive-njain/14395907/27060646.10002 with DynamicSerDe over SequenceFile formats; key "struct binary_sortable_table { string reducesinkkey0}" (TBinarySortableProtocol), value the empty "struct binary_table { }" (TBinaryProtocol), a DISTINCT-style group-by with no aggregations
 * the added second stage reads the intermediate file as "struct binary_table { string temporarycol0}" and runs a group-by on KEY.0 in mode PARTIAL1 with no aggregation functions; the map side computes UDFSubstr over column value with the constants 4 and 1, and the first reduce sink is partitioned by UDFRand (typed java.lang.Double)
/tmp/hive-njain/786616721.10001.insclause-0 - + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + columns + _c0 + + + serialization.format + 1 + + + + + + + + + + + @@ -338,6 +650,13 @@ + + + + + + + @@ -370,11 +689,18 @@ - PARTIAL2 + FINAL + + + + + + + @@ -395,6 +721,13 @@ + + + + + + + @@ -410,203 +743,17 @@ src - - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFSubstr - - - - evaluate - - - java.lang.String - - - int - - - int - - - - - - - - - - value - - - - - - - - - - - - java.lang.Integer - - - - - 4 - - - - - - - - - - 1 - - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - 0 - - - serialization.format - 1 - - - - - - - -1 - - - -1 - - - -1 - - - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - - - - serialization.format - 1 - - - - - - - - - - - - - - - KEY.0 - - - - - - - - - - - - - - - - - - - - - - key - - - - - - - - - - value - - - - - - - - - - - + + + + - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -618,7 +765,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -670,7 +817,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -684,72 +831,13 @@ - - - - - - - - - /tmp/hive-zshao/20627718/64699543.10001 - - - - - - - - - - - - - - 0 - - - - - - - - - - - - - + + + + + + - - - - - - - - - - - KEY.0 - - - - - - - - - - - PARTIAL1 - - - - - - - Index: src/contrib/hive/ql/src/test/results/compiler/plan/case_sensitivity.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/case_sensitivity.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/case_sensitivity.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/111275125/273788462.10000.insclause-0 + /tmp/hive-njain/14809281.10000.insclause-0 @@ -80,7 +80,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -108,119 +108,212 @@ src_thrift - + - + - + - - - - - /tmp/hive-zshao/111275125/273788462.10000.insclause-0 + + + + + + + + + /tmp/hive-njain/14809281.10000.insclause-0 + + + + + + + + + + + + + + + + + + + + + 0 + + + + + java.lang.Integer + + + + + + + + + 1 + + + + + java.lang.String + + + + + + + + + + - - - - - - - + + + + - - - 0 + + + + + 0 + + + + + + + + + - - - - java.lang.Integer + + + + + + 1 + + + + - - - 1 - - - - - java.lang.String + + + + + + + 1 + + + + + + + + + + + + + + + + 0 + + + + + + + + MYSTRING + + + false + + + + + + + + + + + + + + + + + + - - - - - - - - - lint - 
- - - - - - - - - - - - - - - - 1 - - - - - - + + + + + org.apache.hadoop.hive.ql.udf.UDFOPGreaterThan + + + + evaluate + + + java.lang.Integer + + + java.lang.Integer + + - - - + + + - lintstring + 0 - - - - - + @@ -235,187 +328,114 @@ - + - - MYSTRING + + + + + + + 0 + + - - false + + + + + + java.lang.Boolean - - - + + + + + + + - - - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFOPGreaterThan - - - - evaluate - - - java.lang.Integer - - - java.lang.Integer - - - - - - - - - - - - lint + + + + + 0 - + - - - - + + + + 1 - - 0 + + - - - - - - - - - - 0 - - - - - - - java.lang.Boolean - - - - - - - + + + + - - - aint - - - - - - - - - - astring - - - - - - - - - + + lint - + - - - lstring - - - - - - - - - - - - - + + lintstring - + - - - - mstringstring - - - - - - - - - - - - - + + + + + + + + + + + + + + @@ -423,7 +443,79 @@ - + + + + + aint + + + + + + + + + + astring + + + + + + + + + + lint + + + + + + + + + + lstring + + + + + + + + + + + + + + lintstring + + + + + + + + + + mstringstring + + + + + + + + + + + + + + @@ -434,7 +526,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift src_thrift @@ -446,7 +538,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift @@ -502,7 +594,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift @@ -515,6 +607,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/sample1.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/sample1.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/sample1.q.xml (working copy) @@ -1,104 +1,6 @@ - - - - - - - Stage-1 - - - - - - - - - - - - - - - - - - - - - - true - - - /tmp/hive-zshao/130068324/266130293.10000.insclause-0 - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - name - dest1 - - - serialization.ddl - struct dest1 { string key, string value} - - - serialization.format - 1 - - - columns - key,value - - - bucket_count - -1 - - - serialization.lib - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - file.inputformat - org.apache.hadoop.mapred.TextInputFormat - - - file.outputformat - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - - - - - - - - - - - + Stage-2 @@ -108,125 +10,363 @@ s - + - + - + - - - - - /tmp/hive-zshao/130068324/266130293.10000.insclause-0 + + + + + + + + + /tmp/hive-njain/57674480/1824861482.10001.insclause-0 + + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + columns + key,value,ds,hr + + + serialization.format + 1 + + + + + + + + + 
+ + + + + + + + + + + + + 0 + + + + + java.lang.String + + + + + + + + + 1 + + + + + + + + + + 2 + + + + + + + + + + 3 + + + + + + + + + + + - - - - - - - + + + + - - - 0 + + + key - - - - java.lang.String - - + + - - - 1 + + + value - + - - - 2 + + + ds - + - - - 3 + + + hr - + + + true + + + + + + + + + + + + + + + - - - + + + + + org.apache.hadoop.hive.ql.udf.UDFOPAnd + + + + evaluate + + + java.lang.Boolean + + + java.lang.Boolean + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPEqual + + + + evaluate + + + java.lang.String + + + java.lang.String + + + + + + + + + + ds + + + + + + + + + + + + + 2008-04-08 + + + + + + + + + java.lang.Boolean + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPEqual + + + + evaluate + + + java.lang.String + + + java.lang.String + + + + + + + + + + hr + + + + + + + + + + + + + 11 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - + + key - + - - + + value - + - - + + ds - + - - + + hr - + @@ -235,13 +375,6 @@ - - - - - - - @@ -251,17 +384,17 @@ - org.apache.hadoop.hive.ql.udf.UDFOPAnd + org.apache.hadoop.hive.ql.udf.UDFOPEqual - + evaluate - java.lang.Boolean + java.lang.Integer - java.lang.Boolean + java.lang.Integer @@ -271,17 +404,17 @@ - org.apache.hadoop.hive.ql.udf.UDFOPEqual + org.apache.hadoop.hive.ql.udf.UDFOPMod - + evaluate - java.lang.String + java.lang.Integer - java.lang.String + java.lang.Integer @@ -289,81 +422,118 @@ - - - ds + + + org.apache.hadoop.hive.ql.udf.UDFOPBitAnd + + + evaluate + + + java.lang.Integer + + + java.lang.Integer + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFDefaultSampleHashFn + + + + evaluate + + + java.lang.Object + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRand + + + + evaluate + + + + + + + + + + java.lang.Double + + + + + + + + + + + java.lang.Integer + + + + + + + + + + + + 2147483647 + + + + + - + - + - 2008-04-08 + 1 - - - java.lang.Boolean - - + - - - org.apache.hadoop.hive.ql.udf.UDFOPEqual + + + - - - evaluate - - - java.lang.String - - - java.lang.String - - - + + 0 - - - - - - hr - - - - - - - - - - - - - 11 - - - - - - - - @@ -375,51 +545,10 @@ - - - - - - - - key - - - - - - - - - - value - - - - - - - - - - ds - - - - - - - - - - hr - - - - - - - + + + + @@ -441,7 +570,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 s @@ -453,7 +582,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 @@ -518,7 +647,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart @@ -531,6 +660,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/sample2.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/sample2.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/sample2.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/741118865/96139643.10000.insclause-0 + /tmp/hive-njain/186273508/11889374.10000.insclause-0 @@ -80,7 +80,7 @@ location - 
file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -108,11 +108,11 @@ s - + - + @@ -120,13 +120,20 @@ - /tmp/hive-zshao/741118865/96139643.10000.insclause-0 + /tmp/hive-njain/186273508/11889374.10000.insclause-0 + + + + + + + @@ -189,8 +196,18 @@ + + true + + + + + + + + @@ -237,7 +254,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt s @@ -249,7 +266,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt @@ -272,18 +289,22 @@ srcbucket + bucket_field_name + key + + serialization.ddl struct srcbucket { string key, string value} + columns + key,value + + serialization.format 1 - columns - key,value - - bucket_count 2 @@ -301,7 +322,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -314,6 +335,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/sample3.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/sample3.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/sample3.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/1895709606/245717296.10000.insclause-0 + /tmp/hive-njain/164819642/256272558.10000.insclause-0 @@ -80,7 +80,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -108,15 +108,15 @@ s - + - + - + @@ -124,13 +124,20 @@ - /tmp/hive-zshao/1895709606/245717296.10000.insclause-0 + /tmp/hive-njain/164819642/256272558.10000.insclause-0 + + + + + + + @@ -193,8 +200,18 @@ + + true + + + + + + + + @@ -376,6 +393,13 @@ + + + + + + + @@ -415,7 +439,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket s @@ -427,7 +451,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -450,18 +474,22 @@ srcbucket + bucket_field_name + key + + serialization.ddl struct srcbucket { string key, string value} + columns + key,value + + serialization.format 1 - columns - key,value - - bucket_count 2 @@ -479,7 +507,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -492,6 +520,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/sample4.q.xml =================================================================== --- 
src/contrib/hive/ql/src/test/results/compiler/plan/sample4.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/sample4.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/614066513/551990576.10000.insclause-0 + /tmp/hive-njain/85259061/94539507.10000.insclause-0 @@ -80,7 +80,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -108,85 +108,56 @@ s - + - + - - + + + + + /tmp/hive-njain/85259061/94539507.10000.insclause-0 + + + + + + + - - - - - /tmp/hive-zshao/614066513/551990576.10000.insclause-0 - - - - - - - - - - - - - - 0 - - - - - java.lang.String - - - - - - - - - 1 - - - - - - - - - - - + - - - - + + + + - - - key + + + 0 - - + + + + java.lang.String + + - - - value + + + 1 - + @@ -195,174 +166,55 @@ - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFOPEqual - - - - evaluate - - - java.lang.Integer - - - java.lang.Integer - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFOPMod - - - - evaluate - - - java.lang.Integer - - - java.lang.Integer - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFOPBitAnd - - - - evaluate - - - java.lang.Integer - - - java.lang.Integer - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFDefaultSampleHashFn - - - - evaluate - - - java.lang.Object - - - - - - - - - - key - - - - - - - - - - - - java.lang.Integer - - - - - - - - - - - - 2147483647 - - - - - - - - - - - - - - - - - 2 - - - - - - - - - + + + + + + + key - - - - - - - 0 - - + + - - - - java.lang.Boolean + + + + value + + + + + true + + + + + + + + + + + + + + + @@ -402,7 +254,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt s @@ -414,7 +266,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt @@ -437,18 +289,22 @@ srcbucket + bucket_field_name + key + + serialization.ddl struct srcbucket { string key, string value} - serialization.format - 1 - - columns key,value + serialization.format + 1 + + bucket_count 2 @@ -466,7 +322,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -479,6 +335,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/sample5.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/sample5.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/sample5.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/532954132/21366373.10000.insclause-0 + /tmp/hive-njain/423510659/355253855.10000.insclause-0 @@ -80,7 +80,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -108,15 +108,15 @@ s - + - + - + @@ -124,13 +124,20 @@ - /tmp/hive-zshao/532954132/21366373.10000.insclause-0 + 
/tmp/hive-njain/423510659/355253855.10000.insclause-0 + + + + + + + @@ -193,8 +200,18 @@ + + true + + + + + + + + @@ -363,6 +380,13 @@ + + + + + + + @@ -402,7 +426,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket s @@ -414,7 +438,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -437,18 +461,22 @@ srcbucket + bucket_field_name + key + + serialization.ddl struct srcbucket { string key, string value} + columns + key,value + + serialization.format 1 - columns - key,value - - bucket_count 2 @@ -466,7 +494,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -479,6 +507,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/sample6.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/sample6.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/sample6.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/554218395/580685485.10000.insclause-0 + /tmp/hive-njain/404713103/151871838.10000.insclause-0 @@ -80,7 +80,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -108,15 +108,15 @@ s - + - + - + @@ -124,13 +124,20 @@ - /tmp/hive-zshao/554218395/580685485.10000.insclause-0 + /tmp/hive-njain/404713103/151871838.10000.insclause-0 + + + + + + + @@ -193,8 +200,18 @@ + + true + + + + + + + + @@ -363,6 +380,13 @@ + + + + + + + @@ -402,7 +426,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt s @@ -414,7 +438,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt @@ -437,18 +461,22 @@ srcbucket + bucket_field_name + key + + serialization.ddl struct srcbucket { string key, string value} + columns + key,value + + serialization.format 1 - columns - key,value - - bucket_count 2 @@ -466,7 +494,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -479,6 +507,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/sample7.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/sample7.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/sample7.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/393286311/133813526.10000.insclause-0 + /tmp/hive-njain/1242963668/223671734.10000.insclause-0 @@ -80,7 
+80,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -108,19 +108,19 @@ s - + - + - + - + @@ -128,13 +128,20 @@ - /tmp/hive-zshao/393286311/133813526.10000.insclause-0 + /tmp/hive-njain/1242963668/223671734.10000.insclause-0 + + + + + + + @@ -197,8 +204,18 @@ + + true + + + + + + + + @@ -269,6 +286,13 @@ + + + + + + + @@ -450,6 +474,13 @@ + + + + + + + @@ -468,7 +499,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt s @@ -480,7 +511,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt @@ -503,18 +534,22 @@ srcbucket + bucket_field_name + key + + serialization.ddl struct srcbucket { string key, string value} + columns + key,value + + serialization.format 1 - columns - key,value - - bucket_count 2 @@ -532,7 +567,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -545,6 +580,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/cast1.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/cast1.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/cast1.q.xml (working copy) @@ -1,104 +1,6 @@ - - - - - - - Stage-1 - - - - - - - - - - - - - - - - - - - - - - true - - - /tmp/hive-zshao/783071627/588980298.10000.insclause-0 - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - name - dest1 - - - serialization.ddl - struct dest1 { string key, string value} - - - serialization.format - 1 - - - columns - key,value - - - bucket_count - -1 - - - serialization.lib - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - file.inputformat - org.apache.hadoop.mapred.TextInputFormat - - - file.outputformat - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - - - - - - - - - - - + Stage-2 @@ -108,228 +10,339 @@ src - + - + - + - - - - - /tmp/hive-zshao/783071627/588980298.10000.insclause-0 - - - - - - - - - - - - - - 0 + + + + + + + + + /tmp/hive-njain/168127484.10001.insclause-0 - - - - java.lang.Integer + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - - - - - - 1 - - - - - java.lang.Double + + org.apache.hadoop.mapred.TextInputFormat + + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + columns + _c0,_c1,_c2,_c3,_c4,_c5,_c6 + + + serialization.format + 1 + + + - - - - 2 + + + + - - - - - - - 3 - - - - - - - - - - 4 - - - - - - - - - - 5 - - - - - java.lang.Boolean + + + + + + + + 0 + + + + + java.lang.Integer + + + + + + + + 1 + + + + + java.lang.Double + + + + + + + 
+ + 2 + + + + + + + + + + 3 + + + + + + + + + + 4 + + + + + + + + + + 5 + + + + + java.lang.Boolean + + + + + + + + + 6 + + + + + + - - - - 6 - - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - - evaluate - - - java.lang.Integer - - - java.lang.Integer - - - - - + + + - - - + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus - - 3 + + + evaluate + + + java.lang.Integer + + + java.lang.Integer + + + - - - - + + + + + + + + + 3 + + + + + + + + + + 2 + + + + + - - 2 - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - - evaluate - - - java.lang.Double - - - java.lang.Double - - - - - - - + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + evaluate + + + java.lang.Double + + + java.lang.Double + + + + + + + + + + + + + 3.0 + + + + + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 2 + + + + + + + + + + + + - - 3.0 - - org.apache.hadoop.hive.ql.udf.UDFToDouble + org.apache.hadoop.hive.ql.udf.UDFOPPlus - + evaluate - + - java.lang.Integer + java.lang.Double + + java.lang.Double + + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 3 + + + + + + + + + + + - + - 2 + 2.0 @@ -340,45 +353,21 @@ - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - - evaluate - - - java.lang.Double - - - java.lang.Double - - - - - - - org.apache.hadoop.hive.ql.udf.UDFToDouble + org.apache.hadoop.hive.ql.udf.UDFOPPlus - + evaluate - + - java.lang.Integer + java.lang.Double + + java.lang.Double + @@ -387,13 +376,23 @@ - + - 3 + 3.0 + + + + + + + 2.0 + + + @@ -402,97 +401,108 @@ - - - + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus - - 2.0 + + + evaluate + + + java.lang.Integer + + + java.lang.Integer + + + - - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - - evaluate - - - java.lang.Double - - - java.lang.Double - - - - - - - - + + + + + + + + + 3 + + + + + + + org.apache.hadoop.hive.ql.udf.UDFToInteger + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 2.0 + + + + + + + + + + + + - + - - 3.0 - - - - + + + org.apache.hadoop.hive.ql.udf.UDFToBoolean - - 2.0 + + + evaluate + + + java.lang.Integer + + + - - - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - - evaluate - - - java.lang.Integer - - - java.lang.Integer - - - - - - - - + + + + + + + + + 1 + + + + + - + - - 3 - @@ -505,7 +515,7 @@ evaluate - java.lang.Double + java.lang.Boolean @@ -515,10 +525,10 @@ - + - 2.0 + true @@ -531,177 +541,142 @@ - - + + + + + + - - - - org.apache.hadoop.hive.ql.udf.UDFToBoolean + + + + - - - evaluate - - - java.lang.Integer - - - - - - - - - - + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPEqual + + + + evaluate + + + java.lang.String + + + java.lang.Number + + + + + + + + + + 0 + + + + + java.lang.String - - 1 - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFToInteger - - - - evaluate - - - java.lang.Boolean - - - - - - - - - - - - - true - - + + + + + + 86 + - - - + + + - - - - + + + + - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFOPEqual - - - - evaluate - - - java.lang.String - - - java.lang.Number - - - - - - - - - - key - - - - - java.lang.String + + + + + + + + 0 + + + - - - - - - - 86 - - - - - - - - - - + + + + - - + + key - + - - - - value - - - - - - + + + + + + + + + + + + + + @@ -709,7 +684,28 @@ - + + + + + key + + + + + + + + + + value + + + + + + + @@ -720,7 +716,7 @@ - 
file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -732,7 +728,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -784,7 +780,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -797,6 +793,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/join1.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/join1.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/join1.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/108001173/349936017.10000.insclause-0 + /tmp/hive-njain/810786628.10000.insclause-0 @@ -80,7 +80,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -108,15 +108,15 @@ src2 - + - + - + @@ -136,34 +136,38 @@ - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe - org.apache.hadoop.mapred.TextInputFormat + org.apache.hadoop.mapred.SequenceFileInputFormat - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + org.apache.hadoop.mapred.SequenceFileOutputFormat - columns - 0 + name + binary_sortable_table + serialization.ddl + struct binary_sortable_table { string joinkey0} + + serialization.format - 1 + org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol - - 1 - -1 + + + 1 @@ -192,25 +196,29 @@ - + - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe - org.apache.hadoop.mapred.TextInputFormat + org.apache.hadoop.mapred.SequenceFileInputFormat - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + org.apache.hadoop.mapred.SequenceFileOutputFormat - columns - 0,1 + name + binary_table + serialization.ddl + struct binary_table { string reducesinkvalue0, string reducesinkvalue1} + + serialization.format - 1 + com.facebook.thrift.protocol.TBinaryProtocol @@ -218,6 +226,13 @@ + + + + + + + @@ -282,59 +297,143 @@ src1 - + - - - - - - - - - key + + + + + + + + + + + + + 0 + + + + + + + - - + + + + org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.mapred.SequenceFileOutputFormat + + + + + name + binary_sortable_table + + + serialization.ddl + struct binary_sortable_table { string joinkey0} + + + serialization.format + org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol + + + + + + -1 + + + + + + + + + + 0 + + + + + + + + + + + + org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.mapred.SequenceFileOutputFormat + + + + + name + binary_table + + + serialization.ddl + struct binary_table { string reducesinkvalue0} + + + serialization.format + com.facebook.thrift.protocol.TBinaryProtocol + + + + + - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + 
+ + + + - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - 0 + + + + + + + + VALUE.0 + + + + + + + - - serialization.format - 1 - - - 1 - - - -1 - - + + + + + @@ -346,42 +445,14 @@ - - - - value - - - - - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - 0,1 - - - serialization.format - 1 - - - - + + + + + + @@ -392,23 +463,13 @@ - VALUE.0 + 0 - - - - VALUE.1 - - - - - - @@ -449,13 +510,16 @@ + + + true - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src2 @@ -470,7 +534,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -522,7 +586,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -536,11 +600,11 @@ - + - + @@ -548,13 +612,20 @@ - /tmp/hive-zshao/108001173/349936017.10000.insclause-0 + /tmp/hive-njain/810786628.10000.insclause-0 + + + + + + + @@ -604,7 +675,7 @@ - 3 + 2 @@ -615,6 +686,13 @@ + + + + + + + @@ -654,16 +732,6 @@ - - - - VALUE.1 - - - - - - @@ -695,6 +763,16 @@ + + + + + + + + + + @@ -729,22 +807,22 @@ - - - - 3 - - - - - - + + + + + + + + + + Index: src/contrib/hive/ql/src/test/results/compiler/plan/input1.q.xml =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/plan/input1.q.xml (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/plan/input1.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/374447248/1282977307.10000.insclause-0 + /tmp/hive-njain/1266815017/75925705.10000.insclause-0 @@ -80,7 +80,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 @@ -108,15 +108,15 @@ src - + - + - + @@ -124,13 +124,20 @@ - /tmp/hive-zshao/374447248/1282977307.10000.insclause-0 + /tmp/hive-njain/1266815017/75925705.10000.insclause-0 + + + + + + + @@ -195,6 +202,13 @@ + + + + + + + @@ -265,6 +279,13 @@ + + + + + + + @@ -311,7 +332,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -323,7 +344,7 @@ - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -375,7 +396,7 @@ location - file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -388,6 +409,9 @@ + + + Index: src/contrib/hive/ql/src/test/results/compiler/errors/notable_alias1.q.out =================================================================== --- 
src/contrib/hive/ql/src/test/results/compiler/errors/notable_alias1.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/errors/notable_alias1.q.out (working copy) @@ -1 +0,0 @@ -Parse Error: line 2:44 cannot recognize input 'value' Index: src/contrib/hive/ql/src/test/results/compiler/errors/notable_alias2.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/errors/notable_alias2.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/errors/notable_alias2.q.out (working copy) @@ -1 +0,0 @@ -Parse Error: line 2:60 cannot recognize input 'key' Index: src/contrib/hive/ql/src/test/results/compiler/errors/notable_alias3.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/errors/notable_alias3.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/errors/notable_alias3.q.out (working copy) @@ -1 +0,0 @@ -Parse Error: line 2:83 cannot recognize input 'key' Index: src/contrib/hive/ql/src/test/results/compiler/errors/quoted_string.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/errors/quoted_string.q.out (revision 712243) +++ src/contrib/hive/ql/src/test/results/compiler/errors/quoted_string.q.out (working copy) @@ -1,2 +1,2 @@ -Parse Error: line 2:36 character '"' not supported here -line 2:41 character '"' not supported here +Parse Error: line 3:0 character '' not supported here +line 3:0 character '' not supported here Index: src/contrib/hive/ql/src/test/results/compiler/errors/insert_wrong_number_columns.q.out =================================================================== --- src/contrib/hive/ql/src/test/results/compiler/errors/insert_wrong_number_columns.q.out (revision 0) +++ src/contrib/hive/ql/src/test/results/compiler/errors/insert_wrong_number_columns.q.out (revision 0) @@ -0,0 +1,2 @@ +Semantic Exception: +line 2:23 Cannot insert into target table because column number/types are different dest1: Table insclause-0 has 2 columns but query has [0: string, 1: string, 2: int]. 
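A recurring pattern runs through the regenerated plan files above: every intermediate reduce-sink table has moved from MetadataTypedColumnsetSerDe over text files to DynamicSerDe over SequenceFiles, with keys serialized by TBinarySortableProtocol (so sort order is preserved in the raw bytes the shuffle compares) and values by plain TBinaryProtocol. A minimal sketch of the schema properties these golden files encode — the property keys and values below are taken verbatim from the plans; only the variable names are illustrative:

    // Key side: byte-comparable encoding, so the shuffle sorts correctly.
    java.util.Properties keySchema = new java.util.Properties();
    keySchema.setProperty("name", "binary_sortable_table");
    keySchema.setProperty("serialization.ddl",
        "struct binary_sortable_table { string reducesinkkey0}");
    keySchema.setProperty("serialization.format",
        "org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol");

    // Value side: ordinary Thrift binary encoding.
    java.util.Properties valueSchema = new java.util.Properties();
    valueSchema.setProperty("name", "binary_table");
    valueSchema.setProperty("serialization.ddl",
        "struct binary_table { string reducesinkvalue0}");
    valueSchema.setProperty("serialization.format",
        "com.facebook.thrift.protocol.TBinaryProtocol");

In every plan above, both tables are declared with org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe and the SequenceFileInputFormat/SequenceFileOutputFormat pair.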
\ No newline at end of file Index: src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java =================================================================== --- src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (revision 712243) +++ src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (working copy) @@ -198,7 +198,7 @@ @SuppressWarnings("unchecked") private void populateMapRedPlan1(Table src) { mr.setNumReduceTasks(Integer.valueOf(1)); - + // map-side work Operator op1 = OperatorFactory.get (PlanUtils.getReduceSinkDesc @@ -206,6 +206,8 @@ Utilities.makeList(new exprNodeColumnDesc(String.class, "value")), -1, 1, -1, false)); Utilities.addMapWork(mr, src, "a", op1); + mr.setKeyDesc(op1.getConf().getKeySerializeInfo()); + mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo()); // reduce side work Operator op3 = OperatorFactory.get(new fileSinkDesc @@ -230,6 +232,8 @@ new exprNodeColumnDesc(String.class, "value")), -1, 1, -1, false)); Utilities.addMapWork(mr, src, "a", op1); + mr.setKeyDesc(op1.getConf().getKeySerializeInfo()); + mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo()); // reduce side work Operator op4 = OperatorFactory.get(new fileSinkDesc @@ -261,6 +265,8 @@ (new exprNodeColumnDesc(String.class, "value")), Byte.valueOf((byte)0), 1, -1, false)); Utilities.addMapWork(mr, src, "a", op1); + mr.setKeyDesc(op1.getConf().getKeySerializeInfo()); + mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo()); Operator op2 = OperatorFactory.get (PlanUtils.getReduceSinkDesc @@ -270,11 +276,8 @@ Integer.MAX_VALUE, -1, false)); Utilities.addMapWork(mr, src2, "b", op2); + mr.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo()); - // just to satisfy the constraint that each tag must define a schema - mr.getAliasToSchema().put("a", new schemaDesc("")); - mr.getAliasToSchema().put("b", new schemaDesc("")); - // reduce side work Operator op4 = OperatorFactory.get(new fileSinkDesc (tmpdir + "mapredplan3.out", @@ -318,6 +321,8 @@ new exprNodeColumnDesc(String.class, "value"))), op0); Utilities.addMapWork(mr, src, "a", op4); + mr.setKeyDesc(op1.getConf().getKeySerializeInfo()); + mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo()); // reduce side work Operator op3 = OperatorFactory.get(new fileSinkDesc @@ -348,6 +353,8 @@ new exprNodeColumnDesc(String.class, "value"))), op0); Utilities.addMapWork(mr, src, "a", op4); + mr.setKeyDesc(op0.getConf().getKeySerializeInfo()); + mr.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo()); // reduce side work Operator op3 = OperatorFactory.get(new fileSinkDesc @@ -384,6 +391,8 @@ new exprNodeColumnDesc(String.class, "value"))), op0); Utilities.addMapWork(mr, src, "a", op4); + mr.setKeyDesc(op1.getConf().getKeySerializeInfo()); + mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo()); // reduce side work Operator op3 = OperatorFactory.get(new fileSinkDesc Index: src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java =================================================================== --- src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java (revision 0) +++ src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java (revision 0) @@ -0,0 +1,281 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.io; + +import java.io.*; +import java.util.*; +import junit.framework.TestCase; + +import org.apache.commons.logging.*; + +import org.apache.hadoop.fs.*; +import org.apache.hadoop.record.*; +import org.apache.hadoop.io.*; +import org.apache.hadoop.mapred.*; +import org.apache.hadoop.io.serializer.*; +import org.apache.hadoop.conf.*; +import org.apache.hadoop.util.ReflectionUtils; + +import com.facebook.thrift.*; +import com.facebook.thrift.transport.*; +import com.facebook.thrift.protocol.*; + +//import org.apache.hadoop.contrib.serialization.thrift.*; + +public class TestFlatFileInputFormat extends TestCase { + + public void testFlatFileInputJava() throws Exception { + Configuration conf; + JobConf job ; + FileSystem fs; + Path dir ; + Path file; + Reporter reporter; + FSDataOutputStream ds; + + try { + // + // create job and filesystem and reporter and such. + // + conf = new Configuration(); + job = new JobConf(conf); + fs = FileSystem.getLocal(conf); + dir = new Path(System.getProperty("test.build.data",".") + "/mapred"); + file = new Path(dir, "test.txt"); + reporter = Reporter.NULL; + fs.delete(dir, true); + + job.setClass(FlatFileInputFormat.SerializationImplKey, + org.apache.hadoop.io.serializer.JavaSerialization.class, + org.apache.hadoop.io.serializer.Serialization.class); + + job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey, + JavaTestObjFlatFileInputFormat.class, java.io.Serializable.class); + + // + // Write some data out to a flat file + // + FileInputFormat.setInputPaths(job, dir); + ds = fs.create(file); + Serializer serializer = new JavaSerialization().getSerializer(null); + + // construct some data and write it + serializer.open(ds); + for (int i = 0; i < 10; i++) { + serializer.serialize(new JavaTestObjFlatFileInputFormat("Hello World! " + String.valueOf(i), i)); + } + serializer.close(); + + // + // Construct the reader + // + FileInputFormat> format = + new FlatFileInputFormat(); + InputSplit[] splits = format.getSplits(job, 1); + + // construct the record reader + RecordReader> reader = + format.getRecordReader(splits[0], job, reporter); + + // create key/value + Void key = reader.createKey(); + FlatFileInputFormat.RowContainer value = reader.createValue(); + + // + // read back the data using the FlatFileRecordReader + // + int count = 0; + while (reader.next(key, value)) { + assertTrue(key == null); + assertTrue(((JavaTestObjFlatFileInputFormat)value.row).s.equals("Hello World! 
" +String.valueOf(count))); + assertTrue(((JavaTestObjFlatFileInputFormat)value.row).num == count); + count++; + } + reader.close(); + + } catch(Exception e) { + System.err.println("caught: " + e); + e.printStackTrace(); + } finally { + } + + } + + public void testFlatFileInputRecord() throws Exception { + Configuration conf; + JobConf job ; + FileSystem fs; + Path dir ; + Path file; + Reporter reporter; + FSDataOutputStream ds; + + try { + // + // create job and filesystem and reporter and such. + // + conf = new Configuration(); + job = new JobConf(conf); + fs = FileSystem.getLocal(conf); + dir = new Path(System.getProperty("test.build.data",".") + "/mapred"); + file = new Path(dir, "test.txt"); + reporter = Reporter.NULL; + fs.delete(dir, true); + + job.setClass(FlatFileInputFormat.SerializationImplKey, + org.apache.hadoop.io.serializer.WritableSerialization.class, + org.apache.hadoop.io.serializer.Serialization.class); + + job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey, + RecordTestObj.class, Writable.class); + + // + // Write some data out to a flat file + // + FileInputFormat.setInputPaths(job, dir); + ds = fs.create(file); + Serializer serializer = new WritableSerialization().getSerializer(Writable.class); + + // construct some data and write it + serializer.open(ds); + for (int i = 0; i < 10; i++) { + serializer.serialize(new RecordTestObj("Hello World! " + String.valueOf(i), i)); + } + serializer.close(); + + // + // Construct the reader + // + FileInputFormat> format = + new FlatFileInputFormat(); + InputSplit[] splits = format.getSplits(job, 1); + + // construct the record reader + RecordReader> reader = + format.getRecordReader(splits[0], job, reporter); + + // create key/value + Void key = reader.createKey(); + FlatFileInputFormat.RowContainer value = reader.createValue(); + + // + // read back the data using the FlatFileRecordReader + // + int count = 0; + while (reader.next(key, value)) { + assertTrue(key == null); + assertTrue(((RecordTestObj)value.row).getS().equals("Hello World! " +String.valueOf(count))); + assertTrue(((RecordTestObj)value.row).getNum() == count); + count++; + } + reader.close(); + + } catch(Exception e) { + System.err.println("caught: " + e); + e.printStackTrace(); + } finally { + } + + } + /* + public void testFlatFileInputThrift() throws Exception { + Configuration conf; + JobConf job ; + FileSystem fs; + Path dir ; + Path file; + Reporter reporter; + FSDataOutputStream ds; + + try { + // + // create job and filesystem and reporter and such. + // + conf = new Configuration(); + job = new JobConf(conf); + fs = FileSystem.getLocal(conf); + dir = new Path(System.getProperty("test.build.data",".") + "/mapred"); + file = new Path(dir, "test.txt"); + reporter = Reporter.NULL; + fs.delete(dir, true); + + job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationImplKey, + org.apache.hadoop.contrib.serialization.thrift.ThriftSerialization.class, + org.apache.hadoop.io.serializer.Serialization.class); + + job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey, + FlatFileThriftTestObj.class, TBase.class); + + // + // Write some data out to a flat file + // + FileInputFormat.setInputPaths(job, dir); + ds = fs.create(file); + Serializer serializer = new ThriftSerialization().getSerializer(TBase.class); + + // construct some data and write it + serializer.open(ds); + for (int i = 0; i < 10; i++) { + serializer.serialize(new FlatFileThriftTestObj("Hello World! 
" + String.valueOf(i), i)); + } + serializer.close(); + + // + // Construct the reader + // + FileInputFormat> format = + new FlatFileInputFormat(); + InputSplit[] splits = format.getSplits(job, 1); + + // construct the record reader + RecordReader> reader = + format.getRecordReader(splits[0], job, reporter); + + // create key/value + Void key = reader.createKey(); + FlatFileInputFormat.RowContainer value = reader.createValue(); + + // + // read back the data using the FlatFileRecordReader + // + int count = 0; + while (reader.next(key, value)) { + assertTrue(key == null); + assertTrue(((FlatFileThriftTestObj)value.row).s.equals("Hello World! " +String.valueOf(count))); + assertTrue(((FlatFileThriftTestObj)value.row).num == count); + count++; + } + reader.close(); + + } catch(Exception e) { + System.err.println("caught: " + e); + e.printStackTrace(); + } finally { + } + + } + */ + + + public static void main(String[] args) throws Exception { + new TestFlatFileInputFormat().testFlatFileInputJava(); + new TestFlatFileInputFormat().testFlatFileInputRecord(); + // new TestFlatFileInputFormat().testFlatFileInputThrift(); + } +} Index: src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java =================================================================== --- src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java (revision 0) +++ src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java (revision 0) @@ -0,0 +1,212 @@ +// File generated by hadoop record compiler. Do not edit. +package org.apache.hadoop.hive.ql.io; + +public class RecordTestObj extends org.apache.hadoop.record.Record { + private static final org.apache.hadoop.record.meta.RecordTypeInfo _rio_recTypeInfo; + private static org.apache.hadoop.record.meta.RecordTypeInfo _rio_rtiFilter; + private static int[] _rio_rtiFilterFields; + static { + _rio_recTypeInfo = new org.apache.hadoop.record.meta.RecordTypeInfo("RecordTestObj"); + _rio_recTypeInfo.addField("s", org.apache.hadoop.record.meta.TypeID.StringTypeID); + _rio_recTypeInfo.addField("num", org.apache.hadoop.record.meta.TypeID.LongTypeID); + } + + private String s; + private long num; + public RecordTestObj() { } + public RecordTestObj( + final String s, + final long num) { + this.s = s; + this.num = num; + } + public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() { + return _rio_recTypeInfo; + } + public static void setTypeFilter(org.apache.hadoop.record.meta.RecordTypeInfo rti) { + if (null == rti) return; + _rio_rtiFilter = rti; + _rio_rtiFilterFields = null; + } + private static void setupRtiFields() + { + if (null == _rio_rtiFilter) return; + // we may already have done this + if (null != _rio_rtiFilterFields) return; + int _rio_i, _rio_j; + _rio_rtiFilterFields = new int [_rio_rtiFilter.getFieldTypeInfos().size()]; + for (_rio_i=0; _rio_i<_rio_rtiFilterFields.length; _rio_i++) { + _rio_rtiFilterFields[_rio_i] = 0; + } + java.util.Iterator _rio_itFilter = _rio_rtiFilter.getFieldTypeInfos().iterator(); + _rio_i=0; + while (_rio_itFilter.hasNext()) { + org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter.next(); + java.util.Iterator _rio_it = _rio_recTypeInfo.getFieldTypeInfos().iterator(); + _rio_j=1; + while (_rio_it.hasNext()) { + org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next(); + if (_rio_tInfo.equals(_rio_tInfoFilter)) { + _rio_rtiFilterFields[_rio_i] = _rio_j; + break; + } + _rio_j++; + } + _rio_i++; + } + } + public String getS() { + return s; 
+ } + public void setS(final String s) { + this.s=s; + } + public long getNum() { + return num; + } + public void setNum(final long num) { + this.num=num; + } + public void serialize(final org.apache.hadoop.record.RecordOutput _rio_a, final String _rio_tag) + throws java.io.IOException { + _rio_a.startRecord(this,_rio_tag); + _rio_a.writeString(s,"s"); + _rio_a.writeLong(num,"num"); + _rio_a.endRecord(this,_rio_tag); + } + private void deserializeWithoutFilter(final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag) + throws java.io.IOException { + _rio_a.startRecord(_rio_tag); + s=_rio_a.readString("s"); + num=_rio_a.readLong("num"); + _rio_a.endRecord(_rio_tag); + } + public void deserialize(final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag) + throws java.io.IOException { + if (null == _rio_rtiFilter) { + deserializeWithoutFilter(_rio_a, _rio_tag); + return; + } + // if we're here, we need to read based on version info + _rio_a.startRecord(_rio_tag); + setupRtiFields(); + for (int _rio_i=0; _rio_i<_rio_rtiFilter.getFieldTypeInfos().size(); _rio_i++) { + if (1 == _rio_rtiFilterFields[_rio_i]) { + s=_rio_a.readString("s"); + } + else if (2 == _rio_rtiFilterFields[_rio_i]) { + num=_rio_a.readLong("num"); + } + else { + java.util.ArrayList typeInfos = (java.util.ArrayList)(_rio_rtiFilter.getFieldTypeInfos()); + org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i).getFieldID(), typeInfos.get(_rio_i).getTypeID()); + } + } + _rio_a.endRecord(_rio_tag); + } + public int compareTo (final Object _rio_peer_) throws ClassCastException { + if (!(_rio_peer_ instanceof RecordTestObj)) { + throw new ClassCastException("Comparing different types of records."); + } + RecordTestObj _rio_peer = (RecordTestObj) _rio_peer_; + int _rio_ret = 0; + _rio_ret = s.compareTo(_rio_peer.s); + if (_rio_ret != 0) return _rio_ret; + _rio_ret = (num == _rio_peer.num)? 
0 :((num<_rio_peer.num)?-1:1); + if (_rio_ret != 0) return _rio_ret; + return _rio_ret; + } + public boolean equals(final Object _rio_peer_) { + if (!(_rio_peer_ instanceof RecordTestObj)) { + return false; + } + if (_rio_peer_ == this) { + return true; + } + RecordTestObj _rio_peer = (RecordTestObj) _rio_peer_; + boolean _rio_ret = false; + _rio_ret = s.equals(_rio_peer.s); + if (!_rio_ret) return _rio_ret; + _rio_ret = (num==_rio_peer.num); + if (!_rio_ret) return _rio_ret; + return _rio_ret; + } + public Object clone() throws CloneNotSupportedException { + RecordTestObj _rio_other = new RecordTestObj(); + _rio_other.s = this.s; + _rio_other.num = this.num; + return _rio_other; + } + public int hashCode() { + int _rio_result = 17; + int _rio_ret; + _rio_ret = s.hashCode(); + _rio_result = 37*_rio_result + _rio_ret; + _rio_ret = (int) (num^(num>>>32)); + _rio_result = 37*_rio_result + _rio_ret; + return _rio_result; + } + public static String signature() { + return "LRecordTestObj(sl)"; + } + public static class Comparator extends org.apache.hadoop.record.RecordComparator { + public Comparator() { + super(RecordTestObj.class); + } + static public int slurpRaw(byte[] b, int s, int l) { + try { + int os = s; + { + int i = org.apache.hadoop.record.Utils.readVInt(b, s); + int z = org.apache.hadoop.record.Utils.getVIntSize(i); + s+=(z+i); l-= (z+i); + } + { + long i = org.apache.hadoop.record.Utils.readVLong(b, s); + int z = org.apache.hadoop.record.Utils.getVIntSize(i); + s+=z; l-=z; + } + return (os - s); + } catch(java.io.IOException e) { + throw new RuntimeException(e); + } + } + static public int compareRaw(byte[] b1, int s1, int l1, + byte[] b2, int s2, int l2) { + try { + int os1 = s1; + { + int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1); + int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2); + int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1); + int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2); + s1+=z1; s2+=z2; l1-=z1; l2-=z2; + int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2); + if (r1 != 0) { return (r1<0)?-1:0; } + s1+=i1; s2+=i2; l1-=i1; l1-=i2; + } + { + long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1); + long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2); + if (i1 != i2) { + return ((i1-i2) < 0) ? -1 : 0; + } + int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1); + int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2); + s1+=z1; s2+=z2; l1-=z1; l2-=z2; + } + return (os1 - s1); + } catch(java.io.IOException e) { + throw new RuntimeException(e); + } + } + public int compare(byte[] b1, int s1, int l1, + byte[] b2, int s2, int l2) { + int ret = compareRaw(b1,s1,l1,b2,s2,l2); + return (ret == -1)? -1 : ((ret==0)? 1 : 0);} + } + + static { + org.apache.hadoop.record.RecordComparator.define(RecordTestObj.class, new Comparator()); + } +} Index: src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java =================================================================== --- src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java (revision 0) +++ src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java (revision 0) @@ -0,0 +1,36 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.io; + +import java.io.Serializable; + +/** + * Simple test object + */ +public class JavaTestObjFlatFileInputFormat implements Serializable { + public String s; + public int num; + public JavaTestObjFlatFileInputFormat(String s, int num) { + this.s = s; + this.num = num; + } + public JavaTestObjFlatFileInputFormat() { + } +} + Index: src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java =================================================================== --- src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (revision 712243) +++ src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (working copy) @@ -646,6 +646,10 @@ // Do semantic analysis and plan generation Context ctx = new Context(conf); ctx.makeScratchDir(); + while((ast.getToken() == null) && (ast.getChildCount() > 0)) { + ast = (CommonTree)ast.getChild(0); + } + sem.analyze(ast, ctx); ctx.removeScratchDir(); return sem.getRootTasks(); Index: src/contrib/hive/ql/src/test/scripts/testgrep =================================================================== --- src/contrib/hive/ql/src/test/scripts/testgrep (revision 0) +++ src/contrib/hive/ql/src/test/scripts/testgrep (revision 0) @@ -0,0 +1,5 @@ +#!/bin/bash + +egrep '10.*' + +exit 0; Property changes on: src/contrib/hive/ql/src/test/scripts/testgrep ___________________________________________________________________ Added: svn:executable + * Index: src/contrib/hive/ql/src/test/queries/clientnegative/input1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientnegative/input1.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientnegative/input1.q (revision 0) @@ -0,0 +1 @@ +SELECT a.* FROM src; Index: src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias3.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias3.q (revision 0) @@ -0,0 +1,4 @@ +CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE; + +FROM src +INSERT OVERWRITE TABLE dest1 SELECT '1234', src.key, sum(src.value) WHERE src.key < 100 group by key; Index: src/contrib/hive/ql/src/test/queries/clientnegative/input2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientnegative/input2.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientnegative/input2.q (revision 0) @@ -0,0 +1 @@ +SELECT a.key FROM src; Index: src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias4.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias4.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias4.q (revision 0) @@ -0,0 +1,4 @@ 
+EXPLAIN +SELECT key from src JOIN src1 on src1.key=src.key; + +SELECT key from src JOIN src1 on src1.key=src.key; Index: src/contrib/hive/ql/src/test/queries/clientnegative/bad_sample_clause.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientnegative/bad_sample_clause.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientnegative/bad_sample_clause.q (revision 0) @@ -0,0 +1,6 @@ +CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING) STORED AS TEXTFILE; + +INSERT OVERWRITE TABLE dest1 SELECT s.* +FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 2) s +WHERE s.ds='2008-04-08' and s.hr='11'; + Index: src/contrib/hive/ql/src/test/queries/clientnegative/load_wrong_fileformat.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientnegative/load_wrong_fileformat.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientnegative/load_wrong_fileformat.q (revision 0) @@ -0,0 +1,6 @@ +-- negative test: loading a plain-text data file into a table stored as SEQUENCEFILE must be rejected +-- the same file-format check applies when loading into partitions + +DROP TABLE T1; +CREATE TABLE T1(name STRING) STORED AS SEQUENCEFILE; +LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1; Index: src/contrib/hive/ql/src/test/queries/clientnegative/input_testxpath4.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientnegative/input_testxpath4.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientnegative/input_testxpath4.q (revision 0) @@ -0,0 +1,6 @@ +EXPLAIN +FROM src_thrift +SELECT src_thrift.mstringstring['key_9'], lintstring.myint; + +FROM src_thrift +SELECT src_thrift.mstringstring['key_9'], lintstring.myint; Index: src/contrib/hive/ql/src/test/queries/clientnegative/invalid_tbl_name.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientnegative/invalid_tbl_name.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientnegative/invalid_tbl_name.q (revision 0) @@ -0,0 +1 @@ +create table invalid-name(a int, b string); Index: src/contrib/hive/ql/src/test/queries/clientnegative/joinneg.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientnegative/joinneg.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientnegative/joinneg.q (revision 0) @@ -0,0 +1,6 @@ +EXPLAIN FROM +(SELECT src.* FROM src) x +JOIN +(SELECT src.* FROM src) Y +ON (x.key = b.key) +SELECT Y.*; Index: src/contrib/hive/ql/src/test/queries/clientnegative/invalid_create_tbl1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientnegative/invalid_create_tbl1.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientnegative/invalid_create_tbl1.q (revision 0) @@ -0,0 +1,7 @@ +DROP TABLE inv_valid_tbl1; +CREATE TABLE inv_valid_tbl1 COMMENT 'This is a thrift based table' + PARTITIONED BY(aint DATETIME, country STRING) + CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS + ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.ThriftDeserializer' WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex', 'serialization.format' = 'com.facebook.thrift.protocol.TBinaryProtocol') + STORED AS SEQUENCEFILE; +DESCRIBE EXTENDED inv_valid_tbl1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/alter1.q
=================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/alter1.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/alter1.q (revision 0) @@ -0,0 +1,20 @@ +drop table alter1; +create table alter1(a int, b int); +describe extended alter1; +alter table alter1 set tblproperties ('a'='1', 'c'='3'); +describe extended alter1; +alter table alter1 set tblproperties ('a'='1', 'c'='4', 'd'='3'); +describe extended alter1; + +alter table alter1 set serdeproperties('s1'='9'); +describe extended alter1; +alter table alter1 set serdeproperties('s1'='10', 's2' ='20'); +describe extended alter1; + +alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9'); +describe extended alter1; + +alter table alter1 set serde 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'; +describe extended alter1; + +drop table alter1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input11_limit.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input11_limit.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input11_limit.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN FROM src Index: src/contrib/hive/ql/src/test/queries/clientpositive/input14_limit.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input14_limit.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input14_limit.q (working copy) @@ -1,18 +1,18 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN FROM ( FROM src - SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue) - USING '/bin/cat' + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) CLUSTER BY tkey LIMIT 20 ) tmap INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; FROM ( FROM src - SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue) - USING '/bin/cat' + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) CLUSTER BY tkey LIMIT 20 ) tmap INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; Index: src/contrib/hive/ql/src/test/queries/clientpositive/sample2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/sample2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/sample2.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; -- input pruning, no sample filter -- default table sample columns Index: src/contrib/hive/ql/src/test/queries/clientpositive/inputddl1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/inputddl1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/inputddl1.q (working copy) @@ -1,7 +1,7 @@ EXPLAIN -CREATE TABLE INPUTDDL1(key INT, value STRING); +CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE; -CREATE TABLE INPUTDDL1(key INT, value STRING); +CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE; SELECT INPUTDDL1.* from INPUTDDL1; Index: 
src/contrib/hive/ql/src/test/queries/clientpositive/sample4.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/sample4.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/sample4.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; -- bucket column is the same as table sample -- No need for sample filter Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby2_map.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby2_map.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby2_map.q (revision 0) @@ -0,0 +1,12 @@ +set hive.map.aggr=true; + +CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE; + +EXPLAIN +FROM src +INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,0,1), count(DISTINCT substr(src.value,4)), concat(substr(src.key,0,1),sum(substr(src.value,4))) GROUP BY substr(src.key,0,1); + +FROM src +INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,0,1), count(DISTINCT substr(src.value,4)), concat(substr(src.key,0,1),sum(substr(src.value,4))) GROUP BY substr(src.key,0,1); + +SELECT dest1.* FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/inputddl3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/inputddl3.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/inputddl3.q (working copy) @@ -1,5 +1,5 @@ EXPLAIN -CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'; -CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'; +CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE; +CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE; DESCRIBE INPUTDDL3; DROP TABLE INPUTDDL3; Index: src/contrib/hive/ql/src/test/queries/clientpositive/sample6.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/sample6.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/sample6.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; -- both input pruning and sample filter EXPLAIN EXTENDED Index: src/contrib/hive/ql/src/test/queries/clientpositive/inputddl5.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/inputddl5.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/inputddl5.q (working copy) @@ -1,6 +1,6 @@ -- test for internationalization -- kv4.txt contains the utf-8 character 0xE982B5E993AE which we are verifying later on -CREATE TABLE INPUTDDL5(name STRING); +CREATE TABLE INPUTDDL5(name STRING) STORED AS TEXTFILE; LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE INPUTDDL5; DESCRIBE INPUTDDL5; SELECT INPUTDDL5.name from INPUTDDL5; Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby4_map.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby4_map.q (revision 0) +++ 
src/contrib/hive/ql/src/test/queries/clientpositive/groupby4_map.q (revision 0) @@ -0,0 +1,10 @@ +set hive.map.aggr=true; + +CREATE TABLE dest1(key INT) STORED AS TEXTFILE; + +EXPLAIN +FROM src INSERT OVERWRITE TABLE dest1 SELECT count(1); + +FROM src INSERT OVERWRITE TABLE dest1 SELECT count(1); + +SELECT dest1.* FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/inputddl7.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/inputddl7.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/inputddl7.q (revision 0) @@ -0,0 +1,33 @@ +-- test for loading into tables with the correct file format +-- test for loading into partitions with the correct file format + +DROP TABLE T1; +CREATE TABLE T1(name STRING) STORED AS TEXTFILE; +LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1; +SELECT COUNT(1) FROM T1; + +DROP TABLE T2; +CREATE TABLE T2(name STRING) STORED AS SEQUENCEFILE; +LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T2; +SELECT COUNT(1) FROM T2; + +DROP TABLE T3; +CREATE TABLE T3(name STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE; +LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3 PARTITION (ds='2008-04-09'); +SELECT COUNT(1) FROM T3 where T3.ds='2008-04-09'; + +DROP TABLE T4; +CREATE TABLE T4(name STRING) PARTITIONED BY(ds STRING) STORED AS SEQUENCEFILE; +LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T4 PARTITION (ds='2008-04-09'); +SELECT COUNT(1) FROM T4 where T4.ds='2008-04-09'; + +DESCRIBE EXTENDED T1; +DESCRIBE EXTENDED T2; +DESCRIBE EXTENDED T3 PARTITION (ds='2008-04-09'); +DESCRIBE EXTENDED T4 PARTITION (ds='2008-04-09'); + + +DROP TABLE T1; +DROP TABLE T2; +DROP TABLE T3; +DROP TABLE T4; Index: src/contrib/hive/ql/src/test/queries/clientpositive/notable_alias1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/notable_alias1.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/notable_alias1.q (revision 0) @@ -0,0 +1,10 @@ +CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE; + +EXPLAIN +FROM src +INSERT OVERWRITE TABLE dest1 SELECT '1234', key, count(1) WHERE src.key < 100 group by key; + +FROM src +INSERT OVERWRITE TABLE dest1 SELECT '1234', key, count(1) WHERE src.key < 100 group by key; + +SELECT dest1.* FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input0.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input0.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input0.q (revision 0) @@ -0,0 +1,5 @@ +EXPLAIN +SELECT * FROM src; + +SELECT * FROM src; + Index: src/contrib/hive/ql/src/test/queries/clientpositive/join1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join1.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN FROM src src1 JOIN src src2 ON (src1.key = src2.key) Index: src/contrib/hive/ql/src/test/queries/clientpositive/input2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input2.q (revision 712243) +++ 
src/contrib/hive/ql/src/test/queries/clientpositive/input2.q (working copy) @@ -1,6 +1,8 @@ -CREATE TABLE TEST2a(A INT, B FLOAT); +DROP TABLE TEST2a; +CREATE TABLE TEST2a(A INT, B FLOAT) STORED AS TEXTFILE; DESCRIBE TEST2a; -CREATE TABLE TEST2b(A ARRAY, B FLOAT, C MAP); +DROP TABLE TEST2b; +CREATE TABLE TEST2b(A ARRAY, B FLOAT, C MAP) STORED AS TEXTFILE; DESCRIBE TEST2b; SHOW TABLES; DROP TABLE TEST2a; Index: src/contrib/hive/ql/src/test/queries/clientpositive/join3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join3.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join3.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key = src3.key) Index: src/contrib/hive/ql/src/test/queries/clientpositive/input4.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input4.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input4.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE INPUT4(KEY STRING, VALUE STRING); +CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE; EXPLAIN LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4; LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4; Index: src/contrib/hive/ql/src/test/queries/clientpositive/join5.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join5.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join5.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING); +CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE; EXPLAIN FROM ( Index: src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath2.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(lint_size INT, lintstring_size INT, mstringstring_size INT); +CREATE TABLE dest1(lint_size INT, lintstring_size INT, mstringstring_size INT) STORED AS TEXTFILE; EXPLAIN FROM src_thrift Index: src/contrib/hive/ql/src/test/queries/clientpositive/input6.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input6.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input6.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key STRING, value STRING); +CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE; EXPLAIN FROM src1 Index: src/contrib/hive/ql/src/test/queries/clientpositive/join7.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join7.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join7.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING, c5 INT, c6 STRING); +CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING, c5 INT, c6 STRING) STORED AS TEXTFILE; EXPLAIN FROM ( Index: src/contrib/hive/ql/src/test/queries/clientpositive/input8.q 
=================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input8.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input8.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 STRING, c2 INT, c3 DOUBLE); +CREATE TABLE dest1(c1 STRING, c2 INT, c3 DOUBLE) STORED AS TEXTFILE; EXPLAIN FROM src1 Index: src/contrib/hive/ql/src/test/queries/clientpositive/join9.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join9.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join9.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN EXTENDED FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) Index: src/contrib/hive/ql/src/test/queries/clientpositive/input10.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input10.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input10.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE TEST10(key INT, value STRING) PARTITIONED BY(ds STRING, hr STRING); +CREATE TABLE TEST10(key INT, value STRING) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE; EXPLAIN DESCRIBE TEST10; Index: src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 STRING); +CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE; FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input4_cb_delim.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input4_cb_delim.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input4_cb_delim.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012'; +CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012' STORED AS TEXTFILE; LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB; SELECT INPUT4_CB.VALUE, INPUT4_CB.KEY FROM INPUT4_CB; DROP TABLE INPUT4_CB Index: src/contrib/hive/ql/src/test/queries/clientpositive/join11.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join11.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join11.q (revision 0) @@ -0,0 +1,14 @@ +EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src) src2 +ON src1.c1 = src2.c3 AND src1.c1 < 100; + +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src) src2 +ON src1.c1 = src2.c3 AND src1.c1 < 100; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input12.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input12.q (revision 712243) +++ 
src/contrib/hive/ql/src/test/queries/clientpositive/input12.q (working copy) @@ -1,6 +1,6 @@ -CREATE TABLE dest1(key INT, value STRING); -CREATE TABLE dest2(key INT, value STRING); -CREATE TABLE dest3(key INT, value STRING) PARTITIONED BY(ds STRING, hr STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; +CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE; +CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE; EXPLAIN FROM src Index: src/contrib/hive/ql/src/test/queries/clientpositive/udf4.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/udf4.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/udf4.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 STRING); +CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE; FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86; Index: src/contrib/hive/ql/src/test/queries/clientpositive/join13.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join13.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join13.q (revision 0) @@ -0,0 +1,20 @@ +EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src) src2 +ON src1.c1 = src2.c3 AND src1.c1 < 100 +JOIN +(SELECT src.key as c5, src.value as c6 from src) src3 +ON src1.c1 + src2.c3 = src3.c5 AND src3.c5 < 200; + +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src) src2 +ON src1.c1 = src2.c3 AND src1.c1 < 100 +JOIN +(SELECT src.key as c5, src.value as c6 from src) src3 +ON src1.c1 + src2.c3 = src3.c5 AND src3.c5 < 200; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input14.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input14.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input14.q (working copy) @@ -1,18 +1,18 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN FROM ( FROM src - SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue) - USING '/bin/cat' + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) CLUSTER BY tkey ) tmap INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; FROM ( FROM src - SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue) - USING '/bin/cat' + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) CLUSTER BY tkey ) tmap INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; Index: src/contrib/hive/ql/src/test/queries/clientpositive/join15.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join15.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join15.q (revision 0) @@ -0,0 +1,4 @@ +EXPLAIN +SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key); + +SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key); Index: src/contrib/hive/ql/src/test/queries/clientpositive/input16.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input16.q (revision 712243) +++ 
src/contrib/hive/ql/src/test/queries/clientpositive/input16.q (working copy) @@ -1,5 +1,5 @@ -- TestSerDe is a user defined serde where the default delimiter is Ctrl-B -CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERIALIZER 'org.apache.hadoop.hive.serde2.TestSerDe'; +CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE; LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16; SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16; DROP TABLE INPUT16; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING); +CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING) STORED AS TEXTFILE; EXPLAIN EXTENDED FROM srcpart Index: src/contrib/hive/ql/src/test/queries/clientpositive/input18.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input18.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input18.q (revision 0) @@ -0,0 +1,20 @@ +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; + +EXPLAIN +FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value, 1+2, 3+4) + USING '/bin/cat' + CLUSTER BY key +) tmap +INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100; + +FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value, 1+2, 3+4) + USING '/bin/cat' + CLUSTER BY key +) tmap +INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100; + +SELECT dest1.* FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input_part3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input_part3.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input_part3.q (revision 0) @@ -0,0 +1,4 @@ +EXPLAIN +SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.hr = 11; + +SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.hr = 11; Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby2.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key STRING, c1 INT, c2 STRING); +CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE; EXPLAIN FROM src Index: src/contrib/hive/ql/src/test/queries/clientpositive/input_part5.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input_part5.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input_part5.q (revision 0) @@ -0,0 +1,4 @@ +EXPLAIN +SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.key < 100; + +SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.key < 100; Index: src/contrib/hive/ql/src/test/queries/clientpositive/show_tables.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/show_tables.q (revision 712243) +++ 
src/contrib/hive/ql/src/test/queries/clientpositive/show_tables.q (working copy) @@ -1,5 +1,5 @@ -CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME); -CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME); +CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE; +CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE; EXPLAIN SHOW TABLES 'shtb_*'; Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby4.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby4.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby4.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 STRING); +CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE; EXPLAIN FROM src Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 STRING); +CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE; EXPLAIN FROM src Index: src/contrib/hive/ql/src/test/queries/clientpositive/input1_limit.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input1_limit.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input1_limit.q (working copy) @@ -1,5 +1,5 @@ -CREATE TABLE dest1(key INT, value STRING); -CREATE TABLE dest2(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; +CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN FROM src Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby8.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby8.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby8.q (working copy) @@ -1,10 +1,10 @@ -CREATE TABLE DEST1(key INT, value STRING); -CREATE TABLE DEST2(key INT, value STRING); +CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE; +CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE; FROM SRC INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,4)) GROUP BY SRC.key INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,4)) GROUP BY SRC.key; SELECT DEST1.* FROM DEST1; -SELECT DEST1.* FROM DEST2; +SELECT DEST2.* FROM DEST2; Index: src/contrib/hive/ql/src/test/queries/clientpositive/case_sensitivity.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/case_sensitivity.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/case_sensitivity.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE DEST1(Key INT, VALUE STRING); +CREATE TABLE DEST1(Key INT, VALUE STRING) STORED AS TEXTFILE; EXPLAIN FROM SRC_THRIFT Index: src/contrib/hive/ql/src/test/queries/clientpositive/scriptfile1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/scriptfile1.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/scriptfile1.q (revision 0) @@ -0,0 +1,13 @@ +CREATE TABLE dest1(key INT, value 
STRING); + +ADD FILE src/test/scripts/testgrep; + +FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value) + USING 'testgrep' AS (tkey, tvalue) + CLUSTER BY tkey +) tmap +INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue; + +SELECT dest1.* FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/sample1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/sample1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/sample1.q (working copy) @@ -1,13 +1,13 @@ -CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING); +CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING) STORED AS TEXTFILE; -- no input pruning, no sample filter EXPLAIN EXTENDED INSERT OVERWRITE TABLE dest1 SELECT s.* -FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1) s +FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1 ON rand()) s WHERE s.ds='2008-04-08' and s.hr='11'; INSERT OVERWRITE TABLE dest1 SELECT s.* -FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1) s +FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1 ON rand()) s WHERE s.ds='2008-04-08' and s.hr='11'; SELECT dest1.* FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_map.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_map.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_map.q (revision 0) @@ -0,0 +1,10 @@ +set hive.map.aggr=true; + +CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE; + +EXPLAIN +FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,4)) GROUP BY src.key; + +FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,4)) GROUP BY src.key; + +SELECT dest1.* FROM dest1; Property changes on: src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_map.q ___________________________________________________________________ Added: svn:executable + * Index: src/contrib/hive/ql/src/test/queries/clientpositive/inputddl2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/inputddl2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/inputddl2.q (working copy) @@ -1,6 +1,6 @@ EXPLAIN -CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING); -CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING); +CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING) STORED AS TEXTFILE; +CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING) STORED AS TEXTFILE; DESCRIBE INPUTDDL2; DROP TABLE INPUTDDL2; Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_limit.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_limit.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_limit.q (working copy) @@ -1,6 +1,6 @@ set mapred.reduce.tasks=31; -CREATE TABLE dest1(key INT, value DOUBLE); +CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE; EXPLAIN FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,4)) GROUP BY src.key LIMIT 5; Index: src/contrib/hive/ql/src/test/queries/clientpositive/sample5.q =================================================================== --- 
src/contrib/hive/ql/src/test/queries/clientpositive/sample5.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/sample5.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; -- no input pruning, sample filter EXPLAIN EXTENDED Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby3_map.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby3_map.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby3_map.q (revision 0) @@ -0,0 +1,12 @@ +set hive.map.aggr=true; + +CREATE TABLE dest1(c1 INT, c2 INT, c3 INT, c4 INT, c5 INT) STORED AS TEXTFILE; + +EXPLAIN +FROM src +INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,4)), avg(substr(src.value,4)), avg(DISTINCT substr(src.value,4)), max(substr(src.value,4)), min(substr(src.value,4)); + +FROM src +INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,4)), avg(substr(src.value,4)), avg(DISTINCT substr(src.value,4)), max(substr(src.value,4)), min(substr(src.value,4)); + +SELECT dest1.* FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/sample7.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/sample7.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/sample7.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; -- both input pruning and sample filter EXPLAIN EXTENDED Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby5_map.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby5_map.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby5_map.q (revision 0) @@ -0,0 +1,10 @@ +set hive.map.aggr=true; + +CREATE TABLE dest1(key INT) STORED AS TEXTFILE; + +EXPLAIN +FROM src INSERT OVERWRITE TABLE dest1 SELECT sum(src.key); + +FROM src INSERT OVERWRITE TABLE dest1 SELECT sum(src.key); + +SELECT dest1.* FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/inputddl6.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/inputddl6.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/inputddl6.q (working copy) @@ -2,7 +2,7 @@ -- test for describe extended table partition -- test for alter table drop partition DROP TABLE INPUTDDL6; -CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME); +CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE; LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09'); LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08'); DESCRIBE EXTENDED INPUTDDL6; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input16_cc.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input16_cc.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input16_cc.q (working copy) @@ -1,6 +1,7 @@ -- TestSerDe is a user defined serde where the default delimiter is Ctrl-B -- the user is overwriting it with ctrlC -CREATE TABLE INPUT16_CC(KEY STRING, VALUE 
STRING) ROW FORMAT SERIALIZER 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val'); +DROP TABLE INPUT16_CC; +CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE; LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC; SELECT INPUT16_CC.VALUE, INPUT16_CC.KEY FROM INPUT16_CC; DROP TABLE INPUT16_CC; Index: src/contrib/hive/ql/src/test/queries/clientpositive/cast1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/cast1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/cast1.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 INT, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 INT, c6 INT, c7 INT); +CREATE TABLE dest1(c1 INT, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 INT, c6 INT, c7 INT) STORED AS TEXTFILE; EXPLAIN FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86; Index: src/contrib/hive/ql/src/test/queries/clientpositive/inputddl8.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/inputddl8.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/inputddl8.q (revision 0) @@ -0,0 +1,8 @@ +DROP TABLE INPUTDDL8; +CREATE TABLE INPUTDDL8 COMMENT 'This is a thrift based table' + PARTITIONED BY(ds DATETIME, country STRING) + CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS + ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.ThriftDeserializer' WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex', 'serialization.format' = 'com.facebook.thrift.protocol.TBinaryProtocol') + STORED AS SEQUENCEFILE; +DESCRIBE EXTENDED INPUTDDL8; +DROP TABLE INPUTDDL8; Index: src/contrib/hive/ql/src/test/queries/clientpositive/quote1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/quote1.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/quote1.q (revision 0) @@ -0,0 +1,13 @@ +CREATE TABLE dest1(`location` INT, `type` STRING) PARTITIONED BY(`table` STRING) STORED AS TEXTFILE; + +EXPLAIN +FROM src +INSERT OVERWRITE TABLE dest1 PARTITION(`table`='2008-04-08') SELECT src.key as `partition`, src.value as `from` WHERE src.key >= 200 and src.key < 300; + +EXPLAIN +SELECT `table`.`location`, `table`.`type`, `table`.`table` FROM dest1 `table` WHERE `table`.`table` = '2008-04-08'; + +FROM src +INSERT OVERWRITE TABLE dest1 PARTITION(`table`='2008-04-08') SELECT src.key as `partition`, src.value as `from` WHERE src.key >= 200 and src.key < 300; + +SELECT `table`.`location`, `table`.`type`, `table`.`table` FROM dest1 `table` WHERE `table`.`table` = '2008-04-08'; Index: src/contrib/hive/ql/src/test/queries/clientpositive/notable_alias2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/notable_alias2.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/notable_alias2.q (revision 0) @@ -0,0 +1,10 @@ +CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE; + +EXPLAIN +FROM src +INSERT OVERWRITE TABLE dest1 SELECT
'1234', src.key, count(1) WHERE key < 100 group by src.key; + +FROM src +INSERT OVERWRITE TABLE dest1 SELECT '1234', src.key, count(1) WHERE key < 100 group by src.key; + +SELECT dest1.* FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input1.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE TEST1(A INT, B FLOAT); +CREATE TABLE TEST1(A INT, B FLOAT) STORED AS TEXTFILE; EXPLAIN DESCRIBE TEST1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/join2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join2.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key) Index: src/contrib/hive/ql/src/test/queries/clientpositive/input3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input3.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input3.q (working copy) @@ -1,9 +1,9 @@ DROP TABLE TEST3a; DROP TABLE TEST3b; DROP TABLE TEST3c; -CREATE TABLE TEST3a(A INT, B FLOAT); +CREATE TABLE TEST3a(A INT, B FLOAT) STORED AS TEXTFILE; DESCRIBE TEST3a; -CREATE TABLE TEST3b(A ARRAY, B FLOAT, C MAP); +CREATE TABLE TEST3b(A ARRAY, B FLOAT, C MAP) STORED AS TEXTFILE; DESCRIBE TEST3b; SHOW TABLES; EXPLAIN Index: src/contrib/hive/ql/src/test/queries/clientpositive/join4.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join4.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join4.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING); +CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE; EXPLAIN FROM ( Index: src/contrib/hive/ql/src/test/queries/clientpositive/input5.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input5.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input5.q (working copy) @@ -1,18 +1,18 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN FROM ( FROM src_thrift - SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring) AS (tkey, tvalue) - USING '/bin/cat' + SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring) + USING '/bin/cat' AS (tkey, tvalue) CLUSTER BY tkey ) tmap INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue; FROM ( FROM src_thrift - SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring) AS (tkey, tvalue) - USING '/bin/cat' + SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring) + USING '/bin/cat' AS (tkey, tvalue) CLUSTER BY tkey ) tmap INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue; Index: src/contrib/hive/ql/src/test/queries/clientpositive/join6.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join6.q (revision 712243) +++ 
src/contrib/hive/ql/src/test/queries/clientpositive/join6.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING); +CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE; EXPLAIN FROM ( Index: src/contrib/hive/ql/src/test/queries/clientpositive/input_dynamicserde.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input_dynamicserde.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input_dynamicserde.q (working copy) @@ -3,7 +3,8 @@ FIELDS TERMINATED BY '1' COLLECTION ITEMS TERMINATED BY '2' MAP KEYS TERMINATED BY '3' -LINES TERMINATED BY '10'; +LINES TERMINATED BY '10' +STORED AS TEXTFILE; EXPLAIN FROM src_thrift @@ -13,3 +14,5 @@ INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring; SELECT dest1.* FROM dest1; + +SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input7.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input7.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input7.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 DOUBLE, c2 INT); +CREATE TABLE dest1(c1 DOUBLE, c2 INT) STORED AS TEXTFILE; EXPLAIN FROM src1 Index: src/contrib/hive/ql/src/test/queries/clientpositive/join8.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join8.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join8.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING); +CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE; EXPLAIN FROM ( Index: src/contrib/hive/ql/src/test/queries/clientpositive/input9.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input9.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input9.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(value STRING, key INT); +CREATE TABLE dest1(value STRING, key INT) STORED AS TEXTFILE; EXPLAIN FROM src1 Index: src/contrib/hive/ql/src/test/queries/clientpositive/input_dfs.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input_dfs.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input_dfs.q (working copy) @@ -1,4 +1,2 @@ dfs -cat ../../../../build/contrib/hive/ql/test/data/files/kv1.txt; -set fs.default.name=file://src -dfs -ls Index: src/contrib/hive/ql/src/test/queries/clientpositive/udf1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/udf1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/udf1.q (working copy) @@ -1,7 +1,7 @@ CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING, c10 STRING, c11 STRING, c12 STRING, c13 STRING, - c14 STRING, c15 STRING, c16 STRING, c17 STRING); + c14 STRING, c15 STRING, c16 STRING, c17 STRING) STORED AS TEXTFILE; EXPLAIN FROM src INSERT OVERWRITE TABLE dest1 SELECT 'a' LIKE '%a%', 'b' LIKE '%a%', 'ab' LIKE '%a%', 'ab' LIKE '%a_', Index: 
src/contrib/hive/ql/src/test/queries/clientpositive/join10.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join10.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join10.q (revision 0) @@ -0,0 +1,6 @@ +EXPLAIN FROM +(SELECT src.* FROM src) x +JOIN +(SELECT src.* FROM src) Y +ON (x.key = Y.key) +SELECT Y.*; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input11.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input11.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input11.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN FROM src Index: src/contrib/hive/ql/src/test/queries/clientpositive/udf3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/udf3.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/udf3.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING); +CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING) STORED AS TEXTFILE; EXPLAIN FROM src INSERT OVERWRITE TABLE dest1 SELECT count(CAST('' AS INT)), sum(CAST('' AS INT)), avg(CAST('' AS INT)), Index: src/contrib/hive/ql/src/test/queries/clientpositive/noalias_subq1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/noalias_subq1.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/noalias_subq1.q (revision 0) @@ -0,0 +1,5 @@ +EXPLAIN +SELECT c1 FROM (select value as c1, key as c2 from src) x where c2 < 100; + +SELECT c1 FROM (select value as c1, key as c2 from src) x where c2 < 100; + Index: src/contrib/hive/ql/src/test/queries/clientpositive/join12.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join12.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join12.q (revision 0) @@ -0,0 +1,20 @@ +EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src) src2 +ON src1.c1 = src2.c3 AND src1.c1 < 100 +JOIN +(SELECT src.key as c5, src.value as c6 from src) src3 +ON src1.c1 = src3.c5 AND src3.c5 < 80; + +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src) src2 +ON src1.c1 = src2.c3 AND src1.c1 < 100 +JOIN +(SELECT src.key as c5, src.value as c6 from src) src3 +ON src1.c1 = src3.c5 AND src3.c5 < 80; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING, mapvalue STRING); +CREATE TABLE dest1(key INT, value STRING, mapvalue STRING) STORED AS TEXTFILE; EXPLAIN FROM src_thrift Index: src/contrib/hive/ql/src/test/queries/clientpositive/input13.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input13.q 
(revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input13.q (working copy) @@ -1,6 +1,6 @@ -CREATE TABLE dest1(key INT, value STRING); -CREATE TABLE dest2(key INT, value STRING); -CREATE TABLE dest3(key INT, value STRING) PARTITIONED BY(ds STRING, hr STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; +CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE; +CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE; EXPLAIN FROM src Index: src/contrib/hive/ql/src/test/queries/clientpositive/join14.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join14.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join14.q (revision 0) @@ -0,0 +1,10 @@ +CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE; + +EXPLAIN +FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100 +INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value; + +FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100 +INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value; + +select dest1.* from dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input15.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input15.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input15.q (working copy) @@ -1,7 +1,7 @@ EXPLAIN -CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'; +CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE; -CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'; +CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE; DESCRIBE TEST15; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input_part0.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input_part0.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input_part0.q (revision 0) @@ -0,0 +1,4 @@ +EXPLAIN +SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08'; + +SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08'; Index: src/contrib/hive/ql/src/test/queries/clientpositive/join16.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/join16.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/join16.q (revision 0) @@ -0,0 +1 @@ +EXPLAIN SELECT subq.key, tab.value FROM (select a.key, a.value from src a where a.key > 10 ) subq JOIN src tab ON (subq.key = tab.key and subq.key > 20 and subq.value = tab.value) where tab.value < 200; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input_part2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input_part2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input_part2.q (working copy) @@ -1,5 +1,5 @@ -CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING); -CREATE TABLE dest2(key INT, value STRING, hr STRING, ds STRING); +CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING) STORED AS TEXTFILE; +CREATE TABLE dest2(key INT, value STRING, hr STRING, ds STRING) 
STORED AS TEXTFILE; EXPLAIN EXTENDED FROM srcpart Index: src/contrib/hive/ql/src/test/queries/clientpositive/input17.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input17.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input17.q (revision 0) @@ -0,0 +1,20 @@ +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; + +EXPLAIN +FROM ( + FROM src_thrift + SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0]) + USING '/bin/cat' AS (tkey, tvalue) + CLUSTER BY tkey +) tmap +INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue; + +FROM ( + FROM src_thrift + SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0]) + USING '/bin/cat' AS (tkey, tvalue) + CLUSTER BY tkey +) tmap +INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue; + +SELECT dest1.* FROM dest1; Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby1.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value DOUBLE); +CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE; EXPLAIN FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,4)) GROUP BY src.key; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input19.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input19.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input19.q (revision 0) @@ -0,0 +1,3 @@ +create table apachelog(ipaddress STRING,identd STRING,user STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES ( 'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol', 'quote.delim'= '("|\\[|\\])', 'field.delim'=' ', 'serialization.null.format'='-' ) STORED AS TEXTFILE; +LOAD DATA LOCAL INPATH '../data/files/apache.access.log' INTO TABLE apachelog; +SELECT a.* FROM apachelog a; Index: src/contrib/hive/ql/src/test/queries/clientpositive/input_part4.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/input_part4.q (revision 0) +++ src/contrib/hive/ql/src/test/queries/clientpositive/input_part4.q (revision 0) @@ -0,0 +1,4 @@ +EXPLAIN +SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.hr = 15; + +SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.hr = 15; Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby3.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby3.q (working copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(c1 INT, c2 INT, c3 INT, c4 INT, c5 INT); +CREATE TABLE dest1(c1 INT, c2 INT, c3 INT, c4 INT, c5 INT) STORED AS TEXTFILE; EXPLAIN FROM src Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby5.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby5.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby5.q (working 
copy) @@ -1,4 +1,4 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; EXPLAIN INSERT OVERWRITE TABLE dest1 Index: src/contrib/hive/ql/src/test/queries/clientpositive/groupby7.q =================================================================== --- src/contrib/hive/ql/src/test/queries/clientpositive/groupby7.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/clientpositive/groupby7.q (working copy) @@ -1,9 +1,9 @@ -CREATE TABLE DEST1(key INT, value STRING); -CREATE TABLE DEST2(key INT, value STRING); +CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE; +CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE; FROM SRC INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,4)) GROUP BY SRC.key INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,4)) GROUP BY SRC.key; SELECT DEST1.* FROM DEST1; -SELECT DEST1.* FROM DEST2; +SELECT DEST2.* FROM DEST2; Index: src/contrib/hive/ql/src/test/queries/negative/wrong_distinct3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/negative/wrong_distinct3.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/negative/wrong_distinct3.q (working copy) @@ -1,3 +0,0 @@ -FROM SRC -INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,4)) GROUP BY SRC.key -INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key Index: src/contrib/hive/ql/src/test/queries/negative/notable_alias1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/negative/notable_alias1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/negative/notable_alias1.q (working copy) @@ -1,2 +0,0 @@ -FROM src -INSERT OVERWRITE TABLE dest1 SELECT '1234', value WHERE src.key < 100 group by src.key Index: src/contrib/hive/ql/src/test/queries/negative/notable_alias2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/negative/notable_alias2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/negative/notable_alias2.q (working copy) @@ -1,2 +0,0 @@ -FROM src -INSERT OVERWRITE TABLE dest1 SELECT '1234', src.value WHERE key < 100 group by src.key Index: src/contrib/hive/ql/src/test/queries/negative/notable_alias3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/negative/notable_alias3.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/negative/notable_alias3.q (working copy) @@ -1,2 +0,0 @@ -FROM src -INSERT OVERWRITE TABLE dest1 SELECT '1234', src.value WHERE src.key < 100 group by key Index: src/contrib/hive/ql/src/test/queries/negative/quoted_string.q =================================================================== --- src/contrib/hive/ql/src/test/queries/negative/quoted_string.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/negative/quoted_string.q (working copy) @@ -1,2 +1,2 @@ FROM src -INSERT OVERWRITE TABLE dest1 SELECT "1234", src.value WHERE src.key < 100 +INSERT OVERWRITE TABLE dest1 SELECT '1234", src.value WHERE src.key < 100 Index: src/contrib/hive/ql/src/test/queries/negative/insert_wrong_number_columns.q =================================================================== --- src/contrib/hive/ql/src/test/queries/negative/insert_wrong_number_columns.q (revision 0) +++ 
src/contrib/hive/ql/src/test/queries/negative/insert_wrong_number_columns.q (revision 0) @@ -0,0 +1,2 @@ +FROM src +INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value, 1 WHERE src.key < 100 Index: src/contrib/hive/ql/src/test/queries/positive/sample1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/sample1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/sample1.q (working copy) @@ -1,5 +1,5 @@ -- no input pruning, no sample filter -INSERT OVERWRITE TABLE dest1 SELECT s.* -FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1) s +SELECT s.* +FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1 ON rand()) s WHERE s.ds='2008-04-08' and s.hr='11' Index: src/contrib/hive/ql/src/test/queries/positive/cast1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/cast1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/cast1.q (working copy) @@ -1,2 +1,2 @@ FROM src -INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86 +SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86 Index: src/contrib/hive/ql/src/test/queries/positive/input2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/input2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/input2.q (working copy) @@ -1,4 +1,4 @@ FROM src INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100 INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200 -INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 +INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key, 2 WHERE src.key >= 200 Index: src/contrib/hive/ql/src/test/queries/positive/input3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/input3.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/input3.q (working copy) @@ -1,5 +1,5 @@ FROM src INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100 INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200 -INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300 +INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key, 2 WHERE src.key >= 200 and src.key < 300 INSERT OVERWRITE DIRECTORY '../../../../build/contrib/hive/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300 Index: src/contrib/hive/ql/src/test/queries/positive/input4.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/input4.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/input4.q (working copy) @@ -1,7 +1,7 @@ FROM ( FROM src - SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue) - USING '/bin/cat' + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) CLUSTER BY tkey ) tmap INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 Index: src/contrib/hive/ql/src/test/queries/positive/join4.q =================================================================== --- 
src/contrib/hive/ql/src/test/queries/positive/join4.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/join4.q (working copy) @@ -10,5 +10,5 @@ ON (a.c1 = b.c3) SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4 ) c -INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 +SELECT c.c1, c.c2, c.c3, c.c4 Index: src/contrib/hive/ql/src/test/queries/positive/input5.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/input5.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/input5.q (working copy) @@ -1,7 +1,7 @@ FROM ( FROM src_thrift - SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring) AS (tkey, tvalue) - USING '/bin/cat' + SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring) + USING '/bin/cat' AS (tkey, tvalue) CLUSTER BY tkey ) tmap INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue Index: src/contrib/hive/ql/src/test/queries/positive/join5.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/join5.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/join5.q (working copy) @@ -10,6 +10,6 @@ ON (a.c1 = b.c3) SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4 ) c -INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 +SELECT c.c1, c.c2, c.c3, c.c4 Index: src/contrib/hive/ql/src/test/queries/positive/input_testxpath2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/input_testxpath2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/input_testxpath2.q (working copy) @@ -1,2 +1,2 @@ FROM src_thrift -INSERT OVERWRITE TABLE dest1 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL) +SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL) Index: src/contrib/hive/ql/src/test/queries/positive/join6.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/join6.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/join6.q (working copy) @@ -10,7 +10,7 @@ ON (a.c1 = b.c3) SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4 ) c -INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 +SELECT c.c1, c.c2, c.c3, c.c4 Index: src/contrib/hive/ql/src/test/queries/positive/join7.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/join7.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/join7.q (working copy) @@ -15,7 +15,7 @@ ON (a.c1 = c.c5) SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4, c.c5 AS c5, c.c6 AS c6 ) c -INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6 +SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6 Index: src/contrib/hive/ql/src/test/queries/positive/input8.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/input8.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/input8.q (working copy) @@ -1,2 +1,2 @@ FROM src1 -INSERT OVERWRITE TABLE dest1 SELECT 4 + NULL, src1.key - NULL, NULL + NULL +SELECT 4 + NULL, src1.key - NULL, NULL + NULL Index: 
src/contrib/hive/ql/src/test/queries/positive/join8.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/join8.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/join8.q (working copy) @@ -10,5 +10,5 @@ ON (a.c1 = b.c3) SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4 ) c -INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL +SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL Index: src/contrib/hive/ql/src/test/queries/positive/udf1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/udf1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/udf1.q (working copy) @@ -1,4 +1,4 @@ -FROM src INSERT OVERWRITE TABLE dest1 SELECT 'a' LIKE '%a%', 'b' LIKE '%a%', 'ab' LIKE '%a%', 'ab' LIKE '%a_', +FROM src SELECT 'a' LIKE '%a%', 'b' LIKE '%a%', 'ab' LIKE '%a%', 'ab' LIKE '%a_', '%_' LIKE '\%\_', 'ab' LIKE '\%\_', 'ab' LIKE '_a%', 'ab' LIKE 'a', '' RLIKE '.*', 'a' RLIKE '[ab]', '' RLIKE '[ab]', 'hadoop' RLIKE '[a-z]*', 'hadoop' RLIKE 'o*', REGEXP_REPLACE('abc', 'b', 'c'), REGEXP_REPLACE('abc', 'z', 'a'), REGEXP_REPLACE('abbbb', 'bb', 'b'), REGEXP_REPLACE('hadoop', '(.)[a-z]*', '$1ive') Index: src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q (working copy) @@ -1,2 +1,2 @@ FROM src_thrift -INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2'] +SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2'] Index: src/contrib/hive/ql/src/test/queries/positive/input_part1.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/input_part1.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/input_part1.q (working copy) @@ -1,2 +1,2 @@ FROM srcpart -INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12' +SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12' Index: src/contrib/hive/ql/src/test/queries/positive/groupby2.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/groupby2.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/groupby2.q (working copy) @@ -1,2 +1,2 @@ FROM src -INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,0,1), count(DISTINCT substr(src.value,4)), concat(substr(src.key,0,1),sum(substr(src.value,4))) GROUP BY substr(src.key,0,1) +SELECT substr(src.key,0,1), count(DISTINCT substr(src.value,4)), concat(substr(src.key,0,1),sum(substr(src.value,4))) GROUP BY substr(src.key,0,1) Index: src/contrib/hive/ql/src/test/queries/positive/groupby3.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/groupby3.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/groupby3.q (working copy) @@ -1,2 +1,2 @@ FROM src -INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,4)), 
avg(substr(src.value,4)), avg(DISTINCT substr(src.value,4)), max(substr(src.value,4)), min(substr(src.value,4)) +SELECT sum(substr(src.value,4)), avg(substr(src.value,4)), avg(DISTINCT substr(src.value,4)), max(substr(src.value,4)), min(substr(src.value,4)) Index: src/contrib/hive/ql/src/test/queries/positive/groupby4.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/groupby4.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/groupby4.q (working copy) @@ -1,2 +1,2 @@ FROM src -INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,0,1) GROUP BY substr(src.key,0,1) +SELECT substr(src.key,0,1) GROUP BY substr(src.key,0,1) Index: src/contrib/hive/ql/src/test/queries/positive/groupby5.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/groupby5.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/groupby5.q (working copy) @@ -1,4 +1,4 @@ -INSERT OVERWRITE TABLE dest1 + SELECT src.key, sum(substr(src.value,4)) FROM src GROUP BY src.key Index: src/contrib/hive/ql/src/test/queries/positive/groupby6.q =================================================================== --- src/contrib/hive/ql/src/test/queries/positive/groupby6.q (revision 712243) +++ src/contrib/hive/ql/src/test/queries/positive/groupby6.q (working copy) @@ -1,2 +1,2 @@ FROM src -INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,4,1) +SELECT DISTINCT substr(src.value,4,1) Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (working copy) @@ -23,6 +23,7 @@ import java.util.AbstractMap; import java.util.ArrayList; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; @@ -40,13 +41,13 @@ import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; -import org.apache.hadoop.hive.serde2.SerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; -import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.InputFormat; @@ -129,6 +130,14 @@ sd.getSerdeInfo().setParameters(new HashMap()); } + public void reinitSerDe() throws HiveException { + try { + deserializer = MetaStoreUtils.getDeserializer(Hive.get().getConf(), this.getTTable()); + } catch (MetaException e) { + throw new HiveException(e); + } + } + protected void initSerDe() throws HiveException { if (deserializer == null) { try { @@ -138,13 +147,16 @@ } } } - + public void checkValidity() throws HiveException { // check for validity String name = getTTable().getTableName(); if (null == name || name.length() == 0 || !MetaStoreUtils.validateName(name)) { throw new HiveException("[" + name + "]: is not a 
valid table name"); } + if (0 == getCols().size()) { + throw new HiveException("atleast one column must be specified for the table"); + } if (null == getDeserializer()) { throw new HiveException("must specify a non-null serDe"); } @@ -154,6 +166,30 @@ if (null == getOutputFormatClass()) { throw new HiveException("must specify an OutputFormat class"); } + + Iterator iterCols = getCols().iterator(); + List colNames = new ArrayList(); + while (iterCols.hasNext()) { + String colName = iterCols.next().getName(); + Iterator iter = colNames.iterator(); + while (iter.hasNext()) { + String oldColName = iter.next(); + if (colName.equalsIgnoreCase(oldColName)) + throw new HiveException("Duplicate column name " + colName + " in the table definition."); + } + colNames.add(colName.toLowerCase()); + } + + if (getPartCols() != null) + { + // there is no overlap between columns and partitioning columns + Iterator partColsIter = getPartCols().iterator(); + while (partColsIter.hasNext()) { + String partCol = partColsIter.next().getName(); + if(colNames.contains(partCol.toLowerCase())) + throw new HiveException("Partition collumn name " + partCol + " conflicts with table columns."); + } + } return; } @@ -190,6 +226,13 @@ } final public Deserializer getDeserializer() { + if(deserializer == null) { + try { + initSerDe(); + } catch (HiveException e) { + LOG.error("Error in initializing serde.", e); + } + } return deserializer; } @@ -360,9 +403,30 @@ } public List getCols() { - return getTTable().getSd().getCols(); + boolean isNative = SerDeUtils.isNativeSerDe(getSerializationLib()); + if (isNative) + return getTTable().getSd().getCols(); + else { + try { + return Hive.getFieldsFromDeserializer(getName(), getDeserializer()); + } catch (HiveException e) { + LOG.error("Unable to get field from serde: " + getSerializationLib(), e); + } + return new ArrayList(); + } } + /** + * Returns a list of all the columns of the table (data columns + partition columns in that order. 
public void setPartCols(List<FieldSchema> partCols) { getTTable().setPartitionKeys(partCols); } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (working copy) @@ -36,7 +36,6 @@ import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; /** @@ -205,6 +204,10 @@ return(ret); } + public Path getPartitionPath() { + return this.partPath; + } + final public URI getDataLocation() { return this.partURI; } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (working copy) @@ -38,13 +38,14 @@ import org.apache.hadoop.hive.metastore.MetaStoreClient; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; -import org.apache.hadoop.hive.metastore.api.Constants; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.UnknownTableException; import org.apache.hadoop.hive.ql.parse.ParseDriver; +import org.apache.hadoop.hive.serde2.Deserializer; +import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.InputFormat; @@ -170,6 +171,7 @@ } tbl.setSerializationLib(org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName()); tbl.setNumBuckets(bucketCount); + tbl.setBucketCols(bucketCols); createTable(tbl); } @@ -196,6 +198,9 @@ public void createTable(Table tbl) throws HiveException { try { tbl.initSerDe(); + if(tbl.getCols().size() == 0) { + tbl.setFields(MetaStoreUtils.getFieldsFromDeserializer(tbl.getName(), tbl.getDeserializer())); + } tbl.checkValidity(); msc.createTable(tbl.getTTable()); } catch (Exception e) { @@ -652,5 +657,16 @@ } return new MetaStoreClient(this.conf); } + + public static List<FieldSchema> getFieldsFromDeserializer(String name, Deserializer serde) throws HiveException { + try { + return MetaStoreUtils.getFieldsFromDeserializer(name, serde); + } catch (SerDeException e) { + throw new HiveException("Error in getting fields from serde.", e); + } catch (MetaException e) { + throw new HiveException("Error in getting fields from serde.", e); + } + } + }; Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (revision 0) +++
src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (revision 0) @@ -0,0 +1,74 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hive.ql.parse.ParseContext; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Implementation of the optimizer + */ +public class Optimizer { + private ParseContext pctx; + private List<Transform> transformations; + + /** + * empty constructor + */ + public Optimizer() { + } + + /** + * create the list of transformations + */ + public void initialize() { + transformations = new ArrayList<Transform>(); + transformations.add(new ColumnPruner()); + } + + /** + * invoke all the transformations one-by-one, and alter the query plan + * @return ParseContext + * @throws SemanticException + */ + public ParseContext optimize() throws SemanticException { + for (Transform t : transformations) + pctx = t.transform(pctx); + return pctx; + } + + /** + * @return the pctx + */ + public ParseContext getPctx() { + return pctx; + } + + /** + * @param pctx the pctx to set + */ + public void setPctx(ParseContext pctx) { + this.pctx = pctx; + } + + +} Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java (revision 0) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java (revision 0) @@ -0,0 +1,168 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.optimizer; + +import org.apache.hadoop.hive.ql.parse.ParseContext; +import org.apache.hadoop.hive.ql.parse.QB; +import org.apache.hadoop.hive.ql.parse.OpParseContext; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory; +import org.apache.hadoop.hive.ql.parse.RowResolver; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.OperatorFactory; +import org.apache.hadoop.hive.ql.plan.exprNodeDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc; +import org.apache.hadoop.hive.ql.plan.selectDesc; +import org.apache.hadoop.hive.ql.exec.ColumnInfo; +import org.apache.hadoop.hive.ql.exec.RowSchema; +import java.io.Serializable; +import java.util.List; +import java.util.Iterator; +import java.util.ArrayList; + +/** + * Implementation of one of the rule-based optimization steps. ColumnPruner gets the current operator tree. The tree is traversed to find out the columns used + * for all the base tables. If all the columns for a table are not used, a select is pushed on top of that table (to select only those columns). Since this + * changes the row resolver, the tree is built again. This can be optimized later to patch the tree. + */ +public class ColumnPruner implements Transform { + private ParseContext pctx; + + /** + * empty constructor + */ + public ColumnPruner() { + pctx = null; + } + + /** + * Whether some column pruning needs to be done + * @param op Operator for the base table + * @param colNames columns needed by the query + * @return boolean + */ + private boolean pushSelect(Operator<? extends Serializable> op, List<String> colNames) { + if (pctx.getOpParseCtx().get(op).getRR().getColumnInfos().size() == colNames.size()) return false; + return true; + } + + /** + * update the map between operator and row resolver + * @param op operator being inserted + * @param rr row resolver of the operator + * @return the operator, now registered against its row resolver + */ + @SuppressWarnings("nls") + private Operator<? extends Serializable> putOpInsertMap(Operator<? extends Serializable> op, RowResolver rr) { + OpParseContext ctx = new OpParseContext(rr); + pctx.getOpParseCtx().put(op, ctx); + return op; + } + + /** + * insert a select to include only columns needed by the query + * @param input operator for the base table + * @param colNames columns needed + * @return the new select operator + * @throws SemanticException + */ + @SuppressWarnings("nls") + private Operator<? extends Serializable> genSelectPlan(Operator<? extends Serializable> input, List<String> colNames) + throws SemanticException { + + RowResolver inputRR = pctx.getOpParseCtx().get(input).getRR(); + RowResolver outputRR = new RowResolver(); + ArrayList<exprNodeDesc> col_list = new ArrayList<exprNodeDesc>(); + + // Iterate over the selects + for (int pos = 0; pos < colNames.size(); pos++) { + String internalName = colNames.get(pos); + String[] colName = inputRR.reverseLookup(internalName); + ColumnInfo in = inputRR.get(colName[0], colName[1]); + outputRR.put(colName[0], colName[1], + new ColumnInfo((Integer.valueOf(pos)).toString(), in.getType())); + col_list.add(new exprNodeColumnDesc(in.getType(), internalName)); + } + + Operator<? extends Serializable> output = putOpInsertMap(OperatorFactory.getAndMakeChild( + new selectDesc(col_list), new RowSchema(outputRR.getColumnInfos()), input), outputRR); + + return output; + } + + /** + * reset parse context + * @param pctx parse context + */ + private void resetParseContext(ParseContext pctx) { + pctx.getAliasToPruner().clear(); + pctx.getAliasToSamplePruner().clear(); + pctx.getLoadTableWork().clear(); + pctx.getLoadFileWork().clear();
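+ // Note that only derived state is cleared here: the pruners, the pending + // load work, and (below) the operator contexts of operators hanging under + // the retained tops. The table scans in topOps and the selects already + // pushed into topSelOps survive, so the semantic analyzer can rebuild the + // plan on top of them.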
+ Iterator<Operator<? extends Serializable>> iter = pctx.getOpParseCtx().keySet().iterator(); + while (iter.hasNext()) { + Operator<? extends Serializable> op = iter.next(); + if ((!pctx.getTopOps().containsValue(op)) && (!pctx.getTopSelOps().containsValue(op))) + iter.remove(); + } + } + + /** + * Transform the query tree. For each table under consideration, check if all columns are needed. If not, push a select that projects only + * the needed columns at the beginning and proceed. + */ + public ParseContext transform(ParseContext pactx) throws SemanticException { + this.pctx = pactx; + boolean done = true; + // generate useful columns for all the sources so that they can be pushed immediately after the table scan + for (String alias_id : pctx.getTopOps().keySet()) { + Operator<? extends Serializable> topOp = pctx.getTopOps().get(alias_id); + + // Scan the tree bottom-up and generate columns needed for the top operator + List<String> colNames = topOp.genColLists(pctx.getOpParseCtx()); + + // do we need to push a SELECT - all the columns of the table are not used + if (pushSelect(topOp, colNames)) { + topOp.setChildOperators(null); + + // Generate a select and make it a child of the table scan + Operator<? extends Serializable> select = genSelectPlan(topOp, colNames); + pctx.getTopSelOps().put(alias_id, select); + done = false; + } + } + + // a select was pushed on top of the table. The old plan is no longer valid. Generate the plan again. + // The current tables and the select pushed above (after column pruning) are maintained in the parse context. + if (!done) { + SemanticAnalyzer sem = (SemanticAnalyzer)SemanticAnalyzerFactory.get(pctx.getConf(), pctx.getParseTree()); + + resetParseContext(pctx); + sem.init(pctx); + QB qb = new QB(null, null, false); + + sem.doPhase1(pctx.getParseTree(), qb, sem.initPhase1Ctx()); + sem.getMetaData(qb); + sem.genPlan(qb); + pctx = sem.getParseContext(); + } + return pctx; + } +} Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Transform.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Transform.java (revision 0) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Transform.java (revision 0) @@ -0,0 +1,36 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer; + +import org.apache.hadoop.hive.ql.parse.ParseContext; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Optimizer interface. All the rule-based optimizations implement this interface. All the transformations are invoked sequentially. They take the current + * parse context (which contains the operator tree among other things), perform all the optimizations, and then return the updated parse context.
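+ * + * For instance, with transformations t1 and t2 registered, Optimizer.optimize() + * above effectively computes pctx = t2.transform(t1.transform(pctx)); each rule + * must therefore leave the parse context in a state that every later rule (and + * the plan generator) can consume.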
+ */ +public interface Transform { + /** + * All transformation steps implement this interface + * @param pctx input parse context + * @return ParseContext + * @throws SemanticException + */ + public ParseContext transform(ParseContext pctx) throws SemanticException; +} Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (working copy) @@ -26,14 +26,19 @@ import org.apache.log4j.*; import java.net.URL; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.commons.lang.StringUtils; public class SessionState { - + + public static Log LOG = LogFactory.getLog("SessionState"); + public static LogHelper console = new LogHelper(LOG); + /** * current configuration */ @@ -146,11 +151,13 @@ public static SessionState start(HiveConf conf) { ss = new SessionState (conf); ss.getConf().setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId()); + console = new LogHelper(LOG); return (ss); } public static SessionState start(SessionState startSs) { ss = startSs; + console = new LogHelper(LOG); ss.getConf().setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId()); return ss; } @@ -162,6 +169,10 @@ return ss; } + public static LogHelper getConsole() { + return console; + } + private static String makeSessionId() { GregorianCalendar gc = new GregorianCalendar(); String userid = System.getProperty("user.name"); @@ -242,4 +253,107 @@ LOG.error(error + StringUtils.defaultString(detail)); } } + + public static String validateFile(Set curFiles, String newFile) { + SessionState ss = SessionState.get(); + LogHelper console = SessionState.getConsole(); + Configuration conf = (ss == null) ? 
new Configuration() : ss.getConf(); + + try { + if(Utilities.realFile(newFile, conf) != null) + return newFile; + else { + console.printError(newFile + " does not exist"); + return null; + } + } catch (IOException e) { + console.printError("Unable to validate " + newFile + "\nException: " + e.getMessage(), + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); + return null; + } + } + + public static interface ResourceHook { + public String preHook(Set<String> cur, String s); + } + + public static enum ResourceType { + FILE(new ResourceHook () { + public String preHook(Set<String> cur, String s) { return validateFile(cur, s); } + }); + + public ResourceHook hook; + + ResourceType(ResourceHook hook) { + this.hook = hook; + } + }; + + public static ResourceType find_resource_type(String s) { + + s = s.trim().toUpperCase(); + + try { + return ResourceType.valueOf(s); + } catch (IllegalArgumentException e) { + } + + // try singular + if(s.endsWith("S")) { + s = s.substring(0, s.length()-1); + } else { + return null; + } + + try { + return ResourceType.valueOf(s); + } catch (IllegalArgumentException e) { + } + return null; + } + + private HashMap<ResourceType, HashSet<String>> resource_map = new HashMap<ResourceType, HashSet<String>> (); + + public void add_resource(ResourceType t, String value) { + if(resource_map.get(t) == null) { + resource_map.put(t, new HashSet<String> ()); + } + + String fnlVal = value; + if(t.hook != null) { + fnlVal = t.hook.preHook(resource_map.get(t), value); + if(fnlVal == null) + return; + } + resource_map.get(t).add(fnlVal); + } + + public boolean delete_resource(ResourceType t, String value) { + if(resource_map.get(t) == null) { + return false; + } + return (resource_map.get(t).remove(value)); + } + + public Set<String> list_resource(ResourceType t, List<String> filter) { + if(resource_map.get(t) == null) { + return null; + } + Set<String> orig = resource_map.get(t); + if(filter == null) { + return orig; + } else { + Set<String> fnl = new HashSet<String> (); + for(String one: orig) { + if(filter.contains(one)) { + fnl.add(one); + } + } + return fnl; + } + } + + public void delete_resource(ResourceType t) { + resource_map.remove (t); + } } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (working copy) @@ -18,9 +18,12 @@ package org.apache.hadoop.hive.ql.exec; +import java.util.Arrays; import java.util.HashMap; import java.util.ArrayList; import java.util.List; +import java.util.Iterator; +import java.util.Map; import java.io.Serializable; import java.lang.reflect.Method; @@ -32,6 +35,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.parse.RowResolver; +import org.apache.hadoop.hive.ql.parse.OpParseContext; /** * GroupBy operator implementation.
@@ -61,98 +66,112 @@ transient protected HashMap, UDAF[]> hashAggregations; transient boolean firstRow; - + transient long totalMemory; + transient boolean hashAggr; + public void initialize(Configuration hconf) throws HiveException { super.initialize(hconf); - try { - // init keyFields - keyFields = new ExprNodeEvaluator[conf.getKeys().size()]; - for (int i = 0; i < keyFields.length; i++) { - keyFields[i] = ExprNodeEvaluatorFactory.get(conf.getKeys().get(i)); + totalMemory = Runtime.getRuntime().totalMemory(); + + // init keyFields + keyFields = new ExprNodeEvaluator[conf.getKeys().size()]; + for (int i = 0; i < keyFields.length; i++) { + keyFields[i] = ExprNodeEvaluatorFactory.get(conf.getKeys().get(i)); + } + + // init aggregationParameterFields + aggregationParameterFields = new ExprNodeEvaluator[conf.getAggregators().size()][]; + for (int i = 0; i < aggregationParameterFields.length; i++) { + ArrayList parameters = conf.getAggregators().get(i).getParameters(); + aggregationParameterFields[i] = new ExprNodeEvaluator[parameters.size()]; + for (int j = 0; j < parameters.size(); j++) { + aggregationParameterFields[i][j] = ExprNodeEvaluatorFactory.get(parameters.get(j)); } - - // init aggregationParameterFields - aggregationParameterFields = new ExprNodeEvaluator[conf.getAggregators().size()][]; - for (int i = 0; i < aggregationParameterFields.length; i++) { - ArrayList parameters = conf.getAggregators().get(i).getParameters(); - aggregationParameterFields[i] = new ExprNodeEvaluator[parameters.size()]; - for (int j = 0; j < parameters.size(); j++) { - aggregationParameterFields[i][j] = ExprNodeEvaluatorFactory.get(parameters.get(j)); + } + // init aggregationIsDistinct + aggregationIsDistinct = new boolean[conf.getAggregators().size()]; + for(int i=0; i[]) new Class[conf.getAggregators().size()]; + for (int i = 0; i < conf.getAggregators().size(); i++) { + aggregationDesc agg = conf.getAggregators().get(i); + aggregationClasses[i] = agg.getAggregationClass(); + } + + // init aggregations, aggregationsAggregateMethods, + // aggregationsEvaluateMethods + aggregationsAggregateMethods = new Method[aggregationClasses.length]; + aggregationsEvaluateMethods = new Method[aggregationClasses.length]; + String evaluateMethodName = ((conf.getMode() == groupByDesc.Mode.PARTIAL1 || conf.getMode() == groupByDesc.Mode.HASH || + conf.getMode() == groupByDesc.Mode.PARTIAL2) + ? "evaluatePartial" : "evaluate"); + + for(int i=0; i[]) new Class[conf.getAggregators().size()]; - for (int i = 0; i < conf.getAggregators().size(); i++) { - aggregationDesc agg = conf.getAggregators().get(i); - aggregationClasses[i] = agg.getAggregationClass(); - } - - // init aggregations, aggregationsAggregateMethods, // aggregationsEvaluateMethods - aggregationsAggregateMethods = new Method[aggregationClasses.length]; - aggregationsEvaluateMethods = new Method[aggregationClasses.length]; - String aggregateMethodName = (conf.getMode() == groupByDesc.Mode.PARTIAL2 - ? "aggregatePartial" : "aggregate"); - String evaluateMethodName = ((conf.getMode() == groupByDesc.Mode.PARTIAL1 || conf.getMode() == groupByDesc.Mode.HASH) - ? 
"evaluatePartial" : "evaluate"); - for(int i=0; i, UDAF[]>(); + if (null == aggregationsEvaluateMethods[i]) { + throw new HiveException("Cannot find " + evaluateMethodName + " method of UDAF class " + + aggregationClasses[i].getName() + "!"); } - // init objectInspectors - int totalFields = keyFields.length + aggregationClasses.length; - objectInspectors = new ArrayList(totalFields); - for(int i=0; i, UDAF[]>(); + hashAggr = true; + } + // init objectInspectors + int totalFields = keyFields.length + aggregationClasses.length; + objectInspectors = new ArrayList(totalFields); + for(int i=0; i newKeys) throws HiveException { + // Prepare aggs for updating + UDAF[] aggs = null; + boolean newEntry = false; + + // hash-based aggregations + aggs = hashAggregations.get(newKeys); + if (aggs == null) { + aggs = newAggregations(); + hashAggregations.put(newKeys, aggs); + newEntry = true; + } + + // Update the aggs + updateAggregations(aggs, row, rowInspector, true, newEntry, null); + + // currently, we use a simple approximation - if 90% of memory is being + // used, flush + long freeMemory = Runtime.getRuntime().freeMemory(); + if (shouldBeFlushed(totalMemory, freeMemory)) { + flush(); + } + } + + private void processAggr(Object row, ObjectInspector rowInspector, ArrayList newKeys) throws HiveException { + // Prepare aggs for updating + UDAF[] aggs = null; + Object[][] lastInvoke = null; + boolean keysAreEqual = newKeys.equals(currentKeys); + + // forward the current keys if needed for sort-based aggregation + if (currentKeys != null && !keysAreEqual) + forward(currentKeys, aggregations); + + // Need to update the keys? + if (currentKeys == null || !keysAreEqual) { + currentKeys = newKeys; + + // init aggregations + for(UDAF aggregation: aggregations) + aggregation.init(); + + // clear parameters in last-invoke + for(int i=0; i= total) + return true; + return false; + } + + private void flush() throws HiveException { + // Currently, the algorithm flushes 10% of the entries - this can be + // changed in the future + + int oldSize = hashAggregations.size(); + Iterator iter = hashAggregations.entrySet().iterator(); + int numDel = 0; + while (iter.hasNext()) { + Map.Entry, UDAF[]> m = (Map.Entry)iter.next(); + forward(m.getKey(), m.getValue()); + iter.remove(); + numDel++; + if (numDel * 10 >= oldSize) + return; + } + } + /** * Forward a record of keys and aggregation results. 
* @@ -262,14 +338,19 @@ * The keys in the record * @throws HiveException */ - protected void forward(ArrayList keys, UDAF[] aggs) throws Exception { + protected void forward(ArrayList keys, UDAF[] aggs) throws HiveException { int totalFields = keys.size() + aggs.length; List a = new ArrayList(totalFields); for(int i=0; i genColLists(HashMap, OpParseContext> opParseCtx) { + List colLists = new ArrayList(); + ArrayList keys = conf.getKeys(); + for (exprNodeDesc key : keys) + colLists = Utilities.mergeUniqElems(colLists, key.getCols()); + + ArrayList aggrs = conf.getAggregators(); + for (aggregationDesc aggr : aggrs) { + ArrayList params = aggr.getParameters(); + for (exprNodeDesc param : params) + colLists = Utilities.mergeUniqElems(colLists, param.getCols()); + } + + return colLists; + } } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java (working copy) @@ -26,6 +26,9 @@ import java.util.Stack; import java.util.Vector; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.exprNodeDesc; @@ -42,7 +45,9 @@ */ public class JoinOperator extends Operator implements Serializable { - // a list of value expressions for each alias are maintained + static final private Log LOG = LogFactory.getLog(JoinOperator.class.getName()); + + // a list of value expressions for each alias are maintained public static class JoinExprMap { ExprNodeEvaluator[] valueFields; @@ -56,62 +61,79 @@ } - public static class IntermediateObject{ + public static class IntermediateObject { ArrayList[] objs; int curSize; public IntermediateObject(ArrayList[] objs, int curSize) { - this.objs = objs; + this.objs = objs; this.curSize = curSize; } - public ArrayList[] getObjs() { return objs; } - public int getCurSize() { return curSize; } - public void pushObj(ArrayList obj) { objs[curSize++] = obj; } - public void popObj() { curSize--; } + public ArrayList[] getObjs() { + return objs; + } + + public int getCurSize() { + return curSize; + } + + public void pushObj(ArrayList obj) { + objs[curSize++] = obj; + } + + public void popObj() { + curSize--; + } } transient protected int numValues; // number of aliases transient static protected ExprNodeEvaluator aliasField; + transient static protected ExprNodeEvaluator keyField; transient protected HashMap joinExprs; - transient static protected Byte[] order; // order in which the results should be outputted + transient static protected Byte[] order; // order in which the results should + // be outputted transient protected joinCond[] condn; transient protected boolean noOuterJoin; - transient private Object[] dummyObj; // for outer joins, contains the potential nulls for the concerned aliases + transient private Object[] dummyObj; // for outer joins, contains the + // potential nulls for the concerned + // aliases transient private Vector>[] dummyObjVectors; transient private Stack>> iterators; transient private int totalSz; // total size of the composite object transient ObjectInspector joinOutputObjectInspector; - - static - { - aliasField = ExprNodeEvaluatorFactory.get(new 
exprNodeColumnDesc(String.class, Utilities.ReduceField.ALIAS.toString())); + + static { + aliasField = ExprNodeEvaluatorFactory.get(new exprNodeColumnDesc( + String.class, Utilities.ReduceField.ALIAS.toString())); + keyField = ExprNodeEvaluatorFactory.get(new exprNodeColumnDesc( + String.class, Utilities.ReduceField.KEY.toString())); } + + HashMap<Byte, Vector<ArrayList<Object>>> storage; + int joinEmitInterval = -1; - HashMap<Byte, Vector<ArrayList<Object>>> storage; - public void initialize(Configuration hconf) throws HiveException { super.initialize(hconf); totalSz = 0; // Map that contains the rows for each alias storage = new HashMap<Byte, Vector<ArrayList<Object>>>(); - + numValues = conf.getExprs().size(); joinExprs = new HashMap<Byte, JoinExprMap>(); - if (order == null) - { + if (order == null) { order = new Byte[numValues]; for (int i = 0; i < numValues; i++) - order[i] = (byte)i; + order[i] = (byte) i; } condn = conf.getConds(); noOuterJoin = conf.getNoOuterJoin(); Map<Byte, ArrayList<exprNodeDesc>> map = conf.getExprs(); Iterator entryIter = map.entrySet().iterator(); while (entryIter.hasNext()) { - Map.Entry e = (Map.Entry)entryIter.next(); - Byte key = (Byte)e.getKey(); - ArrayList<exprNodeDesc> expr = (ArrayList<exprNodeDesc>)e.getValue(); + Map.Entry e = (Map.Entry) entryIter.next(); + Byte key = (Byte) e.getKey(); + ArrayList<exprNodeDesc> expr = (ArrayList<exprNodeDesc>) e.getValue(); int sz = expr.size(); totalSz += sz; @@ -123,12 +145,15 @@ joinExprs.put(key, new JoinExprMap(valueFields)); } - ArrayList<ObjectInspector> structFieldObjectInspectors = new ArrayList<ObjectInspector>(totalSz); - for(int i=0; i<totalSz; i++) { - structFieldObjectInspectors.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class)); - } + ArrayList<ObjectInspector> structFieldObjectInspectors = new ArrayList<ObjectInspector>( + totalSz); + for (int i = 0; i < totalSz; i++) { + structFieldObjectInspectors.add(ObjectInspectorFactory + .getStandardPrimitiveObjectInspector(String.class)); } - joinOutputObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector( - ObjectInspectorUtils.getIntegerArray(totalSz), structFieldObjectInspectors); + joinOutputObjectInspector = ObjectInspectorFactory + .getStandardStructObjectInspector(ObjectInspectorUtils + .getIntegerArray(totalSz), structFieldObjectInspectors); dummyObj = new Object[numValues]; dummyObjVectors = new Vector[numValues]; @@ -149,6 +174,8 @@ } iterators = new Stack<Iterator<ArrayList<Object>>>(); + + joinEmitInterval = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEJOINEMITINTERVAL); } public void startGroup() throws HiveException { @@ -159,7 +186,9 @@ } InspectableObject tempAliasInspectableObject = new InspectableObject(); - public void process(Object row, ObjectInspector rowInspector) throws HiveException { + + public void process(Object row, ObjectInspector rowInspector) + throws HiveException { try { // get alias aliasField.evaluate(row, rowInspector, tempAliasInspectableObject); @@ -176,15 +205,40 @@ nr.add(tempAliasInspectableObject.o); } + // Are we consuming too much memory? + if (storage.get(alias).size() == joinEmitInterval) { + if (alias == numValues - 1) { + // The input is sorted by alias, so if we are already in the last join + // operand, + // we can emit some results now. + // Note this has to be done before adding the current row to the + // storage, + // to preserve the correctness for outer joins. + checkAndGenObject(); + storage.get(alias).clear(); + } else { + // Output a warning if we reached at least 1000 rows for a join + // operand + // We won't output a warning for the last join operand since the size + // will never reach joinEmitInterval.
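+ // (Earlier operands must stay fully buffered: an outer join may still + // need to null-pad against any of their rows. Only the last operand, + // which arrives after all the others for a given key, can stream its + // results out safely.)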
+ InspectableObject io = new InspectableObject(); + keyField.evaluate(row, rowInspector, io); + LOG.warn("table " + alias + + " has more than joinEmitInterval rows for join key " + io.o); + } + } + // Add the value to the vector storage.get(alias).add(nr); + } catch (Exception e) { e.printStackTrace(); throw new HiveException(e); } } - private void createForwardJoinObject(IntermediateObject intObj, boolean[] nullsArr) throws HiveException { + private void createForwardJoinObject(IntermediateObject intObj, + boolean[] nullsArr) throws HiveException { ArrayList<Object> nr = new ArrayList<Object>(totalSz); for (int i = 0; i < numValues; i++) { Byte alias = order[i]; @@ -204,15 +258,17 @@ } private void copyOldArray(boolean[] src, boolean[] dest) { - for (int i = 0; i < src.length; i++) dest[i] = src[i]; + for (int i = 0; i < src.length; i++) + dest[i] = src[i]; } - private Vector<boolean[]> joinObjectsInnerJoin(Vector<boolean[]> resNulls, Vector<boolean[]> inputNulls, ArrayList<Object> newObj, IntermediateObject intObj, int left, boolean newObjNull) - { - if (newObjNull) return resNulls; + private Vector<boolean[]> joinObjectsInnerJoin(Vector<boolean[]> resNulls, + Vector<boolean[]> inputNulls, ArrayList<Object> newObj, + IntermediateObject intObj, int left, boolean newObjNull) { + if (newObjNull) + return resNulls; Iterator<boolean[]> nullsIter = inputNulls.iterator(); - while (nullsIter.hasNext()) - { + while (nullsIter.hasNext()) { boolean[] oldNulls = nullsIter.next(); boolean oldObjNull = oldNulls[left]; if (!oldObjNull) { @@ -224,12 +280,13 @@ } return resNulls; } - - private Vector<boolean[]> joinObjectsLeftOuterJoin(Vector<boolean[]> resNulls, Vector<boolean[]> inputNulls, ArrayList<Object> newObj, IntermediateObject intObj, int left, boolean newObjNull) - { + + private Vector<boolean[]> joinObjectsLeftOuterJoin( + Vector<boolean[]> resNulls, Vector<boolean[]> inputNulls, + ArrayList<Object> newObj, IntermediateObject intObj, int left, + boolean newObjNull) { Iterator<boolean[]> nullsIter = inputNulls.iterator(); - while (nullsIter.hasNext()) - { + while (nullsIter.hasNext()) { boolean[] oldNulls = nullsIter.next(); boolean oldObjNull = oldNulls[left]; boolean[] newNulls = new boolean[intObj.getCurSize()]; @@ -243,25 +300,25 @@ return resNulls; } - private Vector<boolean[]> joinObjectsRightOuterJoin(Vector<boolean[]> resNulls, Vector<boolean[]> inputNulls, ArrayList<Object> newObj, IntermediateObject intObj, int left, boolean newObjNull) - { - if (newObjNull) return resNulls; + private Vector<boolean[]> joinObjectsRightOuterJoin( + Vector<boolean[]> resNulls, Vector<boolean[]> inputNulls, + ArrayList<Object> newObj, IntermediateObject intObj, int left, + boolean newObjNull) { + if (newObjNull) + return resNulls; boolean allOldObjsNull = true; Iterator<boolean[]> nullsIter = inputNulls.iterator(); - while (nullsIter.hasNext()) - { + while (nullsIter.hasNext()) { boolean[] oldNulls = nullsIter.next(); - if (!oldNulls[left]) - { + if (!oldNulls[left]) { allOldObjsNull = false; break; } } nullsIter = inputNulls.iterator(); - while (nullsIter.hasNext()) - { + while (nullsIter.hasNext()) { boolean[] oldNulls = nullsIter.next(); boolean oldObjNull = oldNulls[left]; @@ -270,8 +327,7 @@ copyOldArray(oldNulls, newNulls); newNulls[oldNulls.length] = newObjNull; resNulls.add(newNulls); - } - else if (allOldObjsNull) { + } else if (allOldObjsNull) { boolean[] newNulls = new boolean[intObj.getCurSize()]; for (int i = 0; i < intObj.getCurSize() - 1; i++) newNulls[i] = true; @@ -282,12 +338,13 @@ return resNulls; } - private Vector<boolean[]> joinObjectsFullOuterJoin(Vector<boolean[]> resNulls, Vector<boolean[]> inputNulls, ArrayList<Object> newObj, IntermediateObject intObj, int left, boolean newObjNull) - { + private Vector<boolean[]> joinObjectsFullOuterJoin( + Vector<boolean[]> resNulls, Vector<boolean[]> inputNulls, + ArrayList<Object> newObj, IntermediateObject
intObj, int left, + boolean newObjNull) { if (newObjNull) { Iterator nullsIter = inputNulls.iterator(); - while (nullsIter.hasNext()) - { + while (nullsIter.hasNext()) { boolean[] oldNulls = nullsIter.next(); boolean[] newNulls = new boolean[intObj.getCurSize()]; copyOldArray(oldNulls, newNulls); @@ -296,15 +353,13 @@ } return resNulls; } - + boolean allOldObjsNull = true; Iterator nullsIter = inputNulls.iterator(); - while (nullsIter.hasNext()) - { + while (nullsIter.hasNext()) { boolean[] oldNulls = nullsIter.next(); - if (!oldNulls[left]) - { + if (!oldNulls[left]) { allOldObjsNull = false; break; } @@ -312,24 +367,21 @@ boolean rhsPreserved = false; nullsIter = inputNulls.iterator(); - while (nullsIter.hasNext()) - { + while (nullsIter.hasNext()) { boolean[] oldNulls = nullsIter.next(); boolean oldObjNull = oldNulls[left]; - if (!oldObjNull) - { + if (!oldObjNull) { boolean[] newNulls = new boolean[intObj.getCurSize()]; copyOldArray(oldNulls, newNulls); newNulls[oldNulls.length] = newObjNull; resNulls.add(newNulls); - } - else if (oldObjNull) { + } else if (oldObjNull) { boolean[] newNulls = new boolean[intObj.getCurSize()]; copyOldArray(oldNulls, newNulls); newNulls[oldNulls.length] = true; resNulls.add(newNulls); - + if (allOldObjsNull && !rhsPreserved) { newNulls = new boolean[intObj.getCurSize()]; for (int i = 0; i < oldNulls.length; i++) @@ -344,35 +396,35 @@ } /* - * The new input is added to the list of existing inputs. Each entry in the - * array of inputNulls denotes the entries in the intermediate object to - * be used. The intermediate object is augmented with the new object, and - * list of nulls is changed appropriately. The list will contain all non-nulls - * for a inner join. The outer joins are processed appropriately. + * The new input is added to the list of existing inputs. Each entry in the + * array of inputNulls denotes the entries in the intermediate object to be + * used. The intermediate object is augmented with the new object, and list of + * nulls is changed appropriately. The list will contain all non-nulls for a + * inner join. The outer joins are processed appropriately. */ - private Vector joinObjects(Vector inputNulls, ArrayList newObj, IntermediateObject intObj, int joinPos) - { + private Vector joinObjects(Vector inputNulls, + ArrayList newObj, IntermediateObject intObj, int joinPos) { Vector resNulls = new Vector(); boolean newObjNull = newObj == dummyObj[joinPos] ? 
true : false;
-    if (joinPos == 0)
-    {
-      if (newObjNull) return null;
+    if (joinPos == 0) {
+      if (newObjNull)
+        return null;
       boolean[] nulls = new boolean[1];
       nulls[0] = newObjNull;
       resNulls.add(nulls);
       return resNulls;
     }
-
+
     int left = condn[joinPos - 1].getLeft();
     int type = condn[joinPos - 1].getType();
-
+
     // process all nulls for RIGHT and FULL OUTER JOINS
-    if (((type == joinDesc.RIGHT_OUTER_JOIN) || (type == joinDesc.FULL_OUTER_JOIN))
-        && !newObjNull && (inputNulls == null)) {
+    if (((type == joinDesc.RIGHT_OUTER_JOIN) || (type == joinDesc.FULL_OUTER_JOIN))
+        && !newObjNull && (inputNulls == null)) {
       boolean[] newNulls = new boolean[intObj.getCurSize()];
       for (int i = 0; i < newNulls.length - 1; i++)
         newNulls[i] = true;
-      newNulls[newNulls.length-1] = false;
+      newNulls[newNulls.length - 1] = false;
       resNulls.add(newNulls);
       return resNulls;
     }
@@ -380,41 +432,45 @@
     if (inputNulls == null)
       return null;
 
-    if (type == joinDesc.INNER_JOIN)
-      return joinObjectsInnerJoin(resNulls, inputNulls, newObj, intObj, left, newObjNull);
-    else if (type == joinDesc.LEFT_OUTER_JOIN)
-      return joinObjectsLeftOuterJoin(resNulls, inputNulls, newObj, intObj, left, newObjNull);
-    else if (type == joinDesc.RIGHT_OUTER_JOIN)
-      return joinObjectsRightOuterJoin(resNulls, inputNulls, newObj, intObj, left, newObjNull);
+    if (type == joinDesc.INNER_JOIN)
+      return joinObjectsInnerJoin(resNulls, inputNulls, newObj, intObj, left,
+          newObjNull);
+    else if (type == joinDesc.LEFT_OUTER_JOIN)
+      return joinObjectsLeftOuterJoin(resNulls, inputNulls, newObj, intObj,
+          left, newObjNull);
+    else if (type == joinDesc.RIGHT_OUTER_JOIN)
+      return joinObjectsRightOuterJoin(resNulls, inputNulls, newObj, intObj,
+          left, newObjNull);
     assert (type == joinDesc.FULL_OUTER_JOIN);
-    return joinObjectsFullOuterJoin(resNulls, inputNulls, newObj, intObj, left, newObjNull);
+    return joinObjectsFullOuterJoin(resNulls, inputNulls, newObj, intObj, left,
+        newObjNull);
   }
-
-  /*
-   * genObject is a recursive function. For the inputs, a array of
-   * bitvectors is maintained (inputNulls) where each entry denotes whether
-   * the element is to be used or not (whether it is null or not). The size of
-   * the bitvector is same as the number of inputs under consideration
-   * currently. When all inputs are accounted for, the output is forwared
-   * appropriately.
+
+  /*
+   * genObject is a recursive function. For the inputs, an array of bitvectors
+   * is maintained (inputNulls) where each entry denotes whether the element is
+   * to be used or not (whether it is null or not). The size of the bitvector
+   * is the same as the number of inputs under consideration currently. When
+   * all inputs are accounted for, the output is forwarded appropriately.
*/ - private void genObject(Vector inputNulls, int aliasNum, IntermediateObject intObj) - throws HiveException { + private void genObject(Vector inputNulls, int aliasNum, + IntermediateObject intObj) throws HiveException { if (aliasNum < numValues) { Iterator> aliasRes = storage.get(order[aliasNum]) - .iterator(); + .iterator(); iterators.push(aliasRes); while (aliasRes.hasNext()) { ArrayList newObj = aliasRes.next(); intObj.pushObj(newObj); - Vector newNulls = joinObjects(inputNulls, newObj, intObj, aliasNum); + Vector newNulls = joinObjects(inputNulls, newObj, intObj, + aliasNum); genObject(newNulls, aliasNum + 1, intObj); intObj.popObj(); } iterators.pop(); - } - else { - if (inputNulls == null) return; + } else { + if (inputNulls == null) + return; Iterator nullsIter = inputNulls.iterator(); while (nullsIter.hasNext()) { boolean[] nullsVec = nullsIter.next(); @@ -429,29 +485,27 @@ * @throws HiveException */ public void endGroup() throws HiveException { - try { - LOG.trace("Join Op: endGroup called: numValues=" + numValues); + LOG.trace("Join Op: endGroup called: numValues=" + numValues); + checkAndGenObject(); + } - // does any result need to be emitted - for (int i = 0; i < numValues; i++) { - Byte alias = order[i]; - if (storage.get(alias).iterator().hasNext() == false) { - if (noOuterJoin) { - LOG.trace("No data for alias=" + i); - return; - } else { - storage.put(alias, dummyObjVectors[i]); - } + private void checkAndGenObject() throws HiveException { + // does any result need to be emitted + for (int i = 0; i < numValues; i++) { + Byte alias = order[i]; + if (storage.get(alias).iterator().hasNext() == false) { + if (noOuterJoin) { + LOG.trace("No data for alias=" + i); + return; + } else { + storage.put(alias, dummyObjVectors[i]); } } + } - LOG.trace("calling genObject"); - genObject(null, 0, new IntermediateObject(new ArrayList[numValues], 0)); - LOG.trace("called genObject"); - } catch (Exception e) { - e.printStackTrace(); - throw new HiveException(e); - } + LOG.trace("calling genObject"); + genObject(null, 0, new IntermediateObject(new ArrayList[numValues], 0)); + LOG.trace("called genObject"); } /** @@ -462,6 +516,5 @@ LOG.trace("Join Op close"); super.close(abort); } + } - - Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (working copy) @@ -20,7 +20,12 @@ import java.io.Serializable; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import java.util.Vector; import java.util.Properties; @@ -29,8 +34,11 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.plan.fetchWork; +import org.apache.hadoop.hive.ql.plan.partitionDesc; +import org.apache.hadoop.hive.ql.plan.tableDesc; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.FileInputFormat; @@ -45,6 +53,8 @@ import org.apache.hadoop.hive.serde.Constants; import 
org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; /** * FetchTask implementation @@ -56,42 +66,27 @@ public void initialize (HiveConf conf) { super.initialize(conf); - splitNum = 0; currRecReader = null; try { // Create a file system handle fs = FileSystem.get(conf); - serde = work.getDeserializerClass().newInstance(); - serde.initialize(null, work.getSchema()); job = new JobConf(conf, ExecDriver.class); - Path inputP = work.getSrcDir(); - if(!fs.exists(inputP)) { - empty = true; - return; - } - - empty = true; - FileStatus[] fStats = fs.listStatus(inputP); - for (FileStatus fStat:fStats) { - if (fStat.getLen() > 0) { - empty = false; - break; - } - } - - if (empty) - return; - - FileInputFormat.setInputPaths(job, inputP); - inputFormat = getInputFormatFromCache(work.getInputFormatClass(), job); - inputSplits = inputFormat.getSplits(job, 1); + mSerde = new MetadataTypedColumnsetSerDe(); Properties mSerdeProp = new Properties(); mSerdeProp.put(Constants.SERIALIZATION_FORMAT, "" + Utilities.tabCode); mSerdeProp.put(Constants.SERIALIZATION_NULL_FORMAT, "NULL"); mSerde.initialize(null, mSerdeProp); + + currPath = null; + currTbl = null; + currPart = null; + iterPath = null; + iterPartDesc = null; totalRows = 0; + tblDataDone = false; + rowWithPart = new Object[2]; } catch (Exception e) { // Bail out ungracefully - we should never hit // this here - but would have hit it in SemanticAnalyzer @@ -136,11 +131,116 @@ private Deserializer serde; private MetadataTypedColumnsetSerDe mSerde; private int totalRows; - private boolean empty; + private Iterator iterPath; + private Iterator iterPartDesc; + private Path currPath; + private partitionDesc currPart; + private tableDesc currTbl; + private boolean tblDataDone; + private StructObjectInspector rowObjectInspector; + private Object[] rowWithPart; + + private void setPrtnDesc() throws Exception { + List partNames = new ArrayList(); + List partValues = new ArrayList(); + + String pcols = currPart.getTableDesc().getProperties().getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS); + LinkedHashMap partSpec = currPart.getPartSpec(); + + List partObjectInspectors = new ArrayList(); + String[] partKeys = pcols.trim().split("/"); + for(String key: partKeys) { + partNames.add(key); + partValues.add(partSpec.get(key)); + partObjectInspectors.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class)); + } + StructObjectInspector partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors); + rowObjectInspector = (StructObjectInspector)serde.getObjectInspector(); + + rowWithPart[1] = partValues; + rowObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(Arrays.asList(new StructObjectInspector[]{ + rowObjectInspector, partObjectInspector})); + } + + private void getNextPath() throws Exception { + // first time + if (iterPath == null) { + if (work.getTblDir() != null) { + if (!tblDataDone) { + currPath = work.getTblDir(); + currTbl = work.getTblDesc(); + if (fs.exists(currPath)) + { + FileStatus[] fStats = fs.listStatus(currPath); + for (FileStatus fStat:fStats) { + if (fStat.getLen() > 0) { + tblDataDone = true; + break; + } + } + } + + if (!tblDataDone) currPath = null; + return; + } else { + currTbl = null; + currPath = null; + } + return; + } + else { + iterPath = 
work.getPartDir().iterator();
+        iterPartDesc = work.getPartDesc().iterator();
+      }
+    }
+
+    while (iterPath.hasNext()) {
+      Path nxt = iterPath.next();
+      partitionDesc prt = iterPartDesc.next();
+      if (fs.exists(nxt))
+      {
+        FileStatus[] fStats = fs.listStatus(nxt);
+        for (FileStatus fStat:fStats) {
+          if (fStat.getLen() > 0) {
+            currPath = nxt;
+            currPart = prt;
+            return;
+          }
+        }
+      }
+    }
+  }
 
   private RecordReader<WritableComparable, Writable> getRecordReader() throws Exception {
-    if (splitNum >= inputSplits.length)
-      return null;
+    if (currPath == null) {
+      getNextPath();
+      if (currPath == null)
+        return null;
+
+      FileInputFormat.setInputPaths(job, currPath);
+      tableDesc tmp = currTbl;
+      if (tmp == null)
+        tmp = currPart.getTableDesc();
+      inputFormat = getInputFormatFromCache(tmp.getInputFileFormatClass(), job);
+      inputSplits = inputFormat.getSplits(job, 1);
+      splitNum = 0;
+      serde = tmp.getDeserializerClass().newInstance();
+      serde.initialize(null, tmp.getProperties());
+      LOG.debug("Creating fetchTask with deserializer typeinfo: " + serde.getObjectInspector().getTypeName());
+      LOG.debug("deserializer properties: " + tmp.getProperties());
+      if (!tblDataDone)
+        setPrtnDesc();
+    }
+
+    if (splitNum >= inputSplits.length) {
+      if (currRecReader != null) {
+        currRecReader.close();
+        currRecReader = null;
+      }
+      currPath = null;
+      return getRecordReader();
+    }
+
     currRecReader = inputFormat.getRecordReader(inputSplits[splitNum++], job, Reporter.NULL);
     key = currRecReader.createKey();
     value = currRecReader.createValue();
@@ -149,16 +249,15 @@
   public boolean fetch(Vector<String> res) {
     try {
-      if (empty)
-        return false;
-
       int numRows = 0;
       int rowsRet = MAX_ROWS;
       if ((work.getLimit() >= 0) && ((work.getLimit() - totalRows) < rowsRet))
         rowsRet = work.getLimit() - totalRows;
       if (rowsRet <= 0) {
-        if (currRecReader != null)
+        if (currRecReader != null) {
           currRecReader.close();
+          currRecReader = null;
+        }
         return false;
       }
@@ -174,12 +273,18 @@
         }
         boolean ret = currRecReader.next(key, value);
         if (ret) {
-          Object obj = serde.deserialize(value);
-          res.add(((Text)mSerde.serialize(obj, serde.getObjectInspector())).toString());
+          if (tblDataDone) {
+            Object obj = serde.deserialize(value);
+            res.add(((Text)mSerde.serialize(obj, serde.getObjectInspector())).toString());
+          } else {
+            rowWithPart[0] = serde.deserialize(value);
+            res.add(((Text)mSerde.serialize(rowWithPart, rowObjectInspector)).toString());
+          }
           numRows++;
         } else {
           currRecReader.close();
+          currRecReader = null;
           currRecReader = getRecordReader();
           if (currRecReader == null) {
             if (numRows == 0)
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (working copy)
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.mapred.LineRecordReader.LineReader;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.fs.FileUtil;
 
 public class ScriptOperator extends Operator<scriptDesc> implements Serializable {
 
@@ -89,6 +90,77 @@
     }
   }
 
+
+  /**
+   * Maps a relative pathname to an absolute pathname using the
+   * PATH environment.
+   */
+  public class PathFinder
+  {
+    String pathenv;        // a string of pathnames
+    String pathSep;        // the path separator
+    String fileSep;        // the file separator in a directory
+
+    /**
+     * Construct a PathFinder object using the path from
+     * the specified system environment variable.
+     */
+    public PathFinder(String envpath)
+    {
+      pathenv = System.getenv(envpath);
+      pathSep = System.getProperty("path.separator");
+      fileSep = System.getProperty("file.separator");
+    }
+
+    /**
+     * Prepends the specified component to the path list
+     */
+    public void prependPathComponent(String str)
+    {
+      pathenv = str + pathSep + pathenv;
+    }
+
+    /**
+     * Returns the full path name of this file if it is listed in the
+     * path
+     */
+    public File getAbsolutePath(String filename)
+    {
+      if (pathenv == null || pathSep == null || fileSep == null) {
+        return null;
+      }
+      int val = -1;
+      String classvalue = pathenv + pathSep;
+
+      while (((val = classvalue.indexOf(pathSep)) >= 0) &&
+             classvalue.length() > 0) {
+        //
+        // Extract each entry from the pathenv
+        //
+        String entry = classvalue.substring(0, val).trim();
+        File f = new File(entry);
+
+        try {
+          if (f.isDirectory()) {
+            //
+            // this entry in the pathenv is a directory.
+            // see if the required file is in this directory
+            //
+            f = new File(entry + fileSep + filename);
+          }
+          //
+          // see if the filename matches and we can read it
+          //
+          if (f.isFile() && f.canRead()) {
+            return f;
+          }
+        } catch (Exception exp){ }
+        classvalue = classvalue.substring(val+1).trim();
+      }
+      return null;
+    }
+  }
+
   public void initialize(Configuration hconf) throws HiveException {
     super.initialize(hconf);
     statsMap.put(Counter.DESERIALIZE_ERRORS, deserialize_error_count);
@@ -104,6 +176,20 @@
     scriptInputSerializer.initialize(hconf, conf.getScriptInputInfo().getProperties());
 
     String [] cmdArgs = splitArgs(conf.getScriptCmd());
+
+    String prog = cmdArgs[0];
+    File currentDir = new File(".").getAbsoluteFile();
+
+    if (!new File(prog).isAbsolute()) {
+      PathFinder finder = new PathFinder("PATH");
+      finder.prependPathComponent(currentDir.toString());
+      File f = finder.getAbsolutePath(prog);
+      if (f != null) {
+        cmdArgs[0] = f.getAbsolutePath();
+      }
+      f = null;
+    }
+
     String [] wrappedCmdArgs = addWrapper(cmdArgs);
     LOG.info("Executing " + Arrays.asList(wrappedCmdArgs));
     LOG.info("tablename=" + hconf.get(HiveConf.ConfVars.HIVETABLENAME.varname));
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java (revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java (working copy)
@@ -109,7 +109,14 @@
       children.add(ret);
       op.setChildOperators(children);
     }
+
+    // add parents for the newly created operator
+    List<Operator<? extends Serializable>> parent = new ArrayList<Operator<? extends Serializable>>();
+    for(Operator<? extends Serializable> op: oplist)
+      parent.add(op);
+    ret.setParentOperators(parent);
+
     return (ret);
   }
 
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (working copy)
@@ -22,6 +22,7 @@
 import java.io.*;
 
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.OpParseContext;
 import org.apache.hadoop.hive.ql.plan.mapredWork;
 import 
org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -31,6 +32,8 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.plan.explain; +import org.apache.hadoop.hive.ql.parse.RowResolver; +import org.apache.hadoop.hive.ql.parse.SemanticException; /** * Base operator implementation @@ -42,6 +45,7 @@ private static final long serialVersionUID = 1L; protected List> childOperators; + protected List> parentOperators; public Operator() {} @@ -53,6 +57,14 @@ return childOperators; } + public void setParentOperators(List> parentOperators) { + this.parentOperators = parentOperators; + } + + public List> getParentOperators() { + return parentOperators; + } + protected String id; protected T conf; protected boolean done; @@ -277,4 +289,22 @@ } } + public List mergeColListsFromChildren(List colList, + HashMap, OpParseContext> opParseCtx) { + return colList; + } + + public List genColLists(HashMap, OpParseContext> opParseCtx) + throws SemanticException { + List colList = new ArrayList(); + if (childOperators != null) + for(Operator o: childOperators) + colList = Utilities.mergeUniqElems(colList, o.genColLists(opParseCtx)); + + List cols = mergeColListsFromChildren(colList, opParseCtx); + OpParseContext ctx = opParseCtx.get(this); + ctx.setColNames(cols); + return cols; + } + } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (working copy) @@ -34,11 +34,13 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.FileInputFormat; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.plan.mapredWork; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.io.*; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; +import org.apache.hadoop.hive.ql.session.SessionState; public class ExecDriver extends Task implements Serializable { @@ -54,12 +56,37 @@ super(); } + public static String getRealFiles(Configuration conf) { + // fill in local files to be added to the task environment + SessionState ss = SessionState.get(); + Set files = (ss == null) ? 
null : ss.list_resource(SessionState.ResourceType.FILE, null); + if(files != null) { + ArrayList realFiles = new ArrayList (files.size()); + for(String one: files) { + try { + realFiles.add(Utilities.realFile(one, conf)); + } catch (IOException e) { + throw new RuntimeException ("Cannot validate file " + one + + "due to exception: " + e.getMessage(), e); + } + } + return StringUtils.join(realFiles, ","); + } else { + return ""; + } + } + + /** * Initialization when invoked from QL */ public void initialize (HiveConf conf) { super.initialize(conf); job = new JobConf(conf, ExecDriver.class); + String realFiles = getRealFiles(job); + if (realFiles != null && realFiles.length() > 0) { + job.set("tmpfiles", realFiles); + } } /** @@ -121,8 +148,7 @@ } } } - } - ); + }); } } @@ -207,6 +233,7 @@ Utilities.setMapRedWork(job, work); + for(String onefile: work.getPathToAliases().keySet()) { LOG.info("Adding input file " + onefile); FileInputFormat.addInputPaths(job, onefile); @@ -217,8 +244,8 @@ FileOutputFormat.setOutputPath(job, new Path(jobScratchDir)); job.setMapperClass(ExecMapper.class); - job.setMapOutputValueClass(Text.class); job.setMapOutputKeyClass(HiveKey.class); + job.setMapOutputValueClass(BytesWritable.class); job.setNumReduceTasks(work.getNumReduceTasks().intValue()); job.setReducerClass(ExecReducer.class); @@ -265,6 +292,10 @@ inferNumReducers(); JobClient jc = new JobClient(job); + + // make this client wait if job trcker is not behaving well. + Throttle.checkJobTracker(job, LOG); + rj = jc.submitJob(job); // add to list of running jobs so in case of abnormal shutdown can kill it. @@ -306,7 +337,8 @@ } private static void printUsage() { - System.out.println("ExecDriver -plan [-jobconf k1=v1 [-jobconf k2=v2] ...]"); + System.out.println("ExecDriver -plan [-jobconf k1=v1 [-jobconf k2=v2] ...] "+ + "[-files [,] ...]"); System.exit(1); } @@ -314,15 +346,19 @@ String planFileName = null; ArrayList jobConfArgs = new ArrayList (); boolean isSilent = false; + String files = null; try{ for(int i=0; i mergeColListsFromChildren(List colList, + HashMap, OpParseContext> opParseCtx) { + exprNodeDesc condn = conf.getPredicate(); + + // get list of columns used in the filter + List cl = condn.getCols(); + + return Utilities.mergeUniqElems(colList, cl); + } + } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java (revision 0) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java (revision 0) @@ -0,0 +1,123 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import java.io.*;
+import java.util.*;
+import java.util.regex.Pattern;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.net.URLDecoder;
+import java.net.MalformedURLException;
+import java.net.InetSocketAddress;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobTracker;
+
+/*
+ * Intelligence to make clients wait if the cluster is in a bad state.
+ */
+public class Throttle {
+
+  // The percentage of maximum allocated memory that triggers GC
+  // on the job tracker. This could be overridden through the jobconf.
+  // The default is such that there is no throttling.
+  static private int DEFAULT_MEMORY_GC_PERCENT = 100;
+
+  // sleep this many seconds between each retry.
+  // This could be overridden through the jobconf.
+  static private int DEFAULT_RETRY_PERIOD = 60;
+
+  /**
+   * Fetches gc.jsp from the job tracker's info port
+   * (http://tracker:port/gc.jsp?threshold=percent) and waits while the
+   * tracker reports that the memory threshold is exceeded.
+   */
+  static void checkJobTracker(JobConf conf, Log LOG) {
+
+    try {
+      byte buffer[] = new byte[1024];
+      int threshold = conf.getInt("mapred.throttle.threshold.percent",
+                                  DEFAULT_MEMORY_GC_PERCENT);
+      int retry = conf.getInt("mapred.throttle.retry.period",
+                              DEFAULT_RETRY_PERIOD);
+
+      // If the threshold is 100 percent, then there is no throttling
+      if (threshold == 100) {
+        return;
+      }
+
+      // find the http port for the jobtracker
+      String infoAddr = conf.get("mapred.job.tracker.http.address");
+      if (infoAddr == null) {
+        throw new IOException("Throttle: Unable to find job tracker info port.");
+      }
+      InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
+      int infoPort = infoSocAddr.getPort();
+
+      // This is the Job Tracker URL
+      String tracker = "http://" +
+                       JobTracker.getAddress(conf).getHostName() + ":" +
+                       infoPort +
+                       "/gc.jsp?threshold=" + threshold;
+
+      while (true) {
+        // read in the first 1K characters from the URL
+        URL url = new URL(tracker);
+        LOG.debug("Throttle: URL " + tracker);
+        InputStream in = url.openStream();
+        int numRead = in.read(buffer);
+        in.close();
+        String fetchString = new String(buffer);
+
+        // fetch the xml tag xxx
+        Pattern dowait = Pattern.compile("",
+                 Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
+        String[] results = dowait.split(fetchString);
+        if (results.length != 2) {
+          throw new IOException("Throttle: Unable to parse response of URL " + url +
+                                ". GET returned " + fetchString);
+        }
+        dowait = Pattern.compile("",
+                 Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
+        results = dowait.split(results[1]);
+        if (results.length < 1) {
+          throw new IOException("Throttle: Unable to parse response of URL " + url +
+                                ". GET returned " + fetchString);
+        }
+
+        // if the jobtracker signalled that the threshold is not exceeded,
+        // then we return immediately.
+        if (results[0].trim().compareToIgnoreCase("false") == 0) {
+          return;
+        }
+
+        // The JobTracker has exceeded its threshold and is doing a GC.
+        // The client has to wait and retry.
+        LOG.warn("Job is being throttled because of resource crunch on the " +
+                 "JobTracker. Will retry in " + retry + " seconds..");
+        Thread.sleep(retry * 1000L);
+      }
+    } catch (Exception e) {
+      LOG.warn("Job is not being throttled. 
" + e); + } + } +} Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java (working copy) @@ -20,10 +20,11 @@ import java.io.*; import java.util.ArrayList; +import java.util.List; +import java.util.HashMap; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.plan.PlanUtils; import org.apache.hadoop.hive.ql.plan.exprNodeDesc; import org.apache.hadoop.hive.ql.plan.reduceSinkDesc; import org.apache.hadoop.hive.ql.plan.tableDesc; @@ -34,7 +35,12 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.hive.ql.parse.OpParseContext; +import org.apache.hadoop.hive.ql.parse.RowResolver; +import org.apache.hadoop.hive.ql.parse.SemanticException; /** * Reduce Sink Operator sends output to the reduce stage @@ -42,15 +48,31 @@ public class ReduceSinkOperator extends TerminalOperator implements Serializable { private static final long serialVersionUID = 1L; + + /** + * The evaluators for the key columns. + * Key columns decide the sort order on the reducer side. + * Key columns are passed to the reducer in the "key". + */ transient protected ExprNodeEvaluator[] keyEval; + /** + * The evaluators for the value columns. + * Value columns are passed to reducer in the "value". + */ transient protected ExprNodeEvaluator[] valueEval; + /** + * The evaluators for the partition columns (CLUSTER BY or DISTRIBUTE BY in Hive language). + * Partition columns decide the reducer that the current row goes to. + * Partition columns are not passed to reducer. + */ + transient protected ExprNodeEvaluator[] partitionEval; // TODO: we use MetadataTypedColumnsetSerDe for now, till DynamicSerDe is ready transient Serializer keySerializer; + transient boolean keyIsText; transient Serializer valueSerializer; transient int tag; transient byte[] tagByte = new byte[1]; - transient int numPartitionFields; public void initialize(Configuration hconf) throws HiveException { super.initialize(hconf); @@ -67,6 +89,12 @@ valueEval[i++] = ExprNodeEvaluatorFactory.get(e); } + partitionEval = new ExprNodeEvaluator[conf.getPartitionCols().size()]; + i=0; + for(exprNodeDesc e: conf.getPartitionCols()) { + partitionEval[i++] = ExprNodeEvaluatorFactory.get(e); + } + tag = conf.getTag(); tagByte[0] = (byte)tag; LOG.info("Using tag = " + tag); @@ -74,13 +102,11 @@ tableDesc keyTableDesc = conf.getKeySerializeInfo(); keySerializer = (Serializer)keyTableDesc.getDeserializerClass().newInstance(); keySerializer.initialize(null, keyTableDesc.getProperties()); + keyIsText = keySerializer.getSerializedClass().equals(Text.class); tableDesc valueTableDesc = conf.getValueSerializeInfo(); valueSerializer = (Serializer)valueTableDesc.getDeserializerClass().newInstance(); valueSerializer.initialize(null, valueTableDesc.getProperties()); - - // Set the number of key fields to be used in the partitioner. 
- numPartitionFields = conf.getNumPartitionFields(); } catch (Exception e) { e.printStackTrace(); throw new RuntimeException(e); @@ -89,7 +115,7 @@ transient InspectableObject tempInspectableObject = new InspectableObject(); transient HiveKey keyWritable = new HiveKey(); - transient Text valueText; + transient Writable value; transient ObjectInspector keyObjectInspector; transient ObjectInspector valueObjectInspector; @@ -97,64 +123,138 @@ transient ArrayList valueFieldsObjectInspectors = new ArrayList(); public void process(Object row, ObjectInspector rowInspector) throws HiveException { - // TODO: use DynamicSerDe when that is ready try { - // Generate hashCode for the tuple - int keyHashCode = 0; - if (numPartitionFields == -1) { - keyHashCode = (int)(Math.random() * Integer.MAX_VALUE); - } + // Evaluate the keys ArrayList keys = new ArrayList(keyEval.length); for(ExprNodeEvaluator e: keyEval) { e.evaluate(row, rowInspector, tempInspectableObject); keys.add(tempInspectableObject.o); - if (numPartitionFields == keys.size()) { - keyHashCode = keys.hashCode(); - } + // Construct the keyObjectInspector from the first row if (keyObjectInspector == null) { keyFieldsObjectInspectors.add(tempInspectableObject.oi); } } - if (numPartitionFields > keys.size()) { - keyHashCode = keys.hashCode(); - } + // Construct the keyObjectInspector from the first row if (keyObjectInspector == null) { keyObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector( ObjectInspectorUtils.getIntegerArray(keyFieldsObjectInspectors.size()), keyFieldsObjectInspectors); } - Text key = (Text)keySerializer.serialize(keys, keyObjectInspector); - if (tag == -1) { - keyWritable.set(key.getBytes(), 0, key.getLength()); + // Serialize the keys and append the tag + if (keyIsText) { + Text key = (Text)keySerializer.serialize(keys, keyObjectInspector); + if (tag == -1) { + keyWritable.set(key.getBytes(), 0, key.getLength()); + } else { + int keyLength = key.getLength(); + keyWritable.setSize(keyLength+1); + System.arraycopy(key.getBytes(), 0, keyWritable.get(), 0, keyLength); + keyWritable.get()[keyLength] = tagByte[0]; + } } else { - int keyLength = key.getLength(); - keyWritable.setSize(keyLength+1); - System.arraycopy(key.getBytes(), 0, keyWritable.get(), 0, keyLength); - keyWritable.get()[keyLength] = tagByte[0]; + // Must be BytesWritable + BytesWritable key = (BytesWritable)keySerializer.serialize(keys, keyObjectInspector); + if (tag == -1) { + keyWritable.set(key.get(), 0, key.getSize()); + } else { + int keyLength = key.getSize(); + keyWritable.setSize(keyLength+1); + System.arraycopy(key.get(), 0, keyWritable.get(), 0, keyLength); + keyWritable.get()[keyLength] = tagByte[0]; + } } + // Set the HashCode + int keyHashCode = 0; + for(ExprNodeEvaluator e: partitionEval) { + e.evaluate(row, rowInspector, tempInspectableObject); + keyHashCode = keyHashCode * 31 + + (tempInspectableObject.o == null ? 
0 : tempInspectableObject.o.hashCode()); + } keyWritable.setHashCode(keyHashCode); + // Evaluate the value ArrayList values = new ArrayList(valueEval.length); for(ExprNodeEvaluator e: valueEval) { e.evaluate(row, rowInspector, tempInspectableObject); values.add(tempInspectableObject.o); + // Construct the valueObjectInspector from the first row if (valueObjectInspector == null) { valueFieldsObjectInspectors.add(tempInspectableObject.oi); } } + // Construct the valueObjectInspector from the first row if (valueObjectInspector == null) { valueObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector( ObjectInspectorUtils.getIntegerArray(valueFieldsObjectInspectors.size()), valueFieldsObjectInspectors); } - valueText = (Text)valueSerializer.serialize(values, valueObjectInspector); + // Serialize the value + value = valueSerializer.serialize(values, valueObjectInspector); } catch (SerDeException e) { throw new HiveException(e); } + try { - out.collect(keyWritable, valueText); + out.collect(keyWritable, value); } catch (IOException e) { throw new HiveException (e); } } + + public List genColLists(HashMap, OpParseContext> opParseCtx) + throws SemanticException { + RowResolver redSinkRR = opParseCtx.get(this).getRR(); + List childColLists = new ArrayList(); + + for(Operator o: childOperators) + childColLists = Utilities.mergeUniqElems(childColLists, o.genColLists(opParseCtx)); + + List colLists = new ArrayList(); + ArrayList keys = conf.getKeyCols(); + for (exprNodeDesc key : keys) + colLists = Utilities.mergeUniqElems(colLists, key.getCols()); + + // In case of extract child, see the columns used and propagate them + if ((childOperators.size() == 1) && (childOperators.get(0) instanceof ExtractOperator)) { + assert parentOperators.size() == 1; + Operator par = parentOperators.get(0); + RowResolver parRR = opParseCtx.get(par).getRR(); + + for (String childCol : childColLists) { + String [] nm = redSinkRR.reverseLookup(childCol); + ColumnInfo cInfo = parRR.get(nm[0],nm[1]); + if (!colLists.contains(cInfo.getInternalName())) + colLists.add(cInfo.getInternalName()); + } + } + else if ((childOperators.size() == 1) && (childOperators.get(0) instanceof JoinOperator)) { + assert parentOperators.size() == 1; + Operator par = parentOperators.get(0); + RowResolver parRR = opParseCtx.get(par).getRR(); + RowResolver childRR = opParseCtx.get(childOperators.get(0)).getRR(); + + for (String childCol : childColLists) { + String [] nm = childRR.reverseLookup(childCol); + ColumnInfo cInfo = redSinkRR.get(nm[0],nm[1]); + if (cInfo != null) { + cInfo = parRR.get(nm[0], nm[1]); + if (!colLists.contains(cInfo.getInternalName())) + colLists.add(cInfo.getInternalName()); + } + } + } + else { + + // Reduce Sink contains the columns needed - no need to aggregate from children + ArrayList vals = conf.getValueCols(); + for (exprNodeDesc val : vals) + colLists = Utilities.mergeUniqElems(colLists, val.getCols()); + } + + OpParseContext ctx = opParseCtx.get(this); + ctx.setColNames(colLists); + opParseCtx.put(this, ctx); + return colLists; + } + } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (working copy) @@ -73,4 +73,11 @@ public void setInternalName(String internalName) { this.internalName = internalName; } + + /** + * Returns the 
string representation of the ColumnInfo. + */ + public String toString() { + return internalName + ": " + type; + } } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (working copy) @@ -140,8 +140,7 @@ // Set up distributed cache DistributedCache.createSymlink(job); String uriWithLink = planPath.toUri().toString() + "#HIVE_PLAN"; - URI[] fileURIs = new URI[] {new URI(uriWithLink)}; - DistributedCache.setCacheFiles(fileURIs, job); + DistributedCache.addCacheFile(new URI(uriWithLink), job); // Cache the object in this process too so lookups don't hit the file system synchronized (Utilities.class) { gWork = w; @@ -198,15 +197,13 @@ public static tableDesc defaultTd; static { // by default we expect ^A separated strings + // This tableDesc does not provide column names. We should always use + // PlanUtils.getDefaultTableDesc(String separatorCode, String columns) + // or getBinarySortableTableDesc(List fieldSchemas) when + // we know the column names. defaultTd = PlanUtils.getDefaultTableDesc("" + Utilities.ctrlaCode); } - public static tableDesc defaultTabTd; - static { - // Default tab-separated tableDesc - defaultTabTd = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode); - } - public final static int newLineCode = 10; public final static int tabCode = 9; public final static int ctrlaCode = 1; @@ -431,4 +428,43 @@ keyClass, valClass, compressionType, codec)); } + + /** + * Shamelessly cloned from GenericOptionsParser + */ + public static String realFile(String newFile, Configuration conf) throws IOException { + Path path = new Path(newFile); + URI pathURI = path.toUri(); + FileSystem fs; + + if (pathURI.getScheme() == null) { + fs = FileSystem.getLocal(conf); + } else { + fs = path.getFileSystem(conf); + } + + if (!fs.exists(path)) { + return null; + } + + try { + fs.close(); + } catch(IOException e){}; + + return (path.makeQualified(fs).toString()); + } + + public static List mergeUniqElems(List src, List dest) { + if (dest == null) return src; + if (src == null) return dest; + int pos = 0; + + while (pos < dest.size()) { + if (!src.contains(dest.get(pos))) + src.add(dest.get(pos)); + pos++; + } + + return src; + } } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (working copy) @@ -22,6 +22,7 @@ import org.apache.commons.logging.LogFactory; import java.lang.reflect.Method; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -29,6 +30,8 @@ import java.lang.Void; import org.apache.hadoop.hive.ql.exec.FunctionInfo.OperatorType; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.groupByDesc; import org.apache.hadoop.hive.ql.udf.*; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; @@ -247,7 +250,7 @@ /** * This method is shared between UDFRegistry and UDAFRegistry. - * methodName will be "evaluate" for UDFRegistry, and "aggregate" for UDAFRegistry. 
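The javadoc being revised here belongs to getMethodInternal, which the UDF and UDAF registries share: it resolves an overload by scoring every candidate method and keeping the one that needs the fewest implicit conversions. A simplified sketch of that resolution loop; the helper names are ours, and isAssignableFrom merely stands in for Hive's numeric-widening rules:

    // Simplified shape of the overload-resolution loop (illustrative only).
    import java.lang.reflect.Method;
    import java.util.List;

    class ResolutionSketch {
      static Method pick(Class<?> udfClass, String methodName, List<Class<?>> args) {
        Method best = null;
        int leastConversions = Integer.MAX_VALUE;
        for (Method m : udfClass.getMethods()) {
          if (!m.getName().equals(methodName)) {
            continue;
          }
          int conversions = countImplicitConversions(m.getParameterTypes(), args);
          if (conversions >= 0 && conversions < leastConversions) {
            leastConversions = conversions;
            best = m;
          }
        }
        return best;
      }

      // Returns -1 if the call is impossible; otherwise how many arguments
      // need an implicit conversion (in Hive, widenings such as
      // tinyint -> int -> bigint -> double).
      static int countImplicitConversions(Class<?>[] formal, List<Class<?>> actual) {
        if (formal.length != actual.size()) {
          return -1;
        }
        int n = 0;
        for (int i = 0; i < formal.length; i++) {
          if (formal[i].equals(actual.get(i))) {
            continue;
          }
          if (formal[i].isAssignableFrom(actual.get(i))) {
            n++;
          } else {
            return -1;
          }
        }
        return n;
      }
    }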
+ * methodName will be "evaluate" for UDFRegistry, and "aggregate"/"evaluate"/"evaluatePartial" for UDAFRegistry.
    */
   public static Method getMethodInternal(Class udfClass, String methodName, boolean exact, List<Class<?>> argumentClasses) {
     int leastImplicitConversions = Integer.MAX_VALUE;
@@ -319,6 +322,9 @@
     return result;
   }
 
+  /**
+   * Returns the "aggregate" method of the UDAF.
+   */
   public static Method getUDAFMethod(String name, List<Class<?>> argumentClasses) {
     Class udaf = getUDAF(name);
     if (udaf == null)
@@ -327,7 +333,62 @@
         argumentClasses);
   }
 
+  /**
+   * Returns the evaluate method for the UDAF based on the aggregation mode.
+   * See groupByDesc.Mode for details.
+   *
+   * @param name name of the UDAF
+   * @param mode the mode of the aggregation
+   * @return null if no such UDAF is found
+   */
+  public static Method getUDAFEvaluateMethod(String name, groupByDesc.Mode mode) {
+    Class udaf = getUDAF(name);
+    if (udaf == null)
+      return null;
+    return FunctionRegistry.getMethodInternal(udaf,
+        (mode == groupByDesc.Mode.COMPLETE || mode == groupByDesc.Mode.FINAL)
+        ? "evaluate" : "evaluatePartial", true,
+        new ArrayList<Class<?>>() );
+  }
+
+  /**
+   * Returns the "aggregate" method of the UDAF.
+   */
   public static Method getUDAFMethod(String name, Class... argumentClasses) {
     return getUDAFMethod(name, Arrays.asList(argumentClasses));
   }
+
+  public static Object invoke(Method m, Object thisObject, Object[] arguments) throws HiveException {
+    Object o;
+    try {
+      o = m.invoke(thisObject, arguments);
+    } catch (Exception e) {
+      String thisObjectString = "" + thisObject + " of class " +
+        (thisObject == null? "null" : thisObject.getClass().getName());
+
+      StringBuilder argumentString = new StringBuilder();
+      if (arguments == null) {
+        argumentString.append("null");
+      } else {
+        argumentString.append("{");
+        for (int i=0; i<arguments.length; i++) {
+          if (i>0) {
+            argumentString.append(", ");
+          }
+          if (arguments[i] == null) {
+            argumentString.append("null");
+          } else {
+            argumentString.append("" + arguments[i] + ":" + arguments[i].getClass().getName());
+          }
+        }
+        argumentString.append("} of size " + arguments.length);
+      }
+
+      throw new HiveException("Unable to execute method " + m + " "
+          + " on object " + thisObjectString
+          + " with arguments " + argumentString.toString()
+          + ":" + e.getMessage());
+    }
+    return o;
+  }
 }
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java (revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java (working copy)
@@ -72,19 +72,8 @@
       paramEvaluators[i].evaluate(row, rowInspector, paramInspectableObjects[i]);
       paramValues[i] = paramInspectableObjects[i].o;
     }
-    try {
-      result.o = udfMethod.invoke(udf, paramValues);
-      result.oi = outputObjectInspector;
-    } catch (Exception e) {
-      if (e instanceof HiveException) {
-        throw (HiveException)e;
-      } else if (e instanceof RuntimeException) {
-        throw (RuntimeException)e;
-      } else {
-        throw new HiveException("Unable to execute UDF function " + udf.getClass() + " "
-            + udfMethod + " on inputs " + "(" + paramValues.length + ") " + Arrays.asList(paramValues) + ": " + e.getMessage(), e);
-      }
-    }
+    result.o = FunctionRegistry.invoke(udfMethod, udf, paramValues);
+    result.oi = outputObjectInspector;
   }
 
   public ObjectInspector evaluateInspector(ObjectInspector rowInspector)
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java
=================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java (working copy) @@ -44,4 +44,6 @@ eval.evaluate(row, rowInspector, result); forward(result.o, result.oi); } + + } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy) @@ -74,6 +74,7 @@ transient HiveConf conf; static final private int separator = Utilities.tabCode; + static final private int singleQuote = '\''; static final private int terminator = Utilities.newLineCode; public void initialize(HiveConf conf) { @@ -95,7 +96,6 @@ // create the table Table tbl = new Table(crtTbl.getTableName()); - tbl.setFields(crtTbl.getCols()); StorageDescriptor tblStorDesc = tbl.getTTable().getSd(); if (crtTbl.getBucketCols() != null) tblStorDesc.setBucketCols(crtTbl.getBucketCols()); @@ -169,7 +169,7 @@ List bucketCols = tbl.getBucketCols(); List sortCols = tbl.getSortCols(); - if (sortCols.size() >= bucketCols.size()) + if ( (sortCols.size() > 0) && (sortCols.size() >= bucketCols.size())) { boolean found = true; @@ -201,6 +201,10 @@ // set create time tbl.getTTable().setCreateTime((int) (System.currentTimeMillis()/1000)); + if(crtTbl.getCols() != null) { + tbl.setFields(crtTbl.getCols()); + } + // create the table db.createTable(tbl); return 0; @@ -280,6 +284,20 @@ } tbl.getTTable().getSd().setCols(alterTbl.getNewCols()); } + else if (alterTbl.getOp() == alterTableDesc.alterTableTypes.ADDPROPS) { + tbl.getTTable().getParameters().putAll(alterTbl.getProps()); + } + else if (alterTbl.getOp() == alterTableDesc.alterTableTypes.ADDSERDEPROPS) { + tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(alterTbl.getProps()); + } + else if (alterTbl.getOp() == alterTableDesc.alterTableTypes.ADDSERDE) { + tbl.setSerializationLib(alterTbl.getSerdeName()); + if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) + tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(alterTbl.getProps()); + // since serde is modified then do the appropriate things to reset columns etc + tbl.reinitSerDe(); + tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getName(), tbl.getDeserializer())); + } else { console.printError("Unsupported Alter commnad"); return 1; @@ -357,7 +375,9 @@ if (col.getComment() != null) { os.write(separator); + os.write(singleQuote); os.write(col.getComment().getBytes("UTF-8")); + os.write(singleQuote); } firstCol = false; } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (working copy) @@ -20,8 +20,11 @@ import java.io.Serializable; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.LocalFileSystem; @@ -30,6 +33,9 @@ import org.apache.hadoop.hive.ql.plan.loadTableDesc; import 
org.apache.hadoop.hive.ql.plan.moveWork; import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.mapred.SequenceFileInputFormat; import org.apache.hadoop.util.StringUtils; /** @@ -108,6 +114,44 @@ String mesg_detail = " from " + tbd.getSourceDir(); console.printInfo(mesg, mesg_detail); + // Get the file format of the table + boolean tableIsSequenceFile = tbd.getTable().getInputFileFormatClass().equals(SequenceFileInputFormat.class); + // Get all files from the src directory + FileStatus [] dirs; + ArrayList files; + try { + fs = FileSystem.get(db.getTable(tbd.getTable().getTableName()).getDataLocation(), + Hive.get().getConf()); + dirs = fs.globStatus(new Path(tbd.getSourceDir())); + files = new ArrayList(); + for (int i=0; i0) break; + } + } catch (IOException e) { + throw new HiveException("addFiles: filesystem error in check phase", e); + } + // Check if the file format of the file matches that of the table. + if (files.size() > 0) { + int fileId = 0; + boolean fileIsSequenceFile = true; + try { + SequenceFile.Reader reader = new SequenceFile.Reader( + fs, files.get(fileId).getPath(), conf); + reader.close(); + } catch (IOException e) { + fileIsSequenceFile = false; + } + if (!fileIsSequenceFile && tableIsSequenceFile) { + throw new HiveException("Cannot load text files into a table stored as SequenceFile."); + } + if (fileIsSequenceFile && !tableIsSequenceFile) { + throw new HiveException("Cannot load SequenceFiles into a table stored as TextFile."); + } + } + + if(tbd.getPartitionSpec().size() == 0) { db.loadTable(new Path(tbd.getSourceDir()), tbd.getTable().getTableName(), tbd.getReplace()); } else { Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java (working copy) @@ -19,16 +19,18 @@ package org.apache.hadoop.hive.ql.exec; import java.io.*; +import java.util.HashMap; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.parse.OpParseContext; +import org.apache.hadoop.hive.ql.parse.RowResolver; import org.apache.hadoop.hive.ql.plan.limitDesc; -import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.conf.Configuration; /** * Limit operator implementation - * Limits a subobject and passes that on. + * Limits the number of rows to be passed on. 
**/ public class LimitOperator extends Operator implements Serializable { private static final long serialVersionUID = 1L; @@ -50,4 +52,5 @@ else setDone(true); } + } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (working copy) @@ -62,6 +62,11 @@ String cmdLine = hadoopExec + " jar " + auxJars + " " + hiveJar + " org.apache.hadoop.hive.ql.exec.ExecDriver -plan " + planFile.toString() + " " + hiveConfArgs; + String files = ExecDriver.getRealFiles(conf); + if(!files.isEmpty()) { + cmdLine = cmdLine + " -files " + files; + } + LOG.info("Executing: " + cmdLine); Process executor = Runtime.getRuntime().exec(cmdLine); Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java (working copy) @@ -20,14 +20,18 @@ import java.io.*; import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.parse.OpParseContext; import org.apache.hadoop.hive.ql.plan.exprNodeDesc; import org.apache.hadoop.hive.ql.plan.selectDesc; import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.parse.SemanticException; /** * Select operator implementation @@ -87,4 +91,61 @@ } forward(output, outputObjectInspector); } + + private List getColsFromExpr(HashMap, OpParseContext> opParseCtx) { + List cols = new ArrayList(); + ArrayList exprList = conf.getColList(); + for (exprNodeDesc expr : exprList) + cols = Utilities.mergeUniqElems(cols, expr.getCols()); + List listExprs = new ArrayList(); + for (int pos = 0; pos < exprList.size(); pos++) + listExprs.add(new Integer(pos)); + OpParseContext ctx = opParseCtx.get(this); + ctx.setColNames(cols); + opParseCtx.put(this, ctx); + return cols; + } + + private List getColsFromExpr(List colList, + HashMap, OpParseContext> opParseCtx) { + if (colList.isEmpty()) + return getColsFromExpr(opParseCtx); + + List cols = new ArrayList(); + ArrayList selectExprs = conf.getColList(); + List listExprs = new ArrayList(); + + for (String col : colList) { + // col is the internal name i.e. 
position within the expression list + Integer pos = new Integer(col); + exprNodeDesc expr = selectExprs.get(pos.intValue()); + cols = Utilities.mergeUniqElems(cols, expr.getCols()); + listExprs.add(pos); + } + + OpParseContext ctx = opParseCtx.get(this); + ctx.setColNames(cols); + opParseCtx.put(this, ctx); + return cols; + } + + public List genColLists(HashMap, OpParseContext> opParseCtx) + throws SemanticException { + List cols = new ArrayList(); + + for(Operator o: childOperators) { + // if one of my children is a fileSink, return everything + if ((o instanceof FileSinkOperator) || (o instanceof ScriptOperator)) + return getColsFromExpr(opParseCtx); + + cols = Utilities.mergeUniqElems(cols, o.genColLists(opParseCtx)); + } + + if (conf.isSelectStar()) + // The input to the select does not matter. Go over the expressions and return the ones which have a marked column + return getColsFromExpr(cols, opParseCtx); + + return getColsFromExpr(opParseCtx); + } + } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java (working copy) @@ -27,20 +27,17 @@ import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.ql.plan.PlanUtils; import org.apache.hadoop.hive.ql.plan.mapredWork; import org.apache.hadoop.hive.ql.plan.tableDesc; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.exec.ExecMapper.reportStats; -import org.apache.hadoop.hive.serde2.ColumnSet; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDe; import org.apache.hadoop.hive.serde2.SerDeException; -import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.io.BytesWritable; -import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.Writable; public class ExecReducer extends MapReduceBase implements Reducer { @@ -74,15 +71,23 @@ reducer.setMapredWork(gWork); isTagged = gWork.getNeedsTagging(); try { - // We should initialize the SerDe with the TypeInfo when available. - tableDesc keyTableDesc = PlanUtils.getReduceKeyDesc(gWork); + tableDesc keyTableDesc = gWork.getKeyDesc(); inputKeyDeserializer = (SerDe)ReflectionUtils.newInstance(keyTableDesc.getDeserializerClass(), null); inputKeyDeserializer.initialize(null, keyTableDesc.getProperties()); - for(int tag=0; tag ois = new ArrayList(); + ois.add(keyObjectInspector); + ois.add(valueObjectInspector[tag]); + ois.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(Byte.class)); + rowObjectInspector[tag] = ObjectInspectorFactory.getStandardStructObjectInspector( + Arrays.asList(fieldNames), ois); } } catch (SerDeException e) { throw new RuntimeException(e); @@ -143,18 +148,12 @@ } catch (SerDeException e) { throw new HiveException(e); } - // This is a hack for generating the correct ObjectInspector. - // In the future, we should use DynamicSerde and initialize it using the type info. - if (keyObjectInspector == null) { - // Directly create ObjectInspector here because we didn't know the number of cols till now. 
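The genColLists/mergeColListsFromChildren overrides added across these operators implement a bottom-up column-pruning pass: each operator merges the column lists its children report needing with the columns it reads itself, and records the result in its OpParseContext. Reduced to its core (the Node interface below is hypothetical, not part of the patch):

    // Core of the bottom-up needed-column computation (illustrative only).
    import java.util.ArrayList;
    import java.util.List;

    class ColumnPruneSketch {
      interface Node {
        List<Node> children();
        List<String> ownColumns(); // columns this operator itself reads
      }

      static List<String> neededColumns(Node op) {
        List<String> cols = new ArrayList<String>();
        for (Node child : op.children()) {
          merge(cols, neededColumns(child)); // like Utilities.mergeUniqElems
        }
        merge(cols, op.ownColumns());        // like mergeColListsFromChildren
        return cols;
      }

      static void merge(List<String> dst, List<String> src) {
        for (String s : src) {
          if (!dst.contains(s)) {
            dst.add(s);
          }
        }
      }
    }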
@@ -143,18 +148,12 @@
       } catch (SerDeException e) {
         throw new HiveException(e);
       }
-      // This is a hack for generating the correct ObjectInspector.
-      // In the future, we should use DynamicSerde and initialize it using the type info.
-      if (keyObjectInspector == null) {
-        // Directly create ObjectInspector here because we didn't know the number of cols till now.
-        keyObjectInspector = MetadataListStructObjectInspector.getInstance(((ColumnSet)keyObject).col.size());
-      }
       // System.err.print(keyObject.toString());
       while (values.hasNext()) {
-        Text valueText = (Text)values.next();
+        Writable valueWritable = (Writable) values.next();
         //System.err.print(who.getHo().toString());
         try {
-          valueObject[tag] = inputValueDeserializer[tag].deserialize(valueText);
+          valueObject[tag] = inputValueDeserializer[tag].deserialize(valueWritable);
         } catch (SerDeException e) {
           throw new HiveException(e);
         }
@@ -162,23 +161,12 @@
         row.add(keyObject);
         row.add(valueObject[tag]);
         row.add(tag);
-        if (valueObjectInspector[tag] == null) {
-          // Directly create ObjectInspector here because we didn't know the number of cols till now.
-          valueObjectInspector[tag] = MetadataListStructObjectInspector.getInstance(((ColumnSet)valueObject[tag]).col.size());
-          ArrayList<ObjectInspector> ois = new ArrayList<ObjectInspector>();
-          ois.add(keyObjectInspector);
-          ois.add(valueObjectInspector[tag]);
-          ois.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(Byte.class));
-          rowObjectInspector[tag] = ObjectInspectorFactory.getStandardStructObjectInspector(
-            Arrays.asList(fieldNames), ois);
-        }
         reducer.process(row, rowObjectInspector[tag]);
       }
-
     } catch (HiveException e) {
       abort = true;
-      throw new IOException (e.getMessage());
+      throw new IOException (e);
     }
   }
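The net effect of the ExecReducer changes: the row shape is now derived from the plan's tableDesc (deserializer class plus Properties) before the first record arrives, instead of being guessed from the first ColumnSet row; and values are read through the Writable interface so binary serdes (BytesWritable) work as well as Text. A compressed sketch of that contract, using only calls that appear in the hunks above:

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.SerDe;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.util.ReflectionUtils;

    public class RowShapeFromPlan {
      // Given the serde class and table properties carried in a tableDesc,
      // the ObjectInspector is available at configure() time.
      static ObjectInspector shapeOf(Class<? extends SerDe> serdeClass, Properties tbl) throws Exception {
        SerDe serde = (SerDe) ReflectionUtils.newInstance(serdeClass, (Configuration) null);
        serde.initialize(null, tbl);
        return serde.getObjectInspector();
      }
    }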
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/FlatFileInputFormat.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/FlatFileInputFormat.java	(revision 0)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/FlatFileInputFormat.java	(revision 0)
@@ -0,0 +1,330 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.io.IOException;
+import java.io.EOFException;
+import java.io.InputStream;
+import java.io.DataInputStream;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RecordReader;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configurable;
+
+import org.apache.hadoop.io.serializer.Serialization;
+import org.apache.hadoop.io.serializer.Serializer;
+import org.apache.hadoop.io.serializer.SerializationFactory;
+import org.apache.hadoop.io.serializer.Deserializer;
+
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/** An {@link InputFormat} for Plain files with {@link Deserializer} records */
+public class FlatFileInputFormat<T> extends FileInputFormat<Void, FlatFileInputFormat.RowContainer<T>> {
+
+  /**
+   * A work-around until HADOOP-1230 is fixed.
+   *
+   * Allows boolean next(k,v) to be called by reference but still allow the deserializer to create a new
+   * object (i.e., row) on every call to next.
+   */
+  static public class RowContainer<T> {
+    T row;
+  }
+
+  /**
+   * An implementation of SerializationContext is responsible for looking up the Serialization implementation
+   * for the given RecordReader. Potentially based on the Configuration or some other mechanism
+   *
+   * The SerializationFactory does not give this functionality since:
+   *  1. Requires Serialization implementations to be specified in the Configuration a-priori (although same as setting
+   *     a SerializationContext)
+   *  2. Does not lookup the actual subclass being deserialized. e.g., for Serializable does not have a way of configuring
+   *     the actual Java class being serialized/deserialized.
+   */
+  static public interface SerializationContext<S> extends Configurable {
+
+    /**
+     * An {@link Serialization} object for objects of type S
+     * @return a serialization object for this context
+     */
+    public Serialization<S> getSerialization() throws IOException;
+
+    /**
+     * Produces the specific class to deserialize
+     */
+    public Class<? extends S> getRealClass() throws IOException;
+  }
+
+  /**
+   * The JobConf key for the Serialization implementation
+   */
+  static public final String SerializationImplKey = "mapred.input.serialization.implKey";
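A SerializationContext can also be hand-rolled when a job always reads one row type. A hypothetical example, pinning the row class to Text and Hadoop's WritableSerialization (contrast with the conf-driven implementation that follows):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.serializer.Serialization;
    import org.apache.hadoop.io.serializer.WritableSerialization;

    // Hypothetical context with hard-coded choices instead of JobConf lookups.
    public class TextRowContext implements FlatFileInputFormat.SerializationContext<Text> {
      private Configuration conf;
      public void setConf(Configuration conf) { this.conf = conf; }
      public Configuration getConf() { return conf; }
      public Class<Text> getRealClass() throws IOException { return Text.class; }
      @SuppressWarnings("unchecked")
      public Serialization<Text> getSerialization() throws IOException {
        // WritableSerialization handles any Writable, including Text.
        return (Serialization<Text>) (Serialization<?>) new WritableSerialization();
      }
    }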
+  /**
+   * An implementation of {@link SerializationContext} that reads the Serialization class and
+   * specific subclass to be deserialized from the JobConf.
+   *
+   */
+  static public class SerializationContextFromConf<S> implements FlatFileInputFormat.SerializationContext<S> {
+
+    /**
+     * The JobConf key for the Class that is being deserialized.
+     */
+    static public final String SerializationSubclassKey = "mapred.input.serialization.subclassKey";
+
+    /**
+     * Implements Configurable so it can use the configuration to find the right classes.
+     * Note: ReflectionUtils will automatically call setConf with the right configuration.
+     */
+    private Configuration conf;
+
+    public void setConf(Configuration conf) {
+      this.conf = conf;
+    }
+
+    public Configuration getConf() {
+      return conf;
+    }
+
+    /**
+     * @return the actual class being deserialized
+     * @exception does not currently throw IOException
+     */
+    public Class<S> getRealClass() throws IOException {
+      return (Class<S>)conf.getClass(SerializationSubclassKey, null, Object.class);
+    }
+
+    /**
+     * Looks up and instantiates the Serialization Object
+     *
+     * Important to note here that we are not relying on the Hadoop SerializationFactory part of the
+     * Serialization framework. This is because in the case of Non-Writable Objects, we cannot make any
+     * assumptions about the uniformity of the serialization class APIs - i.e., there may not be a "write"
+     * method call and a subclass may need to implement its own Serialization classes.
+     * The SerializationFactory currently returns the first (de)serializer that is compatible
+     * with the class to be deserialized; in this context, that assumption isn't necessarily true.
+     *
+     * @return the serialization object for this context
+     * @exception does not currently throw any IOException
+     */
+    public Serialization<S> getSerialization() throws IOException {
+      Class<Serialization<S>> tClass = (Class<Serialization<S>>)conf.getClass(SerializationImplKey, null, Serialization.class);
+      return tClass == null ? null : (Serialization<S>)ReflectionUtils.newInstance(tClass, conf);
+    }
+  }
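Illustrative wiring for the conf-driven context above (the two key constants come from the patch; the concrete class names are only examples):

    import org.apache.hadoop.mapred.JobConf;

    public class FlatFileJobSetup {
      static JobConf exampleJob() {
        JobConf job = new JobConf();
        job.setInputFormat(FlatFileInputFormat.class);
        job.set(FlatFileInputFormat.SerializationImplKey,
                "org.apache.hadoop.io.serializer.WritableSerialization");
        job.set(FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
                "org.apache.hadoop.io.Text");
        return job;
      }
    }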
+  /**
+   * An {@link RecordReader} for plain files with {@link Deserializer} records
+   *
+   * Reads one row at a time of type R.
+   * R is intended to be a base class of something such as: Record, Writable, Text, ...
+   *
+   */
+  public class FlatFileRecordReader<R> implements RecordReader<Void, FlatFileInputFormat.RowContainer<R>> {
+
+    /**
+     * The stream in use - is fsin if not compressed, otherwise, it is dcin.
+     */
+    private final DataInputStream in;
+
+    /**
+     * The decompressed stream or null if the input is not decompressed.
+     */
+    private final InputStream dcin;
+
+    /**
+     * The underlying stream.
+     */
+    private final FSDataInputStream fsin;
+
+    /**
+     * For calculating progress
+     */
+    private final long end;
+
+    /**
+     * The constructed deserializer
+     */
+    private final Deserializer<R> deserializer;
+
+    /**
+     * Once EOF is reached, stop calling the deserializer
+     */
+    private boolean isEOF;
+
+    /**
+     * The JobConf which contains information needed to instantiate the correct Deserializer
+     */
+    private Configuration conf;
+
+    /**
+     * The actual class of the rows we are deserializing, not just the base class
+     */
+    private Class<R> realRowClass;
+
+    /**
+     * The JobConf key of the SerializationContext to use
+     */
+    static public final String SerializationContextImplKey = "mapred.input.serialization.context_impl";
+
+    /**
+     * FlatFileRecordReader constructor constructs the underlying stream (potentially decompressed) and
+     * creates the deserializer.
+     *
+     * @param conf the jobconf
+     * @param split the split for this file
+     */
+    public FlatFileRecordReader(Configuration conf,
+                                FileSplit split) throws IOException {
+      final Path path = split.getPath();
+      FileSystem fileSys = path.getFileSystem(conf);
+      CompressionCodecFactory compressionCodecs = new CompressionCodecFactory(conf);
+      final CompressionCodec codec = compressionCodecs.getCodec(path);
+      this.conf = conf;
+
+      fsin = fileSys.open(path);
+      if (codec != null) {
+        dcin = codec.createInputStream(fsin);
+        in = new DataInputStream(dcin);
+      } else {
+        dcin = null;
+        in = fsin;
+      }
+
+      isEOF = false;
+      end = split.getLength();
+
+      // Instantiate a SerializationContext which this will use to lookup the Serialization class and the
+      // actual class being deserialized
+      SerializationContext<R> sinfo;
+      Class<? extends SerializationContext> sinfoClass =
+        (Class<? extends SerializationContext>)conf.getClass(SerializationContextImplKey, SerializationContextFromConf.class);
+
+      sinfo = (SerializationContext<R>)ReflectionUtils.newInstance(sinfoClass, conf);
+
+      // Get the Serialization object and the class being deserialized
+      Serialization<R> serialization = sinfo.getSerialization();
+      realRowClass = (Class<R>)sinfo.getRealClass();
+
+      deserializer = (Deserializer<R>)serialization.getDeserializer((Class<R>)realRowClass);
+      deserializer.open(in);
+    }
+
+    /**
+     * @return null
+     */
+    public Void createKey() {
+      return null;
+    }
+
+    /**
+     * @return a new R instance.
+     */
+    public RowContainer<R> createValue() {
+      RowContainer<R> r = new RowContainer<R>();
+      r.row = (R)ReflectionUtils.newInstance(realRowClass, conf);
+      return r;
+    }
+
+    /**
+     * Returns the next row # and value
+     *
+     * @param key - void as these files have a value only
+     * @param value - the row container which is always re-used, but the internal value may be set to a new Object
+     * @return whether the key and value were read. True if they were and false if EOF
+     * @exception IOException from the deserializer
+     */
+    public synchronized boolean next(Void key, RowContainer<R> value) throws IOException {
+      if(isEOF || in.available() == 0) {
+        isEOF = true;
+        return false;
+      }
+
+      // the deserializer is responsible for actually reading each record from the stream
+      try {
+        value.row = deserializer.deserialize(value.row);
+        if (value.row == null) {
+          isEOF = true;
+          return false;
+        }
+        return true;
+      } catch(EOFException e) {
+        isEOF = true;
+        return false;
+      }
+    }
+
+    public synchronized float getProgress() throws IOException {
+      // this assumes no splitting
+      if (end == 0) {
+        return 0.0f;
+      } else {
+        // gives progress over uncompressed stream
+        // assumes deserializer is not buffering itself
+        return Math.min(1.0f, fsin.getPos()/(float)(end));
+      }
+    }
+
+    public synchronized long getPos() throws IOException {
+      // assumes deserializer is not buffering itself
+      // position over uncompressed stream. not sure what
+      // effect this has on stats about job
+      return fsin.getPos();
+    }
+
+    public synchronized void close() throws IOException {
+      // assuming that this closes the underlying streams
+      deserializer.close();
+    }
+  }
+
+  protected boolean isSplittable(FileSystem fs, Path filename) {
+    return false;
+  }
+
+  public RecordReader<Void, RowContainer<T>> getRecordReader(InputSplit split,
+                                                             JobConf job, Reporter reporter)
+    throws IOException {
+
+    reporter.setStatus(split.toString());
+
+    return new FlatFileRecordReader<T>(job, (FileSplit) split);
+  }
+}
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java	(working copy)
@@ -219,7 +219,6 @@
     return result.toArray(new HiveInputSplit[result.size()]);
   }
 
-
   private tableDesc getTableDescFromPath(Path dir) throws IOException {
     partitionDesc partDesc = pathToPartitionInfo.get(dir.toString());
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(working copy)
@@ -49,7 +49,7 @@
 TOK_TRUE;
 TOK_FALSE;
 TOK_TRANSFORM;
-TOK_COLLIST;
+TOK_EXPLIST;
 TOK_ALIASLIST;
 TOK_GROUPBY;
 TOK_ORDERBY;
@@ -64,6 +64,7 @@
 TOK_ISNULL;
 TOK_ISNOTNULL;
 TOK_TINYINT;
+TOK_SMALLINT;
 TOK_INT;
 TOK_BIGINT;
 TOK_BOOLEAN;
@@ -81,6 +82,9 @@
 TOK_ALTERTABLE_ADDCOLS;
 TOK_ALTERTABLE_REPLACECOLS;
 TOK_ALTERTABLE_DROPPARTS;
+TOK_ALTERTABLE_SERDEPROPERTIES;
+TOK_ALTERTABLE_SERIALIZER;
+TOK_ALTERTABLE_PROPERTIES;
 TOK_SHOWTABLES;
 TOK_SHOWPARTITIONS;
 TOK_CREATEEXTTABLE;
@@ -96,6 +100,7 @@
 TOK_TABLEROWFORMATMAPKEYS;
 TOK_TABLEROWFORMATLINES;
 TOK_TBLSEQUENCEFILE;
+TOK_TBLTEXTFILE;
 TOK_TABCOLNAME;
 TOK_TABLELOCATION;
 TOK_TABLESAMPLE;
@@ -106,10 +111,10 @@
 TOK_CREATEFUNCTION;
 TOK_EXPLAIN;
 TOK_TABLESERIALIZER;
-TOK_TABLSERDEPROPERTIES;
-TOK_TABLESERDEPROPLIST;
+TOK_TABLEPROPERTIES;
+TOK_TABLEPROPLIST;
 TOK_LIMIT;
-TOKTABLESERDEPROPERTY;
+TOK_TABLEPROPERTY;
 }
 
@@ -157,9 +162,9 @@
     ;
 
 createStatement
-    : KW_CREATE (ext=KW_EXTERNAL)? KW_TABLE name=Identifier LPAREN columnNameTypeList RPAREN tableComment? tablePartition? tableBuckets? tableRowFormat? tableFileFormat? tableLocation?
-    -> {$ext == null}? ^(TOK_CREATETABLE $name columnNameTypeList tableComment? tablePartition? tableBuckets? tableRowFormat? tableFileFormat? tableLocation?)
-    ->                 ^(TOK_CREATEEXTTABLE $name columnNameTypeList tableComment? tablePartition? tableBuckets? tableRowFormat? tableFileFormat? tableLocation?)
+    : KW_CREATE (ext=KW_EXTERNAL)? KW_TABLE name=Identifier (LPAREN columnNameTypeList RPAREN)? tableComment? tablePartition? tableBuckets? tableRowFormat? tableFileFormat? tableLocation?
+    -> {$ext == null}? ^(TOK_CREATETABLE $name columnNameTypeList? tableComment? tablePartition? tableBuckets? tableRowFormat? tableFileFormat? tableLocation?)
+    ->                 ^(TOK_CREATEEXTTABLE $name columnNameTypeList? tableComment? tablePartition? tableBuckets? tableRowFormat? tableFileFormat? tableLocation?)
    ;
 
 dropStatement
@@ -170,6 +175,8 @@
     : alterStatementRename
     | alterStatementAddCol
    | alterStatementDropPartitions
+    | alterStatementProperties
+    | alterStatementSerdeProperties
     ;
 
 alterStatementRename
@@ -188,6 +195,18 @@
     -> ^(TOK_ALTERTABLE_DROPPARTS Identifier partitionSpec+)
     ;
 
+alterStatementProperties
+    : KW_ALTER KW_TABLE name=Identifier KW_SET KW_PROPERTIES tableProperties
+    -> ^(TOK_ALTERTABLE_PROPERTIES $name tableProperties)
+    ;
+
+alterStatementSerdeProperties
+    : KW_ALTER KW_TABLE name=Identifier KW_SET KW_SERDE serde=StringLiteral (KW_WITH KW_SERDEPROPERTIES tableProperties)?
+    -> ^(TOK_ALTERTABLE_SERIALIZER $name $serde tableProperties?)
+    | KW_ALTER KW_TABLE name=Identifier KW_SET KW_SERDEPROPERTIES tableProperties
+    -> ^(TOK_ALTERTABLE_SERDEPROPERTIES $name tableProperties)
+    ;
+
 descStatement
     : KW_DESCRIBE (isExtended=KW_EXTENDED)? (tab=tabName)
     -> ^(TOK_DESCTABLE $tab $isExtended?)
     ;
@@ -227,23 +246,23 @@
     : KW_ROW KW_FORMAT KW_DELIMITED tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier?
     -> ^(TOK_TABLEROWFORMAT tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier?)
-    | KW_ROW KW_FORMAT KW_SERIALIZER name=StringLiteral tableSerializerProperties?
-    -> ^(TOK_TABLESERIALIZER $name tableSerializerProperties?)
+    | KW_ROW KW_FORMAT KW_SERDE name=StringLiteral (KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
+    -> ^(TOK_TABLESERIALIZER $name $serdeprops?)
     ;
 
-tableSerializerProperties
+tableProperties
     :
-      KW_WITH KW_PROPERTIES LPAREN propertiesList RPAREN -> ^(TOK_TABLSERDEPROPERTIES propertiesList)
+      LPAREN propertiesList RPAREN -> ^(TOK_TABLEPROPERTIES propertiesList)
     ;
 
 propertiesList
     :
-      keyValueProperty (COMMA keyValueProperty)* -> ^(TOK_TABLESERDEPROPLIST keyValueProperty+)
+      keyValueProperty (COMMA keyValueProperty)* -> ^(TOK_TABLEPROPLIST keyValueProperty+)
     ;
 
 keyValueProperty
     :
-      key=StringLiteral EQUAL value=StringLiteral -> ^(TOKTABLESERDEPROPERTY $key $value)
+      key=StringLiteral EQUAL value=StringLiteral -> ^(TOK_TABLEPROPERTY $key $value)
     ;
 
 tableRowFormatFieldIdentifier
@@ -273,6 +292,7 @@
 tableFileFormat
     :
       KW_STORED KW_AS KW_SEQUENCEFILE -> TOK_TBLSEQUENCEFILE
+    | KW_STORED KW_AS KW_TEXTFILE -> TOK_TBLTEXTFILE
     ;
 
 tableLocation
@@ -317,6 +337,7 @@
 primitiveType
     : KW_TINYINT -> TOK_TINYINT
+    | KW_SMALLINT -> TOK_SMALLINT
     | KW_INT -> TOK_INT
     | KW_BIGINT -> TOK_BIGINT
     | KW_BOOLEAN -> TOK_BOOLEAN
@@ -420,23 +441,22 @@
 
 selectList
     :
-    selectItem
-    ( COMMA selectItem )* -> selectItem+
+    selectItem ( COMMA selectItem )* -> selectItem+
+    | trfmClause -> ^(TOK_SELEXPR trfmClause)
     ;
 
 selectItem
     :
-    trfmClause -> ^(TOK_SELEXPR trfmClause)
-    | (selectExpression (KW_AS Identifier)?) -> ^(TOK_SELEXPR selectExpression Identifier?)
+    ( selectExpression (KW_AS Identifier)?) -> ^(TOK_SELEXPR selectExpression Identifier?)
     ;
 
 trfmClause
     :
    KW_TRANSFORM
-   LPAREN columnList RPAREN
-   KW_AS
-   LPAREN aliasList RPAREN
-   KW_USING StringLiteral -> ^(TOK_TRANSFORM columnList aliasList StringLiteral)
+   LPAREN expressionList RPAREN
+   KW_USING StringLiteral
+   (KW_AS LPAREN aliasList RPAREN)?
+   -> ^(TOK_TRANSFORM expressionList StringLiteral aliasList?)
     ;
 
 selectExpression
@@ -448,18 +468,19 @@
 
 tableAllColumns
     :
-    Identifier DOT STAR -> ^(TOK_ALLCOLREF Identifier)
+    STAR -> ^(TOK_ALLCOLREF)
+    | Identifier DOT STAR -> ^(TOK_ALLCOLREF Identifier)
     ;
 
 // table.column
 tableColumn
     :
-    (tab=Identifier)? DOT col=Identifier -> ^(TOK_COLREF $tab? $col)
+    (tab=Identifier DOT)? col=Identifier -> ^(TOK_COLREF $tab? $col)
     ;
 
-columnList
+expressionList
     :
-    tableColumn (COMMA tableColumn)* -> ^(TOK_COLLIST tableColumn+)
+    expression (COMMA expression)* -> ^(TOK_EXPLIST expression+)
     ;
 
 aliasList
@@ -478,7 +499,7 @@
 joinSource
     :
     fromSource
-    ( joinToken^ fromSource (KW_ON! precedenceEqualExpression)? )+
+    ( joinToken^ fromSource (KW_ON! expression)? )+
     ;
 
 joinToken
@@ -496,7 +517,7 @@
 
 tableSample
     :
-    KW_TABLESAMPLE LPAREN KW_BUCKET (numerator=Number) KW_OUT KW_OF (denominator=Number) (KW_ON col+=Identifier (COMMA col+=Identifier)*)? RPAREN -> ^(TOK_TABLESAMPLE $numerator $denominator $col*)
+    KW_TABLESAMPLE LPAREN KW_BUCKET (numerator=Number) KW_OUT KW_OF (denominator=Number) (KW_ON expr+=expression (COMMA expr+=expression)*)? RPAREN -> ^(TOK_TABLESAMPLE $numerator $denominator $expr*)
     ;
 
 tableSource
@@ -570,12 +591,12 @@
     : // LEFT and RIGHT keywords are also function names
    Identifier
    LPAREN (
-     (dist=KW_DISTINCT)?
-     expression
-     (COMMA expression)*
+     ((dist=KW_DISTINCT)?
+     expression
+     (COMMA expression)*)?
    )?
-   RPAREN -> {$dist == null}? ^(TOK_FUNCTION Identifier expression+)
-                          -> ^(TOK_FUNCTIONDI Identifier expression+)
+   RPAREN -> {$dist == null}? ^(TOK_FUNCTION Identifier (expression+)?)
+                          -> ^(TOK_FUNCTIONDI Identifier (expression+)?)
     ;
 
@@ -644,7 +665,7 @@
     precedenceUnaryExpression (precedenceBitwiseXorOperator^ precedenceUnaryExpression)*
     ;
 
-	
+
 precedenceStarOperator
     :
     STAR | DIVIDE | MOD
@@ -808,6 +829,7 @@
 KW_COMMENT: 'COMMENT';
 KW_BOOLEAN: 'BOOLEAN';
 KW_TINYINT: 'TINYINT';
+KW_SMALLINT: 'SMALLINT';
 KW_INT: 'INT';
 KW_BIGINT: 'BIGINT';
 KW_FLOAT: 'FLOAT';
@@ -834,6 +856,7 @@
 KW_LINES: 'LINES';
 KW_STORED: 'STORED';
 KW_SEQUENCEFILE: 'SEQUENCEFILE';
+KW_TEXTFILE: 'TEXTFILE';
 KW_LOCATION: 'LOCATION';
 KW_TABLESAMPLE: 'TABLESAMPLE';
 KW_BUCKET: 'BUCKET';
@@ -849,10 +872,12 @@
 KW_FUNCTION: 'FUNCTION';
 KW_EXPLAIN: 'EXPLAIN';
 KW_EXTENDED: 'EXTENDED';
-KW_SERIALIZER: 'SERIALIZER';
+KW_SERDE: 'SERDE';
 KW_WITH: 'WITH';
-KW_PROPERTIES: 'SERDEPROPERTIES';
+KW_SERDEPROPERTIES: 'SERDEPROPERTIES';
 KW_LIMIT: 'LIMIT';
+KW_SET: 'SET';
+KW_PROPERTIES: 'TBLPROPERTIES';
 
 // Operators
 
@@ -909,7 +934,7 @@
 
 StringLiteral
     :
-    '\'' (~'\'')* '\'' ( '\'' (~'\'')* '\'' )*
+    ( '\'' (~'\'')* '\'' | '\"' (~'\"')* '\"' )+
     ;
 
 CharSetLiteral
@@ -926,6 +951,7 @@
 Identifier
     :
     (Letter | Digit) (Letter | Digit | '_')*
+    | '`' (Letter | Digit) (Letter | Digit | '_')* '`'
     ;
 
 CharSetName
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java	(working copy)
@@ -38,6 +38,7 @@
   INVALID_OPERATOR_SIGNATURE("Operator Argument Type Mismatch"),
   INVALID_JOIN_CONDITION_1("Both Left and Right Aliases Encountered in Join"),
   INVALID_JOIN_CONDITION_2("Neither Left nor Right Aliases Encountered in Join"),
+  INVALID_JOIN_CONDITION_3("OR not supported in Join currently"),
   INVALID_TRANSFORM("TRANSFORM with Other Select Columns not Supported"),
   DUPLICATE_GROUPBY_KEY("Repeated Key in Group By"),
   UNSUPPORTED_MULTIPLE_DISTINCTS("DISTINCT on Different Columns not Supported"),
@@ -52,14 +53,18 @@
   INVALID_MAPINDEX_TYPE("Map Key Type does not Match Index Expression Type"),
   NON_COLLECTION_TYPE("[] not Valid on Non Collection Types"),
   SELECT_DISTINCT_WITH_GROUPBY("SELECT DISTINCT and GROUP BY can not be in the same query"),
-  COLUMN_REPAEATED_IN_PARTITIONING_COLS("Column repeated in partitioning columns"),
+  COLUMN_REPEATED_IN_PARTITIONING_COLS("Column repeated in partitioning columns"),
   DUPLICATE_COLUMN_NAMES("Duplicate column names"),
   COLUMN_REPEATED_IN_CLUSTER_SORT("Same column cannot appear in cluster and sort by"),
   SAMPLE_RESTRICTION("Cannot Sample on More Than Two Columns"),
   SAMPLE_COLUMN_NOT_FOUND("Sample Column Not Found"),
   NO_PARTITION_PREDICATE("No Partition Predicate Found"),
-  INVALID_DOT(". operator is only supported on struct or list of struct types");
-
+  INVALID_DOT(". operator is only supported on struct or list of struct types"),
+  INVALID_TBL_DDL_SERDE("Either list of columns or a custom serializer should be specified"),
+  TARGET_TABLE_COLUMN_MISMATCH("Cannot insert into target table because column number/types are different"),
+  TABLE_ALIAS_NOT_ALLOWED("Table Alias not Allowed in Sampling Clause"),
+  NON_BUCKETED_TABLE("Sampling Expression Needed for Non-Bucketed Table");
+
   private String mesg;
   ErrorMsg(String mesg) {
     this.mesg = mesg;
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java	(working copy)
@@ -39,6 +39,9 @@
       case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
       case HiveParser.TOK_ALTERTABLE_RENAME:
       case HiveParser.TOK_ALTERTABLE_DROPPARTS:
+      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
+      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
+      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
       case HiveParser.TOK_SHOWTABLES:
       case HiveParser.TOK_SHOWPARTITIONS:
         return new DDLSemanticAnalyzer(conf);
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java	(working copy)
@@ -22,6 +22,10 @@
 
 import org.antlr.runtime.tree.CommonTree;
 
+/**
+ * Internal representation of the join tree
+ *
+ */
 public class QBJoinTree {
   private String leftAlias;
@@ -33,15 +37,29 @@
   private joinCond[] joinCond;
   private boolean noOuterJoin;
 
-  // conditions
+  // join conditions
   private Vector<Vector<String>> expressions;
 
+  // filters
+  private Vector<Vector<CommonTree>> filters;
+
+  /**
+   * constructor
+   */
   public QBJoinTree() { nextTag = 0;}
 
+  /**
+   * returns left alias if any - this is used for merging later on
+   * @return left alias if any
+   */
   public String getLeftAlias() {
     return leftAlias;
   }
 
+  /**
+   * set left alias for the join expression
+   * @param leftAlias String
+   */
   public void setLeftAlias(String leftAlias) {
     this.leftAlias = leftAlias;
   }
@@ -109,6 +127,21 @@
   public void setNoOuterJoin(boolean noOuterJoin) {
     this.noOuterJoin = noOuterJoin;
   }
+
+  /**
+   * @return the filters
+   */
+  public Vector<Vector<CommonTree>> getFilters() {
+    return filters;
+  }
+
+  /**
+   * @param filters the filters to set
+   */
+  public void setFilters(Vector<Vector<CommonTree>> filters) {
+    this.filters = filters;
+  }
+
 }
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java	(working copy)
@@ -22,7 +22,6 @@
 
 import org.apache.hadoop.hive.ql.parse.QBParseInfo;
 import org.apache.hadoop.hive.ql.parse.QBMetaData;
-import org.apache.hadoop.hive.ql.metadata.Table;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -158,14 +157,6 @@
   }
 
   public boolean isSelectStarQuery() {
-    if (!qbp.isSelectStarQuery() || !aliasToSubq.isEmpty())
-      return false;
-
-    Iterator<Map.Entry<String, Table>> iter = qbm.getAliasToTable().entrySet().iterator();
-    Table tab = ((Map.Entry<String, Table>)iter.next()).getValue();
-    if (tab.isPartitioned())
-      return false;
-
-    return true;
+    return qbp.isSelectStarQuery() && aliasToSubq.isEmpty();
   }
 }
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java	(working copy)
@@ -115,5 +115,4 @@
   public Table getSrcForAlias(String alias) {
     return this.aliasToTable.get(alias.toLowerCase());
   }
-
 }
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java	(working copy)
@@ -104,9 +104,10 @@
   }
 
   public static String stripQuotes(String val) throws SemanticException {
-    if (val.charAt(0) == '\'' && val.charAt(val.length() - 1) == '\'') {
+    if ((val.charAt(0) == '\'' && val.charAt(val.length() - 1) == '\'')
+        || (val.charAt(0) == '\"' && val.charAt(val.length() - 1) == '\"')) {
       val = val.substring(1, val.length() - 1);
-    }
+    }
     return val;
   }
 
@@ -142,19 +143,48 @@
     }
   }
 
+  /**
+   * Remove the encapsulating "`" pair from the identifier.
+   * We allow users to use "`" to escape identifiers for table names,
+   * column names and aliases, in case they coincide with Hive language
+   * keywords.
+   */
+  public static String unescapeIdentifier(String val) {
+    if (val == null) {
+      return null;
+    }
+    if (val.charAt(0) == '`' && val.charAt(val.length() - 1) == '`') {
+      val = val.substring(1, val.length() - 1);
+    }
+    return val;
+  }
+
   @SuppressWarnings("nls")
   public static String unescapeSQLString(String b) {
-    assert(b.charAt(0) == '\'');
-    assert(b.charAt(b.length()-1) == '\'');
+    Character enclosure = null;
 
     // Some of the strings can be passed in as unicode.
    // For example, the delimiter can be passed in as \002 - So, we first check if the
    // string is a unicode number, else go back to the old behavior
     StringBuilder sb = new StringBuilder(b.length());
-    int i = 1;
-    while (i < (b.length()-1)) {
-
-      if (b.charAt(i) == '\\' && (i+4 < b.length())) {
+    for (int i=0; i < b.length(); i++) {
+
+      char currentChar = b.charAt(i);
+      if (enclosure == null) {
+        if (currentChar == '\'' || b.charAt(i) == '\"') {
+          enclosure = currentChar;
+        }
+        // ignore all other chars outside the enclosure
+        continue;
+      }
+
+      if (enclosure.equals(currentChar)) {
+        enclosure = null;
+        continue;
+      }
+
+      if (currentChar == '\\' && (i+4 < b.length())) {
         char i1 = b.charAt(i+1);
         char i2 = b.charAt(i+2);
         char i3 = b.charAt(i+3);
@@ -167,12 +197,12 @@
           bValArr[0] = bVal;
           String tmp = new String(bValArr);
           sb.append(tmp);
-          i += 4;
+          i += 3;
           continue;
         }
       }
-
-      if (b.charAt(i) == '\\' && (i+2 < b.length())) {
+
+      if (currentChar == '\\' && (i+2 < b.length())) {
         char n=b.charAt(i+1);
         switch(n) {
         case '0': sb.append("\0"); break;
@@ -191,9 +221,8 @@
         }
         i++;
       } else {
-        sb.append(b.charAt(i));
+        sb.append(currentChar);
       }
-      i++;
     }
     return sb.toString();
   }
@@ -219,7 +248,7 @@
 
     try {
       // get table metadata
-      tableName = ast.getChild(0).getText();
+      tableName = unescapeIdentifier(ast.getChild(0).getText());
       tableHandle = db.getTable(tableName);
 
       // get partition metadata if partition specified
@@ -230,7 +259,7 @@
         for (int i = 0; i < partspec.getChildCount(); ++i) {
           CommonTree partspec_val = (CommonTree) partspec.getChild(i);
           String val = stripQuotes(partspec_val.getChild(1).getText());
-          partSpec.put(partspec_val.getChild(0).getText(), val);
+          partSpec.put(unescapeIdentifier(partspec_val.getChild(0).getText()), val);
         }
         partHandle = Hive.get().getPartition(tableHandle, partSpec, forceCreatePartition);
         if(partHandle == null) {
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java	(working copy)
@@ -59,6 +59,12 @@
   private Table tab;
 
   private exprNodeDesc prunerExpr;
+
+  // is set to true if the expression only contains partitioning columns and not any other column reference.
+  // This is used to optimize select * from table where ... scenario, when the where condition only references
+  // partitioning columns - the partitions are identified and streamed directly to the client without requiring
+  // a map-reduce job
+  private boolean containsPartCols;
 
   /** Creates a new instance of PartitionPruner */
   public PartitionPruner(String tableAlias, QBMetaData metaData) {
@@ -66,8 +72,13 @@
     this.metaData = metaData;
     this.tab = metaData.getTableForAlias(tableAlias);
     this.prunerExpr = null;
+    containsPartCols = true;
   }
 
+  public boolean containsPartitionCols() {
+    return containsPartCols;
+  }
+
  /**
   * We use exprNodeConstantDesc(class,null) to represent unknown values.
  * Except UDFOPAnd, UDFOPOr, and UDFOPNot, all UDFs are assumed to return unknown values
@@ -97,12 +108,18 @@
     switch (tokType) {
       case HiveParser.TOK_COLREF: {
 
-        assert(expr.getChildCount() == 2);
-        String tabAlias = expr.getChild(0).getText();
-        String colName = expr.getChild(1).getText();
-        if (tabAlias == null || colName == null) {
-          throw new SemanticException(ErrorMsg.INVALID_XPATH.getMsg(expr));
+        String tabAlias = null;
+        String colName = null;
+        if (expr.getChildCount() != 1) {
+          assert(expr.getChildCount() == 2);
+          tabAlias = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText());
+          colName = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(1).getText());
         }
+        else {
+          colName = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText());
+          tabAlias = SemanticAnalyzer.getTabAliasForCol(this.metaData, colName, (CommonTree)expr.getChild(0));
+        }
+
         // Set value to null if it's not partition column
         if (tabAlias.equals(tableAlias) && tab.isPartitionKey(colName)) {
           desc = new exprNodeColumnDesc(String.class, colName);
@@ -117,6 +134,7 @@
             TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(
                 this.metaData.getTableForAlias(tabAlias).getDeserializer().getObjectInspector());
             desc = new exprNodeConstantDesc(typeInfo.getStructFieldTypeInfo(colName), null);
+            containsPartCols = false;
           }
         } catch (SerDeException e){
           throw new RuntimeException(e);
@@ -195,8 +213,8 @@
       case HiveParser.TOK_COLREF: {
 
         assert(expr.getChildCount() == 2);
-        String tabAlias = expr.getChild(0).getText();
-        String colName = expr.getChild(1).getText();
+        String tabAlias = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText());
+        String colName = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(1).getText());
         if (tabAlias.equals(tableAlias) && tab.isPartitionKey(colName)) {
           hasPPred = true;
         }
@@ -227,11 +245,30 @@
     if (!(desc instanceof exprNodeConstantDesc) || ((exprNodeConstantDesc)desc).getValue() != null ) {
       LOG.trace("adding pruning expr = " + desc);
       if (this.prunerExpr == null)
-        this.prunerExpr = desc;
+        this.prunerExpr = desc;
       else
         this.prunerExpr = SemanticAnalyzer.getFuncExprNodeDesc("OR", this.prunerExpr, desc);
     }
   }
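So the pruner composes predicates two ways: addExpression above ORs in each where clause (a partition must be scanned if any clause might select from it), while addJoinOnExpression below ANDs in join-on conditions (they constrain every row source). A toy rendering of the combination rule, for illustration only:

    // Toy illustration of the combination semantics described in the javadoc below.
    static String combine(String current, String next, boolean fromJoinOn) {
      if (current == null) return next;
      return "(" + current + (fromJoinOn ? " AND " : " OR ") + next + ")";
    }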
+
+  /**
+   * Add an expression from the JOIN condition. Since these expressions will be used for all the where clauses, they
+   * are always ANDed. Then we walk through the remaining filters (in the where clause) and OR them with the existing
+   * condition.
+   */
+  @SuppressWarnings("nls")
+  public void addJoinOnExpression(CommonTree expr) throws SemanticException {
+    LOG.trace("adding pruning Tree = " + expr.toStringTree());
+    exprNodeDesc desc = genExprNodeDesc(expr);
+    // Ignore null constant expressions
+    if (!(desc instanceof exprNodeConstantDesc) || ((exprNodeConstantDesc)desc).getValue() != null ) {
+      LOG.trace("adding pruning expr = " + desc);
+      if (this.prunerExpr == null)
+        this.prunerExpr = desc;
+      else
+        this.prunerExpr = SemanticAnalyzer.getFuncExprNodeDesc("AND", this.prunerExpr, desc);
+    }
+  }
 
   /** From the table metadata prune the partitions to return the partitions **/
   @SuppressWarnings("nls")
@@ -282,7 +319,7 @@
           }
         }
         else
-          ret_parts.add(part);
+          ret_parts.add(part);
       }
     } catch (Exception e) {
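Worth spelling out the three-valued logic the pruner's comments allude to: a predicate over a non-partition column evaluates to unknown (a null exprNodeConstantDesc) at prune time, and a partition is kept unless the whole expression is provably false. A self-contained sketch of the assumed semantics, with null standing for unknown:

    // AND under three-valued logic: false dominates, then unknown.
    static Boolean and3(Boolean a, Boolean b) {
      if (Boolean.FALSE.equals(a) || Boolean.FALSE.equals(b)) return Boolean.FALSE;
      if (a == null || b == null) return null;
      return Boolean.TRUE;
    }

    // A partition survives pruning unless the predicate is definitely false.
    static boolean keepPartition(Boolean predicate) {
      return !Boolean.FALSE.equals(predicate);
    }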
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/OperatorInfo.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/OperatorInfo.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/OperatorInfo.java	(working copy)
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.parse;
-
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.parse.RowResolver;
-
-/**
- * Implementation of OperatorInfo which bundles the operator and its output row resolver
- *
- **/
-
-public class OperatorInfo implements Cloneable {
-  private Operator op;
-  private RowResolver rr;
-
-  public OperatorInfo(Operator op, RowResolver rr) {
-    this.op = op;
-    this.rr = rr;
-  }
-
-  public Object clone() {
-    return new OperatorInfo(op, rr);
-  }
-
-  public Operator getOp() {
-    return op;
-  }
-
-  public void setOp(Operator op) {
-    this.op = op;
-  }
-
-  public RowResolver getRowResolver() {
-    return rr;
-  }
-
-  public void setRowResolver(RowResolver rr) {
-    this.rr = rr;
-  }
-
-  public String toString() {
-    StringBuffer sb = new StringBuffer();
-    String terminal_str = op.toString();
-    sb.append(terminal_str.substring(terminal_str.lastIndexOf('.')+1));
-    sb.append("[");
-    sb.append(rr.toString());
-    sb.append("]");
-    return sb.toString();
-  }
-}
-
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java	(working copy)
@@ -32,7 +32,6 @@
 public class QBParseInfo {
 
   private boolean isSubQ;
-  private boolean canOptTopQ;
   private String alias;
   private CommonTree joinExpr;
   private HashMap<String, CommonTree> aliasToSrc;
@@ -67,7 +66,6 @@
 
     this.alias = alias;
     this.isSubQ = isSubQ;
-    this.canOptTopQ = false;
     this.outerQueryLimit = -1;
   }
 
@@ -127,6 +125,10 @@
     return this.destToWhereExpr.get(clause);
   }
 
+  public HashMap<String, CommonTree> getDestToWhereExpr() {
+    return destToWhereExpr;
+  }
+
   public CommonTree getGroupByForClause(String clause) {
     return this.destToGroupby.get(clause);
   }
@@ -151,14 +153,6 @@
     return this.isSubQ;
   }
 
-  public boolean getCanOptTopQ() {
-    return this.canOptTopQ;
-  }
-
-  public void setCanOptTopQ(boolean canOptTopQ) {
-    this.canOptTopQ = canOptTopQ;
-  }
-
   public CommonTree getJoinExpr() {
     return this.joinExpr;
   }
@@ -201,7 +195,6 @@
     if (isSubQ ||
        (joinExpr != null) ||
        (!nameToSample.isEmpty()) ||
-       (!destToWhereExpr.isEmpty()) ||
        (!destToGroupby.isEmpty()) ||
        (!destToClusterby.isEmpty()))
       return false;
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(working copy)
@@ -24,6 +24,7 @@
 import java.lang.reflect.Method;
 
 import org.antlr.runtime.tree.*;
+import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -33,7 +34,9 @@
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.*;
+import org.apache.hadoop.hive.ql.optimizer.Optimizer;
 import org.apache.hadoop.hive.ql.plan.*;
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
@@ -41,6 +44,7 @@
 import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
 import org.apache.hadoop.hive.ql.exec.*;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.fs.Path;
 
@@ -54,8 +58,12 @@
   private HashMap<String, PartitionPruner> aliasToPruner;
   private HashMap<String, SamplePruner> aliasToSamplePruner;
   private HashMap<String, Operator<? extends Serializable>> topOps;
+  private HashMap<String, Operator<? extends Serializable>> topSelOps;
+  private HashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx;
   private List<loadTableDesc> loadTableWork;
   private List<loadFileDesc> loadFileWork;
+  private QB qb;
+  private CommonTree ast;
 
   private static class Phase1Ctx {
     String dest;
@@ -69,21 +77,42 @@
     this.aliasToPruner = new HashMap<String, PartitionPruner>();
     this.aliasToSamplePruner = new HashMap<String, SamplePruner>();
     this.topOps = new HashMap<String, Operator<? extends Serializable>>();
+    this.topSelOps = new HashMap<String, Operator<? extends Serializable>>();
     this.loadTableWork = new ArrayList<loadTableDesc>();
     this.loadFileWork = new ArrayList<loadFileDesc>();
+    opParseCtx = new HashMap<Operator<? extends Serializable>, OpParseContext>();
   }
 
   @Override
   protected void reset() {
     super.reset();
     this.aliasToPruner.clear();
-    this.topOps.clear();
     this.loadTableWork.clear();
     this.loadFileWork.clear();
+    this.topOps.clear();
+    this.topSelOps.clear();
+    qb = null;
+    ast = null;
   }
 
+  public void init(ParseContext pctx) {
+    aliasToPruner = pctx.getAliasToPruner();
+    aliasToSamplePruner = pctx.getAliasToSamplePruner();
+    topOps = pctx.getTopOps();
+    topSelOps = pctx.getTopSelOps();
+    opParseCtx = pctx.getOpParseCtx();
+    loadTableWork = pctx.getLoadTableWork();
+    loadFileWork = pctx.getLoadFileWork();
+    ctx = pctx.getContext();
+  }
+
+  public ParseContext getParseContext() {
+    return new ParseContext(conf, qb, ast, aliasToPruner, aliasToSamplePruner, topOps,
+                            topSelOps, opParseCtx, loadTableWork, loadFileWork, ctx);
+  }
+
   @SuppressWarnings("nls")
-  private void doPhase1QBExpr(CommonTree ast, QBExpr qbexpr, String id,
+  public void doPhase1QBExpr(CommonTree ast, QBExpr qbexpr, String id,
       String alias) throws SemanticException {
 
     assert (ast.getToken() != null);
@@ -142,7 +171,7 @@
         || expressionTree.getToken().getType() == HiveParser.TOK_FUNCTIONDI) {
       assert (expressionTree.getChildCount() != 0);
       assert (expressionTree.getChild(0).getType() == HiveParser.Identifier);
-      String functionName = expressionTree.getChild(0).getText();
+      String functionName = unescapeIdentifier(expressionTree.getChild(0).getText());
       if (FunctionRegistry.getUDAF(functionName) != null) {
         aggregations.put(expressionTree.toStringTree(), expressionTree);
         return;
@@ -195,17 +224,17 @@
       tableSamplePresent = true;
     }
     CommonTree tableTree = (CommonTree)(tabref.getChild(0));
-    String alias = tabref.getChild(aliasIndex).getText();
+    String alias = unescapeIdentifier(tabref.getChild(aliasIndex).getText());
     // If the alias is already there then we have a conflict
     if (qb.exists(alias)) {
       throw new SemanticException(ErrorMsg.AMBIGOUS_TABLE_ALIAS.getMsg(tabref.getChild(aliasIndex)));
     }
     if (tableSamplePresent) {
       CommonTree sampleClause = (CommonTree)tabref.getChild(1);
-      ArrayList<String> sampleCols = new ArrayList<String>();
+      ArrayList<CommonTree> sampleCols = new ArrayList<CommonTree>();
       if (sampleClause.getChildCount() > 2) {
         for (int i = 2; i < sampleClause.getChildCount(); i++) {
-          sampleCols.add(sampleClause.getChild(i).getText());
+          sampleCols.add((CommonTree)sampleClause.getChild(i));
         }
       }
       // TODO: For now only support sampling on up to two columns
@@ -214,13 +243,13 @@
         throw new SemanticException(ErrorMsg.SAMPLE_RESTRICTION.getMsg(tabref.getChild(0)));
       }
       qb.getParseInfo().setTabSample(alias, new TableSample(
-          sampleClause.getChild(0).getText(),
-          sampleClause.getChild(1).getText(),
-          sampleCols)
+          unescapeIdentifier(sampleClause.getChild(0).getText()),
+          unescapeIdentifier(sampleClause.getChild(1).getText()),
+          sampleCols)
           );
     }
 
     // Insert this map into the stats
-    String table_name = tabref.getChild(0).getText();
+    String table_name = unescapeIdentifier(tabref.getChild(0).getText());
     qb.setTabAlias(alias, table_name);
 
     qb.getParseInfo().setSrcForAlias(alias, tableTree);
@@ -233,7 +262,7 @@
       throw new SemanticException(ErrorMsg.NO_SUBQUERY_ALIAS.getMsg(subq));
     }
     CommonTree subqref = (CommonTree) subq.getChild(0);
-    String alias = subq.getChild(1).getText();
+    String alias = unescapeIdentifier(subq.getChild(1).getText());
 
     // Recursively do the first phase of semantic analysis for the subquery
     QBExpr qbexpr = new QBExpr(alias);
@@ -277,7 +306,7 @@
   }
 
   @SuppressWarnings({"fallthrough", "nls"})
-  private void doPhase1(CommonTree ast, QB qb, Phase1Ctx ctx_1)
+  public void doPhase1(CommonTree ast, QB qb, Phase1Ctx ctx_1)
       throws SemanticException {
 
     QBParseInfo qbp = qb.getParseInfo();
@@ -379,8 +408,20 @@
     }
   }
 
+  /**
+   * Generate partition pruners. The filters can occur in the where clause and in the JOIN conditions. First, walk over the
+   * filters in the join condition and AND them, since all of them are needed. Then for each where clause, traverse the
+   * filter.
+   * Note that, currently we do not propagate filters over subqueries. For eg: if the query is of the type:
+   * select ... FROM t1 JOIN (select ... t2) x where x.partition
+   * we will not recognize that the x.partition condition introduces a partition pruner on t2
+   *
+   */
   @SuppressWarnings("nls")
   private void genPartitionPruners(QB qb) throws SemanticException {
+    Map<String, Boolean> joinPartnPruner = new HashMap<String, Boolean>();
+    QBParseInfo qbp = qb.getParseInfo();
+
     // Recursively prune subqueries
     for (String alias : qb.getSubqAliases()) {
       QBExpr qbexpr = qb.getSubqForAlias(alias);
@@ -389,21 +430,12 @@
 
     for (String alias : qb.getTabAliases()) {
       String alias_id = (qb.getId() == null ? alias : qb.getId() + ":" + alias);
-      PartitionPruner pruner = new PartitionPruner(alias,
-          qb.getMetaData());
+
+      PartitionPruner pruner = new PartitionPruner(alias, qb.getMetaData());
 
       // Pass each where clause to the pruner
-      QBParseInfo qbp = qb.getParseInfo();
       for(String clause: qbp.getClauseNames()) {
 
         CommonTree whexp = (CommonTree)qbp.getWhrForClause(clause);
-
-        if (pruner.getTable().isPartitioned() &&
-            conf.getVar(HiveConf.ConfVars.HIVEPARTITIONPRUNER).equalsIgnoreCase("strict") &&
-            (whexp == null || !pruner.hasPartitionPredicate((CommonTree)whexp.getChild(0)))) {
-          throw new SemanticException(ErrorMsg.NO_PARTITION_PREDICATE.getMsg(whexp != null ? whexp : qbp.getSelForClause(clause),
-                                                                             " for Alias " + alias + " Table " + pruner.getTable().getName()));
-        }
-
         if (whexp != null) {
           pruner.addExpression((CommonTree)whexp.getChild(0));
         }
@@ -412,6 +444,54 @@
       // Add the pruner to the list
       this.aliasToPruner.put(alias_id, pruner);
     }
+
+    if (!qb.getTabAliases().isEmpty() && qb.getQbJoinTree() != null) {
+      int pos = 0;
+      for (String alias : qb.getQbJoinTree().getBaseSrc()) {
+        if (alias != null) {
+          String alias_id = (qb.getId() == null ? alias : qb.getId() + ":" + alias);
+          PartitionPruner pruner = this.aliasToPruner.get(alias_id);
+          if(pruner == null) {
+            // this means that the alias is a subquery
+            pos++;
+            continue;
+          }
+          Vector<CommonTree> filters = qb.getQbJoinTree().getFilters().get(pos);
+          for (CommonTree cond : filters) {
+            pruner.addJoinOnExpression(cond);
+            if (pruner.hasPartitionPredicate(cond))
+              joinPartnPruner.put(alias_id, new Boolean(true));
+          }
+          if (qb.getQbJoinTree().getJoinSrc() != null) {
+            filters = qb.getQbJoinTree().getFilters().get(0);
+            for (CommonTree cond : filters) {
+              pruner.addJoinOnExpression(cond);
+              if (pruner.hasPartitionPredicate(cond))
+                joinPartnPruner.put(alias_id, new Boolean(true));
+            }
+          }
+        }
+        pos++;
+      }
+    }
+
+    for (String alias : qb.getTabAliases()) {
+      String alias_id = (qb.getId() == null ? alias : qb.getId() + ":" + alias);
+      PartitionPruner pruner = this.aliasToPruner.get(alias_id);
+      if (joinPartnPruner.get(alias_id) == null) {
+        // Pass each where clause to the pruner
+        for(String clause: qbp.getClauseNames()) {
+
+          CommonTree whexp = (CommonTree)qbp.getWhrForClause(clause);
+          if (pruner.getTable().isPartitioned() &&
+              conf.getVar(HiveConf.ConfVars.HIVEPARTITIONPRUNER).equalsIgnoreCase("strict") &&
+              (whexp == null || !pruner.hasPartitionPredicate((CommonTree)whexp.getChild(0)))) {
+            throw new SemanticException(ErrorMsg.NO_PARTITION_PREDICATE.getMsg(whexp != null ? whexp : qbp.getSelForClause(clause),
+                                                                               " for Alias " + alias + " Table " + pruner.getTable().getName()));
+          }
+        }
+      }
+    }
   }
 
   private void genSamplePruners(QBExpr qbexpr) throws SemanticException {
@@ -451,7 +531,7 @@
   }
 
   @SuppressWarnings("nls")
-  private void getMetaData(QB qb) throws SemanticException {
+  public void getMetaData(QB qb) throws SemanticException {
     try {
 
       LOG.info("Get metadata for source tables");
@@ -572,7 +652,7 @@
     // String[] allAliases = joinTree.getAllAliases();
     switch (condn.getToken().getType()) {
     case HiveParser.TOK_COLREF:
-      String tblName = condn.getChild(0).getText();
+      String tblName = unescapeIdentifier(condn.getChild(0).getText().toLowerCase());
       if (isPresent(joinTree.getLeftAliases(), tblName)) {
        if (!leftAliases.contains(tblName))
          leftAliases.add(tblName);
@@ -632,62 +712,169 @@
     throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_2.getMsg(condn));
   }
 
-  private void parseJoinCondition(CommonTree joinParseTree,
-      QBJoinTree joinTree, CommonTree joinCond, Vector<String> leftSrc)
+  /**
+   * Parse the join condition.
+   * If the condition is a join condition, throw an error if it is not an equality. Otherwise, break it into left and
+   * right expressions and store in the join tree.
+   * If the condition is a join filter, add it to the filter list of the join tree. The join condition can contain conditions
+   * on both the left and right trees and filters on either. Currently, we only support equi-joins, so we throw an error
+   * if the condition involves both subtrees and is not an equality. Also, we only support AND, i.e. ORs are not supported
+   * currently as their semantics are not very clear, may lead to data explosion and there is no use case.
+   * @param joinTree jointree to be populated
+   * @param joinCond join condition
+   * @param leftSrc left sources
+   * @throws SemanticException
+   */
+  private void parseJoinCondition(QBJoinTree joinTree, CommonTree joinCond, Vector<String> leftSrc)
      throws SemanticException {
 
     switch (joinCond.getToken().getType()) {
+    case HiveParser.KW_OR:
+      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_3.getMsg(joinCond));
+
     case HiveParser.KW_AND:
-      parseJoinCondition(joinParseTree, joinTree, (CommonTree) joinCond
+      parseJoinCondition(joinTree, (CommonTree) joinCond
          .getChild(0), leftSrc);
-      parseJoinCondition(joinParseTree, joinTree, (CommonTree) joinCond
+      parseJoinCondition(joinTree, (CommonTree) joinCond
          .getChild(1), leftSrc);
      break;
 
     case HiveParser.EQUAL:
      CommonTree leftCondn = (CommonTree) joinCond.getChild(0);
-      Vector<String> leftAliases = new Vector<String>();
-      Vector<String> rightAliases = new Vector<String>();
-      parseJoinCondPopulateAlias(joinTree, leftCondn, leftAliases, rightAliases);
-      populateAliases(leftAliases, rightAliases, leftCondn, joinTree, leftSrc);
+      Vector<String> leftCondAl1 = new Vector<String>();
+      Vector<String> leftCondAl2 = new Vector<String>();
+      parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2);
 
      CommonTree rightCondn = (CommonTree) joinCond.getChild(1);
-      leftAliases.clear();
-      rightAliases.clear();
-      parseJoinCondPopulateAlias(joinTree, rightCondn, leftAliases,
-          rightAliases);
-      populateAliases(leftAliases, rightAliases, rightCondn, joinTree, leftSrc);
+      Vector<String> rightCondAl1 = new Vector<String>();
+      Vector<String> rightCondAl2 = new Vector<String>();
+      parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1, rightCondAl2);
+
+      // is it a filter or a join condition
+      if (((leftCondAl1.size() != 0) && (leftCondAl2.size() != 0)) ||
+          ((rightCondAl1.size() != 0) && (rightCondAl2.size() != 0)))
+        throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1.getMsg(joinCond));
+
+      if (leftCondAl1.size() != 0) {
+        if ((rightCondAl1.size() != 0) || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0)))
+          joinTree.getFilters().get(0).add(joinCond);
+        else if (rightCondAl2.size() != 0) {
+          populateAliases(leftCondAl1, leftCondAl2, leftCondn, joinTree, leftSrc);
+          populateAliases(rightCondAl1, rightCondAl2, rightCondn, joinTree, leftSrc);
+        }
+      }
+      else if (leftCondAl2.size() != 0) {
+        if ((rightCondAl2.size() != 0) || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0)))
+          joinTree.getFilters().get(1).add(joinCond);
+        else if (rightCondAl1.size() != 0) {
+          populateAliases(leftCondAl1, leftCondAl2, leftCondn, joinTree, leftSrc);
+          populateAliases(rightCondAl1, rightCondAl2, rightCondn, joinTree, leftSrc);
+        }
+      }
+      else if (rightCondAl1.size() != 0)
+        joinTree.getFilters().get(0).add(joinCond);
+      else
+        joinTree.getFilters().get(1).add(joinCond);
+
      break;
 
     default:
+      boolean isFunction = (joinCond.getType() == HiveParser.TOK_FUNCTION);
+
+      // Create all children
+      int childrenBegin = (isFunction ? 1 : 0);
+      ArrayList<Vector<String>> leftAlias = new ArrayList<Vector<String>>(joinCond.getChildCount() - childrenBegin);
+      ArrayList<Vector<String>> rightAlias = new ArrayList<Vector<String>>(joinCond.getChildCount() - childrenBegin);
+      for (int ci = 0; ci < joinCond.getChildCount() - childrenBegin; ci++) {
+        Vector<String> left = new Vector<String>();
+        Vector<String> right = new Vector<String>();
+        leftAlias.add(left);
+        rightAlias.add(right);
+      }
+
+      for (int ci=childrenBegin; ci<joinCond.getChildCount(); ci++)
+        parseJoinCondPopulateAlias(joinTree, (CommonTree)joinCond.getChild(ci),
+            leftAlias.get(ci-childrenBegin), rightAlias.get(ci-childrenBegin));
+
+      boolean leftAliasNull = true;
+      for (Vector<String> left : leftAlias) {
+        if (left.size() != 0) {
+          leftAliasNull = false;
+          break;
+        }
+      }
+
+      boolean rightAliasNull = true;
+      for (Vector<String> right : rightAlias) {
+        if (right.size() != 0) {
+          rightAliasNull = false;
+          break;
+        }
+      }
+
+      if (!leftAliasNull && !rightAliasNull)
+        throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1.getMsg(joinCond));
+
+      if (!leftAliasNull)
+        joinTree.getFilters().get(0).add(joinCond);
+      else
+        joinTree.getFilters().get(1).add(joinCond);
+
+      break;
     }
   }
 
+  @SuppressWarnings("nls")
+  private Operator putOpInsertMap(Operator op, RowResolver rr) {
+    OpParseContext ctx = new OpParseContext(rr);
+    opParseCtx.put(op, ctx);
+    return op;
+  }
 
   @SuppressWarnings("nls")
-  private OperatorInfo genFilterPlan(String dest, QB qb,
-      OperatorInfo input) throws SemanticException {
+  private Operator genFilterPlan(String dest, QB qb,
+      Operator input) throws SemanticException {
 
     CommonTree whereExpr = qb.getParseInfo().getWhrForClause(dest);
-    OperatorInfo output = (OperatorInfo)input.clone();
-    output.setOp(
-      OperatorFactory.getAndMakeChild(
-        new filterDesc(genExprNodeDesc((CommonTree)whereExpr.getChild(0),
-                                       qb.getParseInfo().getAlias(),
-                                       input.getRowResolver())),
-        new RowSchema(output.getRowResolver().getColumnInfos()),
-        input.getOp()
-      )
-    );
-    LOG.debug("Created Filter Plan for " + qb.getId() + ":" + dest + " row schema: " + output.getRowResolver().toString());
+    OpParseContext inputCtx = opParseCtx.get(input);
+    RowResolver inputRR = inputCtx.getRR();
+    Operator output = putOpInsertMap(
+      OperatorFactory.getAndMakeChild(
+        new filterDesc(genExprNodeDesc(qb.getMetaData(), (CommonTree)whereExpr.getChild(0), inputRR)),
+        new RowSchema(inputRR.getColumnInfos()), input), inputRR);
+
+    LOG.debug("Created Filter Plan for " + qb.getId() + ":" + dest + " row schema: " + inputRR.toString());
     return output;
   }
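A compact restatement of the classification rule parseJoinCondition (above) implements -- a sketch, not the patch's code: an equality whose two sides reference the left and right subtrees respectively contributes join keys; a predicate touching only one side becomes a pushed-down filter for that side; anything that spans both sides and is not an equality is rejected, since only equi-joins are supported:

    enum CondKind { JOIN_KEY, LEFT_FILTER, RIGHT_FILTER, INVALID }

    static CondKind classify(boolean refsLeft, boolean refsRight, boolean isEquality) {
      if (refsLeft && refsRight) {
        return isEquality ? CondKind.JOIN_KEY : CondKind.INVALID;
      }
      return refsLeft ? CondKind.LEFT_FILTER : CondKind.RIGHT_FILTER;
    }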
+
+  /**
+   * create a filter plan. The condition and the inputs are specified.
+   * @param qb current query block
+   * @param condn The condition to be resolved
+   * @param input the input operator
+   */
   @SuppressWarnings("nls")
-  private void genColList(String alias, CommonTree sel,
+  private Operator genFilterPlan(QB qb, CommonTree condn, Operator input) throws SemanticException {
+
+    OpParseContext inputCtx = opParseCtx.get(input);
+    RowResolver inputRR = inputCtx.getRR();
+    Operator output = putOpInsertMap(
+      OperatorFactory.getAndMakeChild(
+        new filterDesc(genExprNodeDesc(qb.getMetaData(), condn, inputRR)),
+        new RowSchema(inputRR.getColumnInfos()), input), inputRR);
+
+    LOG.debug("Created Filter Plan for " + qb.getId() + " row schema: " + inputRR.toString());
+    return output;
+  }
+
+  @SuppressWarnings("nls")
+  private void genColList(String tabAlias, String alias, CommonTree sel,
      ArrayList<exprNodeDesc> col_list, RowResolver input, Integer pos,
      RowResolver output) throws SemanticException {
+
+    // The table alias should exist
+    if (tabAlias != null && !input.hasTableAlias(tabAlias))
+      throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(sel));
+
     // TODO: Have to put in the support for AS clause
 
     // This is the tab.* case
@@ -703,42 +890,77 @@
     }
   }
 
-  @SuppressWarnings("nls")
-  private OperatorInfo genScriptPlan(CommonTree trfm, QB qb,
-      OperatorInfo input) throws SemanticException {
+  /**
+   * If the user script command needs any modifications - do it here
+   */
+  private String getFixedCmd(String cmd) {
+    SessionState ss = SessionState.get();
+    if(ss == null)
+      return cmd;
 
-    OperatorInfo output = (OperatorInfo)input.clone();
+    // for local mode - replace any references to packaged files by name with
+    // the reference to the original file path
+    if(ss.getConf().get("mapred.job.tracker", "local").equals("local")) {
+      Set<String> files = ss.list_resource(SessionState.ResourceType.FILE, null);
+      if((files != null) && !files.isEmpty()) {
+        int end = cmd.indexOf(" ");
+        String prog = (end == -1) ? cmd : cmd.substring(0, end);
+        String args = (end == -1) ? "" : cmd.substring(end, cmd.length());
 
-    // Change the rws in this case
-    CommonTree collist = (CommonTree) trfm.getChild(1);
-    int ccount = collist.getChildCount();
+        for(String oneFile: files) {
+          Path p = new Path(oneFile);
+          if(p.getName().equals(prog)) {
+            cmd = oneFile + args;
+            break;
+          }
+        }
+      }
+    }
+
+    return cmd;
+  }
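getFixedCmd's local-mode substitution, restated as a runnable toy (assumed behavior: if the program name matches the basename of a registered FILE resource, the full resource path is substituted so the local JVM can exec the script directly):

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;
    import org.apache.hadoop.fs.Path;

    public class FixCmdSketch {
      static String fixCmd(String cmd, Set<String> fileResources) {
        int end = cmd.indexOf(' ');
        String prog = (end == -1) ? cmd : cmd.substring(0, end);
        String args = (end == -1) ? "" : cmd.substring(end);
        for (String oneFile : fileResources) {
          if (new Path(oneFile).getName().equals(prog)) {
            return oneFile + args;
          }
        }
        return cmd;
      }

      public static void main(String[] a) {
        Set<String> files = new HashSet<String>(Arrays.asList("/tmp/hive-resources/mapper.py"));
        // prints "/tmp/hive-resources/mapper.py -k 1"
        System.out.println(fixCmd("mapper.py -k 1", files));
      }
    }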
"" : cmd.substring(end, cmd.length()); - // Change the rws in this case - CommonTree collist = (CommonTree) trfm.getChild(1); - int ccount = collist.getChildCount(); + for(String oneFile: files) { + Path p = new Path(oneFile); + if(p.getName().equals(prog)) { + cmd = oneFile + args; + break; + } + } + } + } + + return cmd; + } + + + @SuppressWarnings("nls") + private Operator genScriptPlan(CommonTree trfm, QB qb, + Operator input) throws SemanticException { + // If there is no "AS" clause, the output schema will be "key,value" + ArrayList outputColList = new ArrayList(); + boolean defaultOutputColList = (trfm.getChildCount() < 3); + if (defaultOutputColList) { + outputColList.add("key"); + outputColList.add("value"); + } else { + CommonTree collist = (CommonTree) trfm.getChild(2); + int ccount = collist.getChildCount(); + for (int i=0; i < ccount; ++i) { + outputColList.add(unescapeIdentifier(((CommonTree)collist.getChild(i)).getText())); + } + } + RowResolver out_rwsch = new RowResolver(); - StringBuilder sb = new StringBuilder(); - - for (int i = 0; i < ccount; ++i) { + StringBuilder columns = new StringBuilder(); + for (int i = 0; i < outputColList.size(); ++i) { if (i != 0) { - sb.append(","); + columns.append(","); } - sb.append(((CommonTree)collist.getChild(i)).getText()); + columns.append(outputColList.get(i)); out_rwsch.put( qb.getParseInfo().getAlias(), - ((CommonTree)collist.getChild(i)).getText(), - new ColumnInfo(((CommonTree)collist.getChild(i)).getText(), - String.class) // Everything is a string right now + outputColList.get(i), + new ColumnInfo(outputColList.get(i), String.class) // Script output is always a string ); } - output - .setOp(OperatorFactory + Operator output = putOpInsertMap(OperatorFactory .getAndMakeChild( new scriptDesc( - stripQuotes(trfm.getChild(2).getText()), - PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.tabCode), sb.toString()), + getFixedCmd(stripQuotes(trfm.getChild(1).getText())), + PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.tabCode), columns.toString(), defaultOutputColList), PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.tabCode), "")), new RowSchema( - out_rwsch.getColumnInfos()), input.getOp())); + out_rwsch.getColumnInfos()), input), out_rwsch); - output.setRowResolver(out_rwsch); return output; } @@ -776,17 +998,22 @@ private static String getColAlias(CommonTree selExpr, String defaultName) { if (selExpr.getChildCount() == 2) { // return zz for "xx + yy AS zz" - return selExpr.getChild(1).getText(); + return unescapeIdentifier(selExpr.getChild(1).getText()); } CommonTree root = (CommonTree)selExpr.getChild(0); while (root.getType() == HiveParser.DOT || root.getType() == HiveParser.TOK_COLREF) { - assert(root.getChildCount() == 2); - root = (CommonTree) root.getChild(1); + if (root.getType() == HiveParser.TOK_COLREF && root.getChildCount() == 1) { + root = (CommonTree) root.getChild(0); + } + else { + assert(root.getChildCount() == 2); + root = (CommonTree) root.getChild(1); + } } if (root.getType() == HiveParser.Identifier) { // Return zz for "xx.zz" and "xx.yy.zz" - return root.getText(); + return unescapeIdentifier(root.getText()); } else { // Return defaultName if selExpr is not a simple xx.yy.zz return defaultName; @@ -794,8 +1021,8 @@ } @SuppressWarnings("nls") - private OperatorInfo genSelectPlan(String dest, QB qb, - OperatorInfo input) throws SemanticException { + private Operator genSelectPlan(String dest, QB qb, + Operator input) throws SemanticException { CommonTree selExprList = 
qb.getParseInfo().getSelForClause(dest); @@ -804,7 +1031,9 @@ CommonTree trfm = null; String alias = qb.getParseInfo().getAlias(); Integer pos = Integer.valueOf(0); - + RowResolver inputRR = opParseCtx.get(input).getRR(); + boolean selectStar = false; + // Iterate over the selects for (int i = 0; i < selExprList.getChildCount(); ++i) { @@ -812,10 +1041,13 @@ CommonTree selExpr = (CommonTree) selExprList.getChild(i); String colAlias = getColAlias(selExpr, "_C" + i); CommonTree sel = (CommonTree)selExpr.getChild(0); - + if (sel.getToken().getType() == HiveParser.TOK_ALLCOLREF) { - genColList(qb.getParseInfo().getAlias(), sel, col_list, - input.getRowResolver(), pos, out_rwsch); + String tabAlias = null; + if (sel.getChildCount() == 1) + tabAlias = unescapeIdentifier(sel.getChild(0).getText().toLowerCase()); + genColList(tabAlias, alias, sel, col_list, inputRR, pos, out_rwsch); + selectStar = true; } else if (sel.getToken().getType() == HiveParser.TOK_TRANSFORM) { if (i > 0) { throw new SemanticException(ErrorMsg.INVALID_TRANSFORM.getMsg(sel)); @@ -825,26 +1057,28 @@ for (int j = 0; j < cols.getChildCount(); ++j) { CommonTree expr = (CommonTree) cols.getChild(j); if (expr.getToken().getType() == HiveParser.TOK_ALLCOLREF) { - genColList(alias, expr, - col_list, input.getRowResolver(), - pos, out_rwsch); + String tabAlias = null; + if (sel.getChildCount() == 1) + tabAlias = unescapeIdentifier(sel.getChild(0).getText().toLowerCase()); + + genColList(tabAlias, alias, expr, col_list, inputRR, pos, out_rwsch); + selectStar = true; } else { - exprNodeDesc exp = genExprNodeDesc(expr, alias, input.getRowResolver()); + exprNodeDesc exp = genExprNodeDesc(qb.getMetaData(), expr, inputRR); col_list.add(exp); if (!StringUtils.isEmpty(alias) && (out_rwsch.get(alias, colAlias) != null)) { throw new SemanticException(ErrorMsg.AMBIGOUS_COLUMN.getMsg(expr.getChild(1))); } - out_rwsch.put(alias, expr.getText(), + out_rwsch.put(alias, unescapeIdentifier(expr.getText()), new ColumnInfo((Integer.valueOf(pos)).toString(), - exp.getTypeInfo())); // Everything is a string right now + exp.getTypeInfo())); } } } else { // Case when this is an expression - exprNodeDesc exp = genExprNodeDesc(sel, qb.getParseInfo() - .getAlias(), input.getRowResolver()); + exprNodeDesc exp = genExprNodeDesc(qb.getMetaData(), sel, inputRR); col_list.add(exp); if (!StringUtils.isEmpty(alias) && (out_rwsch.get(alias, colAlias) != null)) { @@ -854,7 +1088,7 @@ // of the expression as the column name out_rwsch.put(alias, colAlias, new ColumnInfo((Integer.valueOf(pos)).toString(), - exp.getTypeInfo())); // Everything is a string right now + exp.getTypeInfo())); } pos = Integer.valueOf(pos.intValue() + 1); } @@ -865,29 +1099,64 @@ } } - OperatorInfo output = (OperatorInfo) input.clone(); - output.setOp(OperatorFactory.getAndMakeChild( - new selectDesc(col_list), new RowSchema(out_rwsch.getColumnInfos()), - input.getOp())); + Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild( + new selectDesc(col_list, (selExprList.getChildCount() == 1) && selectStar), new RowSchema(out_rwsch.getColumnInfos()), + input), out_rwsch); - output.setRowResolver(out_rwsch); - if (trfm != null) { output = genScriptPlan(trfm, qb, output); } - LOG.debug("Created Select Plan for clause: " + dest + " row schema: " - + output.getRowResolver().toString()); + LOG.debug("Created Select Plan for clause: " + dest + " row schema: " + out_rwsch.toString()); return output; } + /** + * Class to store UDAF related information. 
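The UDAFInfo lookup introduced below resolves two methods per aggregation: an aggregate method selected by the parameter classes, and an evaluate method selected by the mode. For intuition only, here is a self-contained sketch of that reflection-based resolution; UDAFSum, findAggregateMethod, and the method names are invented stand-ins rather than Hive's FunctionRegistry API, and the real code additionally generalizes primitive types before matching.

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for a registered UDAF: sums ints, evaluates to a long.
class UDAFSum {
  private long partial;
  public void aggregate(int value) { partial += value; }
  public long evaluate() { return partial; }
}

public class UdafResolutionSketch {
  // Find an "aggregate" overload whose parameters accept the supplied
  // classes, mirroring what a getUDAFMethod-style lookup has to do.
  static Method findAggregateMethod(Class<?> udaf, List<Class<?>> argClasses) {
    for (Method m : udaf.getMethods()) {
      if (!m.getName().equals("aggregate")) continue;
      Class<?>[] p = m.getParameterTypes();
      if (p.length != argClasses.size()) continue;
      boolean ok = true;
      for (int i = 0; i < p.length; i++)
        ok = ok && p[i].isAssignableFrom(argClasses.get(i));
      if (ok) return m;
    }
    return null; // the caller raises INVALID_FUNCTION_SIGNATURE in the patch
  }

  public static void main(String[] args) throws Exception {
    List<Class<?>> argClasses = new ArrayList<Class<?>>();
    argClasses.add(int.class);
    Method agg = findAggregateMethod(UDAFSum.class, argClasses);
    Method eval = UDAFSum.class.getMethod("evaluate");
    System.out.println(agg + " returns " + eval.getReturnType());
  }
}

The return type of the resolved evaluate method is what lets the patch replace the old "everything is a string" ColumnInfo entries with real types.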
+ */ + static class UDAFInfo { + ArrayList convertedParameters; + Method aggregateMethod; + Method evaluateMethod; + } + + /** + * Returns the UDAFInfo struct for the aggregation + * @param aggName The name of the UDAF. + * @param mode The mode of the aggregation. This affects the evaluate method. + * @param aggClasses The classes of the parameters to the UDAF. + * @param aggParameters The actual exprNodeDesc of the parameters. + * @param aggTree The CommonTree node of the UDAF in the query. + * @return UDAFInfo + * @throws SemanticException when the UDAF is not found or has problems. + */ + UDAFInfo getUDAFInfo(String aggName, groupByDesc.Mode mode, ArrayList> aggClasses, + ArrayList aggParameters, CommonTree aggTree) throws SemanticException { + UDAFInfo r = new UDAFInfo(); + r.aggregateMethod = FunctionRegistry.getUDAFMethod(aggName, aggClasses); + if (null == r.aggregateMethod) { + String reason = "Looking for UDAF \"" + aggName + "\" with parameters " + aggClasses; + throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE.getMsg((CommonTree)aggTree.getChild(0), reason)); + } + + r.convertedParameters = convertParameters(r.aggregateMethod, aggParameters); + + r.evaluateMethod = FunctionRegistry.getUDAFEvaluateMethod(aggName, mode); + if (r.evaluateMethod == null) { + String reason = "UDAF \"" + aggName + "\" does not have evaluate()/evaluatePartial() methods."; + throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg((CommonTree)aggTree.getChild(0), reason)); + } + + return r; + } + @SuppressWarnings("nls") - private OperatorInfo genGroupByPlanGroupByOperator( - QBParseInfo parseInfo, String dest, OperatorInfo reduceSinkOperatorInfo, + private Operator genGroupByPlanGroupByOperator( + QBParseInfo parseInfo, String dest, Operator reduceSinkOperatorInfo, groupByDesc.Mode mode) throws SemanticException { - RowResolver groupByInputRowResolver = reduceSinkOperatorInfo.getRowResolver(); + RowResolver groupByInputRowResolver = opParseCtx.get(reduceSinkOperatorInfo).getRR(); RowResolver groupByOutputRowResolver = new RowResolver(); groupByOutputRowResolver.setIsExprResolver(true); ArrayList groupByKeys = new ArrayList(); @@ -933,41 +1202,38 @@ aggClasses.add(paraExprInfo.getType().getPrimitiveClass()); } - if (null == FunctionRegistry.getUDAFMethod(aggName, aggClasses)) { - String reason = "Looking for UDAF \"" + aggName + "\" with parameters " + aggClasses; - throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE.getMsg((CommonTree)value.getChild(0), reason)); - } + UDAFInfo udaf = getUDAFInfo(aggName, mode, aggClasses, aggParameters, value); - aggregations.add(new aggregationDesc(aggClass, aggParameters, + aggregations.add(new aggregationDesc(aggClass, udaf.convertedParameters, value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)); groupByOutputRowResolver.put("",value.toStringTree(), new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() -1).toString(), - String.class)); // Everything is a string right now + udaf.evaluateMethod.getReturnType())); } - return new OperatorInfo( - OperatorFactory.getAndMakeChild(new groupByDesc(mode, groupByKeys, aggregations), - new RowSchema(groupByOutputRowResolver.getColumnInfos()), - reduceSinkOperatorInfo.getOp()), + return + putOpInsertMap(OperatorFactory.getAndMakeChild(new groupByDesc(mode, groupByKeys, aggregations), + new RowSchema(groupByOutputRowResolver.getColumnInfos()), + reduceSinkOperatorInfo), groupByOutputRowResolver ); } @SuppressWarnings("nls") - private OperatorInfo 
genGroupByPlanGroupByOpForward( - QBParseInfo parseInfo, String dest, OperatorInfo forwardOpInfo, + private Operator genGroupByPlanGroupByOperator1( + QBParseInfo parseInfo, String dest, Operator reduceSinkOperatorInfo, groupByDesc.Mode mode) throws SemanticException { - RowResolver inputRS = forwardOpInfo.getRowResolver(); - RowResolver outputRS = new RowResolver(); - outputRS.setIsExprResolver(true); + RowResolver groupByInputRowResolver = opParseCtx.get(reduceSinkOperatorInfo).getRR(); + RowResolver groupByOutputRowResolver = new RowResolver(); + groupByOutputRowResolver.setIsExprResolver(true); ArrayList groupByKeys = new ArrayList(); ArrayList aggregations = new ArrayList(); List grpByExprs = getGroupByForClause(parseInfo, dest); - for (int i = 0; i < grpByExprs.size(); i++) { + for (int i = 0; i < grpByExprs.size(); ++i) { CommonTree grpbyExpr = grpByExprs.get(i); String text = grpbyExpr.toStringTree(); - ColumnInfo exprInfo = inputRS.get("",text); + ColumnInfo exprInfo = groupByInputRowResolver.get("",text); if (exprInfo == null) { throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr)); @@ -975,16 +1241,13 @@ groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), exprInfo.getInternalName())); String field = (Integer.valueOf(i)).toString(); - outputRS.put("", text, - new ColumnInfo(field, exprInfo.getType())); + groupByOutputRowResolver.put("",grpbyExpr.toStringTree(), + new ColumnInfo(field, exprInfo.getType())); } - // For each aggregation - HashMap aggregationTrees = parseInfo - .getAggregationExprsForClause(dest); - assert (aggregationTrees != null); - for (Map.Entry entry : aggregationTrees.entrySet()) { - CommonTree value = entry.getValue(); + // If there is a distinctFuncExp, add all parameters to the reduceKeys. + if (parseInfo.getDistinctFuncExprForClause(dest) != null) { + CommonTree value = parseInfo.getDistinctFuncExprForClause(dest); String aggName = value.getChild(0).getText(); Class aggClass = FunctionRegistry.getUDAF(aggName); assert (aggClass != null); @@ -994,7 +1257,7 @@ for (int i = 1; i < value.getChildCount(); i++) { String text = value.getChild(i).toStringTree(); CommonTree paraExpr = (CommonTree)value.getChild(i); - ColumnInfo paraExprInfo = inputRS.get("", text); + ColumnInfo paraExprInfo = groupByInputRowResolver.get("",text); if (paraExprInfo == null) { throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(paraExpr)); } @@ -1005,46 +1268,244 @@ aggClasses.add(paraExprInfo.getType().getPrimitiveClass()); } - if (null == FunctionRegistry.getUDAFMethod(aggName, aggClasses)) { - String reason = "Looking for UDAF \"" + aggName + "\" with parameters " + aggClasses; - throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE.getMsg((CommonTree)value.getChild(0), reason)); - } + UDAFInfo udaf = getUDAFInfo(aggName, mode, aggClasses, aggParameters, value); - aggregations.add(new aggregationDesc(aggClass, aggParameters, - value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)); - outputRS.put("",value.toStringTree(), + aggregations.add(new aggregationDesc(aggClass, udaf.convertedParameters, true)); + groupByOutputRowResolver.put("",value.toStringTree(), new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() -1).toString(), - String.class)); // Everything is a string right now + udaf.evaluateMethod.getReturnType())); } - return new OperatorInfo( + HashMap aggregationTrees = parseInfo + .getAggregationExprsForClause(dest); + for (Map.Entry entry : aggregationTrees.entrySet()) { + CommonTree value = entry.getValue(); + 
if (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI) + continue; + + String aggName = value.getChild(0).getText(); + Class aggClass = FunctionRegistry.getUDAF(aggName); + assert (aggClass != null); + ArrayList aggParameters = new ArrayList(); + String text = entry.getKey(); + ColumnInfo paraExprInfo = groupByInputRowResolver.get("",text); + if (paraExprInfo == null) { + throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(value)); + } + String paraExpression = paraExprInfo.getInternalName(); + assert(paraExpression != null); + aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression)); + aggregations.add(new aggregationDesc(aggClass, aggParameters, ((mode == groupByDesc.Mode.FINAL) ? false : (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)))); + groupByOutputRowResolver.put("", value.toStringTree(), + new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() - 1).toString(), + paraExprInfo.getType())); + } + + return putOpInsertMap( OperatorFactory.getAndMakeChild(new groupByDesc(mode, groupByKeys, aggregations), - new RowSchema(outputRS.getColumnInfos()), - forwardOpInfo.getOp()), - outputRS - ); + new RowSchema(groupByOutputRowResolver.getColumnInfos()), + reduceSinkOperatorInfo), + groupByOutputRowResolver); } @SuppressWarnings("nls") - private OperatorInfo genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo, - String dest, OperatorInfo inputOperatorInfo, int numPartitionFields) + private Operator genGroupByPlanMapGroupByOperator(QB qb, String dest, Operator inputOperatorInfo, + groupByDesc.Mode mode) throws SemanticException { + + RowResolver groupByInputRowResolver = opParseCtx.get(inputOperatorInfo).getRR(); + QBParseInfo parseInfo = qb.getParseInfo(); + RowResolver groupByOutputRowResolver = new RowResolver(); + groupByOutputRowResolver.setIsExprResolver(true); + ArrayList groupByKeys = new ArrayList(); + ArrayList aggregations = new ArrayList(); + List grpByExprs = getGroupByForClause(parseInfo, dest); + for (int i = 0; i < grpByExprs.size(); ++i) { + CommonTree grpbyExpr = grpByExprs.get(i); + exprNodeDesc grpByExprNode = genExprNodeDesc(qb.getMetaData(), grpbyExpr, groupByInputRowResolver); + + groupByKeys.add(grpByExprNode); + String field = (Integer.valueOf(i)).toString(); + groupByOutputRowResolver.put("",grpbyExpr.toStringTree(), + new ColumnInfo(field, grpByExprNode.getTypeInfo())); + } + + // If there is a distinctFuncExp, add all parameters to the reduceKeys. 
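The comment above (and its twin in the reduce-sink builder further down) marks the key idea behind DISTINCT handling: the distinct expression's parameters are folded into the aggregation key, so duplicate (group key, distinct value) pairs collapse before anything is shuffled. A toy, non-Hive illustration of why keying on both columns works; every name here is invented for the example.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class DistinctKeySketch {
  public static void main(String[] args) {
    // Rows of (groupKey, x) feeding COUNT(DISTINCT x) GROUP BY groupKey.
    String[][] rows = { {"a", "1"}, {"a", "1"}, {"a", "2"}, {"b", "1"} };

    // Hash-aggregating on the composite (groupKey, x) key suppresses the
    // duplicate ("a", "1") pair on the map side.
    Set<List<String>> seen = new HashSet<List<String>>();
    for (String[] r : rows) seen.add(Arrays.asList(r[0], r[1]));

    // Three composite keys survive out of four input rows; the reducers
    // can then count rows per groupKey to finish the DISTINCT aggregate.
    System.out.println(seen.size() + " of " + rows.length);
  }
}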
+ if (parseInfo.getDistinctFuncExprForClause(dest) != null) { + CommonTree value = parseInfo.getDistinctFuncExprForClause(dest); + int numDistn=0; + // 0 is function name + for (int i = 1; i < value.getChildCount(); i++) { + CommonTree parameter = (CommonTree) value.getChild(i); + String text = parameter.toStringTree(); + if (groupByOutputRowResolver.get("",text) == null) { + exprNodeDesc distExprNode = genExprNodeDesc(qb.getMetaData(), parameter, groupByInputRowResolver); + groupByKeys.add(distExprNode); + numDistn++; + String field = (Integer.valueOf(grpByExprs.size() + numDistn -1)).toString(); + groupByOutputRowResolver.put("", text, new ColumnInfo(field, distExprNode.getTypeInfo())); + } + } + } + + // For each aggregation + HashMap aggregationTrees = parseInfo + .getAggregationExprsForClause(dest); + assert (aggregationTrees != null); + + for (Map.Entry entry : aggregationTrees.entrySet()) { + CommonTree value = entry.getValue(); + String aggName = value.getChild(0).getText(); + Class aggClass = FunctionRegistry.getUDAF(aggName); + assert (aggClass != null); + ArrayList aggParameters = new ArrayList(); + ArrayList> aggClasses = new ArrayList>(); + // 0 is the function name + for (int i = 1; i < value.getChildCount(); i++) { + CommonTree paraExpr = (CommonTree)value.getChild(i); + exprNodeDesc paraExprNode = genExprNodeDesc(qb.getMetaData(), paraExpr, groupByInputRowResolver); + + aggParameters.add(paraExprNode); + aggClasses.add(paraExprNode.getTypeInfo().getPrimitiveClass()); + } + + UDAFInfo udaf = getUDAFInfo(aggName, mode, aggClasses, aggParameters, value); + + aggregations.add(new aggregationDesc(aggClass, udaf.convertedParameters, + value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)); + groupByOutputRowResolver.put("",value.toStringTree(), + new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() -1).toString(), + udaf.evaluateMethod.getReturnType())); + } + + return putOpInsertMap( + OperatorFactory.getAndMakeChild(new groupByDesc(mode, groupByKeys, aggregations), + new RowSchema(groupByOutputRowResolver.getColumnInfos()), + inputOperatorInfo), + groupByOutputRowResolver); + } + + private ArrayList convertParameters(Method m, ArrayList aggParameters) { + + ArrayList newParameters = new ArrayList(); + Class[] pTypes = m.getParameterTypes(); + + // 0 is the function name + for (int i = 0; i < aggParameters.size(); i++) { + exprNodeDesc desc = aggParameters.get(i); + Class pType = ObjectInspectorUtils.generalizePrimitive(pTypes[i]); + if (desc instanceof exprNodeNullDesc) { + exprNodeConstantDesc newCh = new exprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfo(pType), null); + newParameters.add(newCh); + } else if (pType.isAssignableFrom(desc.getTypeInfo().getPrimitiveClass())) { + // no type conversion needed + newParameters.add(desc); + } else { + // must be implicit type conversion + Class from = desc.getTypeInfo().getPrimitiveClass(); + Class to = pType; + assert(FunctionRegistry.implicitConvertable(from, to)); + Method conv = FunctionRegistry.getUDFMethod(to.getName(), true, from); + assert(conv != null); + Class c = FunctionRegistry.getUDFClass(to.getName()); + assert(c != null); + + // get the conversion method + ArrayList conversionArg = new ArrayList(1); + conversionArg.add(desc); + newParameters.add(new exprNodeFuncDesc(TypeInfoFactory.getPrimitiveTypeInfo(pType), + c, conv, conversionArg)); + } + } + + return newParameters; + } + + @SuppressWarnings("nls") + private Operator genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo, + String 
dest, Operator inputOperatorInfo) throws SemanticException { - RowResolver reduceSinkInputRowResolver = inputOperatorInfo.getRowResolver(); + RowResolver reduceSinkInputRowResolver = opParseCtx.get(inputOperatorInfo).getRR(); RowResolver reduceSinkOutputRowResolver = new RowResolver(); reduceSinkOutputRowResolver.setIsExprResolver(true); ArrayList reduceKeys = new ArrayList(); + // Pre-compute group-by keys and store in reduceKeys + List grpByExprs = getGroupByForClause(parseInfo, dest); + for (int i = 0; i < grpByExprs.size(); ++i) { + CommonTree grpbyExpr = grpByExprs.get(i); + String text = grpbyExpr.toStringTree(); + if (reduceSinkOutputRowResolver.get("", text) == null) { + ColumnInfo exprInfo = reduceSinkInputRowResolver.get("", text); + reduceKeys.add(new exprNodeColumnDesc(exprInfo.getType(), exprInfo.getInternalName())); + reduceSinkOutputRowResolver.put("", text, + new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." + Integer.valueOf(reduceKeys.size() - 1).toString(), + exprInfo.getType())); + } + } + + // If there is a distinctFuncExp, add all parameters to the reduceKeys. + if (parseInfo.getDistinctFuncExprForClause(dest) != null) { + CommonTree value = parseInfo.getDistinctFuncExprForClause(dest); + // 0 is function name + for (int i = 1; i < value.getChildCount(); i++) { + CommonTree parameter = (CommonTree) value.getChild(i); + String text = parameter.toStringTree(); + if (reduceSinkOutputRowResolver.get("",text) == null) { + ColumnInfo exprInfo = reduceSinkInputRowResolver.get("", text); + reduceKeys.add(new exprNodeColumnDesc(exprInfo.getType(), exprInfo.getInternalName())); + reduceSinkOutputRowResolver.put("", text, + new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." + Integer.valueOf(reduceKeys.size() - 1).toString(), + exprInfo.getType())); + } + } + } + + // Put partial aggregation results in reduceValues + ArrayList reduceValues = new ArrayList(); + HashMap aggregationTrees = parseInfo + .getAggregationExprsForClause(dest); + int inputField = reduceKeys.size(); + + for (Map.Entry entry : aggregationTrees.entrySet()) { + + TypeInfo type = reduceSinkInputRowResolver.getColumnInfos().get(inputField).getType(); + reduceValues.add(new exprNodeColumnDesc( + type, (Integer.valueOf(inputField)).toString())); + inputField++; + reduceSinkOutputRowResolver.put("", ((CommonTree)entry.getValue()).toStringTree(), + new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + (Integer.valueOf(reduceValues.size()-1)).toString(), + type)); + } + + return putOpInsertMap( + OperatorFactory.getAndMakeChild( + PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, -1, + (parseInfo.getDistinctFuncExprForClause(dest) == null ? 
-1 : Integer.MAX_VALUE), -1, false), + new RowSchema(reduceSinkOutputRowResolver.getColumnInfos()), + inputOperatorInfo), + reduceSinkOutputRowResolver); + } + + @SuppressWarnings("nls") + private Operator genGroupByPlanReduceSinkOperator(QB qb, + String dest, Operator inputOperatorInfo, int numPartitionFields) throws SemanticException { + RowResolver reduceSinkInputRowResolver = opParseCtx.get(inputOperatorInfo).getRR(); + QBParseInfo parseInfo = qb.getParseInfo(); + RowResolver reduceSinkOutputRowResolver = new RowResolver(); + reduceSinkOutputRowResolver.setIsExprResolver(true); + ArrayList reduceKeys = new ArrayList(); + // Pre-compute group-by keys and store in reduceKeys + List grpByExprs = getGroupByForClause(parseInfo, dest); for (int i = 0; i < grpByExprs.size(); ++i) { CommonTree grpbyExpr = grpByExprs.get(i); - reduceKeys.add(genExprNodeDesc(grpbyExpr, parseInfo.getAlias(), - reduceSinkInputRowResolver)); + reduceKeys.add(genExprNodeDesc(qb.getMetaData(), grpbyExpr, reduceSinkInputRowResolver)); String text = grpbyExpr.toStringTree(); if (reduceSinkOutputRowResolver.get("", text) == null) { reduceSinkOutputRowResolver.put("", text, new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." + Integer.valueOf(reduceKeys.size() - 1).toString(), - String.class)); // Everything is a string right now + reduceKeys.get(reduceKeys.size()-1).getTypeInfo())); } else { throw new SemanticException(ErrorMsg.DUPLICATE_GROUPBY_KEY.getMsg(grpbyExpr)); } @@ -1058,10 +1519,10 @@ CommonTree parameter = (CommonTree) value.getChild(i); String text = parameter.toStringTree(); if (reduceSinkOutputRowResolver.get("",text) == null) { - reduceKeys.add(genExprNodeDesc(parameter, parseInfo.getAlias(), reduceSinkInputRowResolver)); + reduceKeys.add(genExprNodeDesc(qb.getMetaData(), parameter, reduceSinkInputRowResolver)); reduceSinkOutputRowResolver.put("", text, new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." + Integer.valueOf(reduceKeys.size() - 1).toString(), - String.class)); // Everything is a string right now + reduceKeys.get(reduceKeys.size()-1).getTypeInfo())); } } } @@ -1077,130 +1538,28 @@ CommonTree parameter = (CommonTree) value.getChild(i); String text = parameter.toStringTree(); if (reduceSinkOutputRowResolver.get("",text) == null) { - reduceValues.add(genExprNodeDesc(parameter, parseInfo.getAlias(), reduceSinkInputRowResolver)); + reduceValues.add(genExprNodeDesc(qb.getMetaData(), parameter, reduceSinkInputRowResolver)); reduceSinkOutputRowResolver.put("", text, new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." 
+ Integer.valueOf(reduceValues.size() - 1).toString(), - String.class)); // Everything is a string right now + reduceValues.get(reduceValues.size()-1).getTypeInfo())); } } } - return new OperatorInfo( + return putOpInsertMap( OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, -1, numPartitionFields, -1, false), new RowSchema(reduceSinkOutputRowResolver.getColumnInfos()), - inputOperatorInfo.getOp()), + inputOperatorInfo), reduceSinkOutputRowResolver ); } @SuppressWarnings("nls") - private OperatorInfo genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo, - OperatorInfo input, CommonTree distinctText, TreeSet ks) - throws SemanticException { - RowResolver inputRS = input.getRowResolver(); - RowResolver outputRS = new RowResolver(); - outputRS.setIsExprResolver(true); - ArrayList reduceKeys = new ArrayList(); - - // Spray on distinctText first - if (distinctText != null) - { - reduceKeys.add(genExprNodeDesc(distinctText, parseInfo.getAlias(), inputRS)); - String text = distinctText.toStringTree(); - assert (outputRS.get("", text) == null); - outputRS.put("", text, - new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." + Integer.valueOf(reduceKeys.size() - 1).toString(), - String.class)); - } - else { - // dummy key - reduceKeys.add(new exprNodeConstantDesc(0)); - } - - // copy the input row resolver - ArrayList reduceValues = new ArrayList(); - Iterator keysIter = inputRS.getTableNames().iterator(); - while (keysIter.hasNext()) - { - String key = keysIter.next(); - HashMap map = inputRS.getFieldMap(key); - Iterator fNamesIter = map.keySet().iterator(); - while (fNamesIter.hasNext()) - { - String field = fNamesIter.next(); - ColumnInfo valueInfo = inputRS.get(key, field); - - if (outputRS.get(key, field) == null) - { - reduceValues.add(new exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName())); - outputRS.put(key, field, new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + Integer.valueOf(reduceValues.size() - 1).toString(), - valueInfo.getType())); - } - } - } - - for (String dest : ks) { - List grpByExprs = getGroupByForClause(parseInfo, dest); - - // send all the group by expressions - for (int i = 0; i < grpByExprs.size(); ++i) { - CommonTree grpbyExpr = grpByExprs.get(i); - String text = grpbyExpr.toStringTree(); - if (outputRS.get("", text) == null) { - exprNodeDesc grpbyExprNode = genExprNodeDesc(grpbyExpr, parseInfo.getAlias(), inputRS); - reduceValues.add(grpbyExprNode); - outputRS.put("", text, - new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + Integer.valueOf(reduceValues.size() - 1).toString(), - grpbyExprNode.getTypeInfo())); - } - } - - // send all the aggregation expressions - HashMap aggregationTrees = parseInfo.getAggregationExprsForClause(dest); - for (Map.Entry entry : aggregationTrees.entrySet()) { - CommonTree value = entry.getValue(); - // 0 is function name - for (int i = 1; i < value.getChildCount(); i++) { - CommonTree parameter = (CommonTree) value.getChild(i); - String text = parameter.toStringTree(); - if (outputRS.get("",text) == null) { - exprNodeDesc pNode = genExprNodeDesc(parameter, parseInfo.getAlias(), inputRS); - reduceValues.add(pNode); - outputRS.put("", text, - new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + Integer.valueOf(reduceValues.size() - 1).toString(), - pNode.getTypeInfo())); - } - } - } - } - - return new OperatorInfo( - OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, - -1, distinctText == null ? 
-1 : 1, -1, false), - new RowSchema(outputRS.getColumnInfos()), input.getOp()), - outputRS); - } - - @SuppressWarnings("nls") - private OperatorInfo genGroupByPlanForwardOperator(QBParseInfo parseInfo, OperatorInfo input) - throws SemanticException { - RowResolver outputRS = input.getRowResolver();; - - Operator forward = OperatorFactory.get(forwardDesc.class, - new RowSchema(outputRS.getColumnInfos())); - // set forward operator as child of each of input - List> child = new ArrayList>(); - child.add(forward); - input.getOp().setChildOperators(child); - - return new OperatorInfo(forward, outputRS); - } - - @SuppressWarnings("nls") - private OperatorInfo genGroupByPlanReduceSinkOperator2MR( - QBParseInfo parseInfo, String dest, OperatorInfo groupByOperatorInfo, - int numPartitionFields) { + private Operator genGroupByPlanReduceSinkOperator2MR( + QBParseInfo parseInfo, String dest, Operator groupByOperatorInfo, int numPartitionFields) + throws SemanticException { + RowResolver reduceSinkInputRowResolver2 = opParseCtx.get(groupByOperatorInfo).getRR(); RowResolver reduceSinkOutputRowResolver2 = new RowResolver(); reduceSinkOutputRowResolver2.setIsExprResolver(true); ArrayList reduceKeys = new ArrayList(); @@ -1209,10 +1568,11 @@ for (int i = 0; i < grpByExprs.size(); ++i) { CommonTree grpbyExpr = grpByExprs.get(i); String field = (Integer.valueOf(i)).toString(); - reduceKeys.add(new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), field)); + TypeInfo typeInfo = reduceSinkInputRowResolver2.get("", grpbyExpr.toStringTree()).getType(); + reduceKeys.add(new exprNodeColumnDesc(typeInfo, field)); reduceSinkOutputRowResolver2.put("", grpbyExpr.toStringTree(), new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." + field, - String.class)); // Everything is a string right now + typeInfo)); } // Get partial aggregation results and store in reduceValues ArrayList reduceValues = new ArrayList(); @@ -1220,28 +1580,30 @@ HashMap aggregationTrees = parseInfo .getAggregationExprsForClause(dest); for (Map.Entry entry : aggregationTrees.entrySet()) { - reduceValues.add(new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), - (Integer.valueOf(inputField)).toString())); + String field = (Integer.valueOf(inputField)).toString(); + CommonTree t = entry.getValue(); + TypeInfo typeInfo = reduceSinkInputRowResolver2.get("", t.toStringTree()).getType(); + reduceValues.add(new exprNodeColumnDesc(typeInfo, field)); inputField++; - reduceSinkOutputRowResolver2.put("", ((CommonTree)entry.getValue()).toStringTree(), + reduceSinkOutputRowResolver2.put("", t.toStringTree(), new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." 
+ (Integer.valueOf(reduceValues.size()-1)).toString(), - String.class)); // Everything is a string right now + typeInfo)); } - return new OperatorInfo( + return putOpInsertMap( OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, -1, numPartitionFields, -1, true), new RowSchema(reduceSinkOutputRowResolver2.getColumnInfos()), - groupByOperatorInfo.getOp()), + groupByOperatorInfo), reduceSinkOutputRowResolver2 ); } @SuppressWarnings("nls") - private OperatorInfo genGroupByPlanGroupByOperator2MR( - QBParseInfo parseInfo, String dest, OperatorInfo reduceSinkOperatorInfo2) + private Operator genGroupByPlanGroupByOperator2MR( + QBParseInfo parseInfo, String dest, Operator reduceSinkOperatorInfo2, groupByDesc.Mode mode) throws SemanticException { - RowResolver groupByInputRowResolver2 = reduceSinkOperatorInfo2.getRowResolver(); + RowResolver groupByInputRowResolver2 = opParseCtx.get(reduceSinkOperatorInfo2).getRR(); RowResolver groupByOutputRowResolver2 = new RowResolver(); groupByOutputRowResolver2.setIsExprResolver(true); ArrayList groupByKeys = new ArrayList(); @@ -1274,20 +1636,19 @@ if (paraExprInfo == null) { throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(value)); } - String paraExpression = paraExprInfo.getInternalName(); assert(paraExpression != null); aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression)); - aggregations.add(new aggregationDesc(aggClass, aggParameters, false)); + aggregations.add(new aggregationDesc(aggClass, aggParameters, ((mode == groupByDesc.Mode.FINAL) ? false : (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)))); groupByOutputRowResolver2.put("", value.toStringTree(), new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() - 1).toString(), - paraExprInfo.getType())); // Everything is a string right now + paraExprInfo.getType())); } - return new OperatorInfo( - OperatorFactory.getAndMakeChild(new groupByDesc(groupByDesc.Mode.PARTIAL2, groupByKeys, aggregations), - new RowSchema(groupByOutputRowResolver2.getColumnInfos()), - reduceSinkOperatorInfo2.getOp()), + return putOpInsertMap( + OperatorFactory.getAndMakeChild(new groupByDesc(mode, groupByKeys, aggregations), + new RowSchema(groupByOutputRowResolver2.getColumnInfos()), + reduceSinkOperatorInfo2), groupByOutputRowResolver2 ); } @@ -1308,20 +1669,18 @@ * @throws SemanticException */ @SuppressWarnings({ "unused", "nls" }) - private OperatorInfo genGroupByPlan1MR(String dest, QB qb, - OperatorInfo input) throws SemanticException { + private Operator genGroupByPlan1MR(String dest, QB qb, + Operator input) throws SemanticException { - OperatorInfo inputOperatorInfo = input; QBParseInfo parseInfo = qb.getParseInfo(); // ////// 1. Generate ReduceSinkOperator - OperatorInfo reduceSinkOperatorInfo = genGroupByPlanReduceSinkOperator( - parseInfo, dest, inputOperatorInfo, - getGroupByForClause(parseInfo, dest).size()); + Operator reduceSinkOperatorInfo = genGroupByPlanReduceSinkOperator( + qb, dest, input, getGroupByForClause(parseInfo, dest).size()); // ////// 2. 
Generate GroupbyOperator
-    OperatorInfo groupByOperatorInfo = genGroupByPlanGroupByOperator(parseInfo,
+    Operator groupByOperatorInfo = genGroupByPlanGroupByOperator(parseInfo,
       dest, reduceSinkOperatorInfo, groupByDesc.Mode.COMPLETE);

     return groupByOperatorInfo;
@@ -1345,10 +1704,9 @@
    * @throws SemanticException
    */
   @SuppressWarnings("nls")
-  private OperatorInfo genGroupByPlan2MR(String dest, QB qb,
-      OperatorInfo input) throws SemanticException {
+  private Operator genGroupByPlan2MR(String dest, QB qb,
+      Operator input) throws SemanticException {

-    OperatorInfo inputOperatorInfo = input;
     QBParseInfo parseInfo = qb.getParseInfo();

     // ////// 1. Generate ReduceSinkOperator
@@ -1356,59 +1714,78 @@
     // reducers for load balancing problem. That happens when there is no DISTINCT
     // operator. We set the numPartitionColumns to -1 for this purpose. This is
     // captured by WritableComparableHiveObject.hashCode() function.
-    OperatorInfo reduceSinkOperatorInfo = genGroupByPlanReduceSinkOperator(
-      parseInfo, dest, inputOperatorInfo, (parseInfo
-          .getDistinctFuncExprForClause(dest) == null ? -1
+    Operator reduceSinkOperatorInfo = genGroupByPlanReduceSinkOperator(
+      qb, dest, input, (parseInfo.getDistinctFuncExprForClause(dest) == null ? -1
          : Integer.MAX_VALUE));

     // ////// 2. Generate GroupbyOperator
-    OperatorInfo groupByOperatorInfo = genGroupByPlanGroupByOperator(parseInfo,
+    Operator groupByOperatorInfo = genGroupByPlanGroupByOperator(parseInfo,
        dest, reduceSinkOperatorInfo, groupByDesc.Mode.PARTIAL1);

     // ////// 3. Generate ReduceSinkOperator2
-    OperatorInfo reduceSinkOperatorInfo2 = genGroupByPlanReduceSinkOperator2MR(
+    Operator reduceSinkOperatorInfo2 = genGroupByPlanReduceSinkOperator2MR(
        parseInfo, dest, groupByOperatorInfo, getGroupByForClause(parseInfo, dest).size());

     // ////// 4. Generate GroupbyOperator2
-    OperatorInfo groupByOperatorInfo2 = genGroupByPlanGroupByOperator2MR(
-        parseInfo, dest, reduceSinkOperatorInfo2);
+    Operator groupByOperatorInfo2 =
+      genGroupByPlanGroupByOperator2MR(parseInfo, dest, reduceSinkOperatorInfo2, groupByDesc.Mode.FINAL);

     return groupByOperatorInfo2;
   }

+  private boolean optimizeMapAggrGroupBy(String dest, QB qb) {
+    List grpByExprs = getGroupByForClause(qb.getParseInfo(), dest);
+    if ((grpByExprs != null) && !grpByExprs.isEmpty())
+      return false;
+
+    if (qb.getParseInfo().getDistinctFuncExprForClause(dest) != null)
+      return false;
+
+    return true;
+  }
+
   /**
-   * Generate a Group-By plan using a 2 map-reduce jobs. The first map-reduce
-   * job has already been constructed. Evaluate partial aggregates first,
-   * followed by actual aggregates. The first map-reduce stage will be
-   * shared by all groupbys.
+   * Generate a Group-By plan using 2 map-reduce jobs. First perform a map
+   * side partial aggregation (to reduce the amount of data). Then spray by
+   * the distinct key (or a random number) in the hope of getting a uniform
+   * distribution, and compute partial aggregates grouped by that distinct key.
+   * Evaluate partial aggregates first, followed by actual aggregates.
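For intuition on the partial/final split this javadoc describes, here is a minimal stand-alone illustration of a SUM computed in two phases, with plain HashMaps standing in for the map-side hash aggregation and the post-shuffle merge; none of this is Hive API, and the real plan additionally handles the DISTINCT spraying, which a local sum does not need.

import java.util.HashMap;
import java.util.Map;

public class PartialFinalAggSketch {
  // Phase 1 analogue (map side, Mode.HASH): partial sums within one split.
  static Map<String, Long> partialSum(String[][] rows) {
    Map<String, Long> out = new HashMap<String, Long>();
    for (String[] r : rows) {
      Long prev = out.get(r[0]);
      out.put(r[0], (prev == null ? 0L : prev) + Long.parseLong(r[1]));
    }
    return out;
  }

  public static void main(String[] args) {
    Map<String, Long> p1 = partialSum(new String[][] { {"a", "1"}, {"a", "2"} });
    Map<String, Long> p2 = partialSum(new String[][] { {"a", "4"}, {"b", "8"} });

    // Phase 2 analogue (Mode.FINAL): merge partial aggregates after the shuffle.
    Map<String, Long> fin = new HashMap<String, Long>(p1);
    for (Map.Entry<String, Long> e : p2.entrySet()) {
      Long prev = fin.get(e.getKey());
      fin.put(e.getKey(), (prev == null ? 0L : prev) + e.getValue());
    }
    System.out.println(fin); // sums to a=7, b=8
  }
}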
*/ @SuppressWarnings("nls") - private OperatorInfo genGroupByPlan3MR(String dest, QB qb, - OperatorInfo input) throws SemanticException { + private Operator genGroupByPlan4MR(String dest, QB qb, + Operator inputOperatorInfo) throws SemanticException { - OperatorInfo inputOperatorInfo = input; QBParseInfo parseInfo = qb.getParseInfo(); - // ////// Generate GroupbyOperator - OperatorInfo groupByOperatorInfo = genGroupByPlanGroupByOpForward(parseInfo, - dest, inputOperatorInfo, groupByDesc.Mode.PARTIAL1); + // ////// Generate GroupbyOperator for a map-side partial aggregation + Operator groupByOperatorInfo = genGroupByPlanMapGroupByOperator(qb, + dest, inputOperatorInfo, groupByDesc.Mode.HASH); - // ////// Generate ReduceSinkOperator2 - OperatorInfo reduceSinkOperatorInfo2 = genGroupByPlanReduceSinkOperator2MR( - parseInfo, dest, groupByOperatorInfo, - getGroupByForClause(parseInfo, dest).size()); + // ////// Generate ReduceSink Operator + Operator reduceSinkOperatorInfo = + genGroupByPlanReduceSinkOperator(parseInfo, dest, groupByOperatorInfo); - // ////// Generate GroupbyOperator2 - OperatorInfo groupByOperatorInfo2 = genGroupByPlanGroupByOperator2MR( - parseInfo, dest, reduceSinkOperatorInfo2); + // Optimize the scenario when there are no grouping keys and no distinct - 2 map-reduce jobs are not needed + if (!optimizeMapAggrGroupBy(dest, qb)) { + // ////// Generate GroupbyOperator for a partial aggregation + Operator groupByOperatorInfo2 = genGroupByPlanGroupByOperator1(parseInfo, dest, reduceSinkOperatorInfo, + groupByDesc.Mode.PARTIAL2); + + // ////// Generate ReduceSinkOperator2 + Operator reduceSinkOperatorInfo2 = genGroupByPlanReduceSinkOperator2MR(parseInfo, dest, groupByOperatorInfo2, + getGroupByForClause(parseInfo, dest).size()); - return groupByOperatorInfo2; + // ////// Generate GroupbyOperator3 + return genGroupByPlanGroupByOperator2MR(parseInfo, dest, reduceSinkOperatorInfo2, groupByDesc.Mode.FINAL); + } + else + return genGroupByPlanGroupByOperator2MR(parseInfo, dest, reduceSinkOperatorInfo, groupByDesc.Mode.FINAL); } @SuppressWarnings("nls") - private OperatorInfo genConversionOps(String dest, QB qb, - OperatorInfo input) throws SemanticException { + private Operator genConversionOps(String dest, QB qb, + Operator input) throws SemanticException { Integer dest_type = qb.getMetaData().getDestTypeForAlias(dest); Table dest_tab = null; @@ -1433,9 +1810,10 @@ } @SuppressWarnings("nls") - private OperatorInfo genFileSinkPlan(String dest, QB qb, - OperatorInfo input) throws SemanticException { + private Operator genFileSinkPlan(String dest, QB qb, + Operator input) throws SemanticException { + RowResolver inputRR = opParseCtx.get(input).getRR(); // Generate the destination file String queryTmpdir = this.scratchDir + File.separator + this.randomid + '.' + this.pathid + '.' 
+           dest ;
     this.pathid ++;
@@ -1471,10 +1849,8 @@
       }
     case QBMetaData.DEST_LOCAL_FILE:
     case QBMetaData.DEST_DFS_FILE: {
-        table_desc = Utilities.defaultTd;
         dest_path = qb.getMetaData().getDestFileForAlias(dest);
         String cols = new String();
-        RowResolver inputRR = input.getRowResolver();
         Vector colInfos = inputRR.getColumnInfos();

         boolean first = true;
@@ -1492,54 +1868,129 @@
         this.loadFileWork.add(new loadFileDesc(queryTmpdir, dest_path,
                                                (dest_type.intValue() == QBMetaData.DEST_DFS_FILE), cols));
+        table_desc = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode),
+                                                   cols);
         break;
     }
     default:
       throw new SemanticException("Unknown destination type: " + dest_type);
     }

-    OperatorInfo output = (OperatorInfo)input.clone();
-    output.setOp(
+    input = genConversionSelectOperator(dest, qb, input, table_desc);
+
+    Operator output = putOpInsertMap(
       OperatorFactory.getAndMakeChild(
         new fileSinkDesc(queryTmpdir, table_desc),
-        new RowSchema(output.getRowResolver().getColumnInfos()), input.getOp()
-      )
-    );
+        new RowSchema(inputRR.getColumnInfos()), input), inputRR);

     LOG.debug("Created FileSink Plan for clause: " + dest + "dest_path: "
        + dest_path + " row schema: "
-        + output.getRowResolver().toString());
+        + inputRR.toString());

     return output;
   }

+  /**
+   * Generate the conversion SelectOperator that converts the columns into
+   * the types that are expected by the table_desc.
+   */
+  Operator genConversionSelectOperator(String dest, QB qb,
+      Operator input, tableDesc table_desc) throws SemanticException {
+    StructObjectInspector oi = null;
+    try {
+      Deserializer deserializer = table_desc.getDeserializerClass().newInstance();
+      deserializer.initialize(null, table_desc.getProperties());
+      oi = (StructObjectInspector) deserializer.getObjectInspector();
+    } catch (Exception e) {
+      throw new SemanticException(e);
+    }
+
+    // Check column number
+    List tableFields = oi.getAllStructFieldRefs();
+    Vector rowFields = opParseCtx.get(input).getRR().getColumnInfos();
+    if (tableFields.size() != rowFields.size()) {
+      String reason = "Table " + dest + " has " + tableFields.size() + " columns but query has "
+          + rowFields.size() + ".";
+      throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH.getMsg(
+          qb.getParseInfo().getDestForClause(dest), reason));
+    }
+
+    // Check column types
+    boolean converted = false;
+    int columnNumber = tableFields.size();
+    ArrayList expressions = new ArrayList(columnNumber);
+    // MetadataTypedColumnsetSerDe does not need type conversions because it does
+    // the conversion to String by itself.
+    if (!
table_desc.getDeserializerClass().equals(MetadataTypedColumnsetSerDe.class)) { + for (int i=0; i keyCols = new ArrayList(); - + RowResolver inputRR = opParseCtx.get(input).getRR(); + CommonTree clby = qb.getParseInfo().getClusterByForClause(dest); if (clby != null) { int ccount = clby.getChildCount(); for(int i=0; i[] rightOps = new Operator[right.length]; @@ -1625,13 +2070,13 @@ HashMap> exprMap = new HashMap>(); - for (OperatorInfo input : right) + for (Operator input : right) { ArrayList keyDesc = new ArrayList(); if (input == null) input = left; - Byte tag = Byte.valueOf((byte)(((reduceSinkDesc)(input.getOp().getConf())).getTag())); - RowResolver inputRS = input.getRowResolver(); + Byte tag = Byte.valueOf((byte)(((reduceSinkDesc)(input.getConf())).getTag())); + RowResolver inputRS = opParseCtx.get(input).getRR(); Iterator keysIter = inputRS.getTableNames().iterator(); while (keysIter.hasNext()) { @@ -1650,7 +2095,7 @@ } exprMap.put(tag, keyDesc); - rightOps[pos++] = input.getOp(); + rightOps[pos++] = input; } org.apache.hadoop.hive.ql.plan.joinCond[] joinCondns = new org.apache.hadoop.hive.ql.plan.joinCond[join.getJoinCond().length]; @@ -1659,14 +2104,15 @@ joinCondns[i] = new org.apache.hadoop.hive.ql.plan.joinCond(condn); } - return new OperatorInfo(OperatorFactory.getAndMakeChild(new joinDesc(exprMap, joinCondns), - new RowSchema(outputRS.getColumnInfos()), rightOps), outputRS); + return putOpInsertMap( + OperatorFactory.getAndMakeChild(new joinDesc(exprMap, joinCondns), + new RowSchema(outputRS.getColumnInfos()), rightOps), outputRS); } @SuppressWarnings("nls") - private OperatorInfo genJoinReduceSinkChild(QB qb, QBJoinTree joinTree, - OperatorInfo child, String srcName, int pos) throws SemanticException { - RowResolver inputRS = child.getRowResolver(); + private Operator genJoinReduceSinkChild(QB qb, QBJoinTree joinTree, + Operator child, String srcName, int pos) throws SemanticException { + RowResolver inputRS = opParseCtx.get(child).getRR(); RowResolver outputRS = new RowResolver(); ArrayList reduceKeys = new ArrayList(); @@ -1674,7 +2120,7 @@ Vector exprs = joinTree.getExpressions().get(pos); for (int i = 0; i < exprs.size(); i++) { CommonTree expr = exprs.get(i); - reduceKeys.add(genExprNodeDesc(expr, srcName, inputRS)); + reduceKeys.add(genExprNodeDesc(qb.getMetaData(), expr, inputRS)); } // Walk over the input row resolver and copy in the output @@ -1696,28 +2142,32 @@ } } - return new OperatorInfo( + return putOpInsertMap( OperatorFactory.getAndMakeChild( PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, joinTree.getNextTag(), reduceKeys.size(), -1, false), new RowSchema(outputRS.getColumnInfos()), - child.getOp()), outputRS); + child), outputRS); } - private OperatorInfo genJoinOperator(QB qb, QBJoinTree joinTree, - HashMap map) throws SemanticException { + private Operator genJoinOperator(QB qb, QBJoinTree joinTree, + HashMap map) throws SemanticException { QBJoinTree leftChild = joinTree.getJoinSrc(); - OperatorInfo joinSrcOp = null; + Operator joinSrcOp = null; if (leftChild != null) { - OperatorInfo joinOp = genJoinOperator(qb, leftChild, map); + Operator joinOp = genJoinOperator(qb, leftChild, map); + Vector filter = joinTree.getFilters().get(0); + for (CommonTree cond: filter) + joinOp = genFilterPlan(qb, cond, joinOp); + joinSrcOp = genJoinReduceSinkChild(qb, joinTree, joinOp, null, 0); } - OperatorInfo[] srcOps = new OperatorInfo[joinTree.getBaseSrc().length]; + Operator[] srcOps = new Operator[joinTree.getBaseSrc().length]; int pos = 0; for (String src : 
joinTree.getBaseSrc()) { if (src != null) { - OperatorInfo srcOp = map.get(src); + Operator srcOp = map.get(src); srcOps[pos] = genJoinReduceSinkChild(qb, joinTree, srcOp, src, pos); pos++; } else { @@ -1732,13 +2182,13 @@ return genJoinOperatorChildren(joinTree, joinSrcOp, srcOps); } - private void genJoinOperatorTypeCheck(OperatorInfo left, OperatorInfo[] right) throws SemanticException { + private void genJoinOperatorTypeCheck(Operator left, Operator[] right) throws SemanticException { // keys[i] -> ArrayList for the i-th join operator key list ArrayList> keys = new ArrayList>(); int keyLength = 0; for (int i=0; i map) + private Operator genJoinPlan(QB qb, HashMap map) throws SemanticException { QBJoinTree joinTree = qb.getQbJoinTree(); - OperatorInfo joinOp = genJoinOperator(qb, joinTree, map); + Operator joinOp = genJoinOperator(qb, joinTree, map); return joinOp; } + /** + * Extract the filters from the join condition and push them on top of the source operators. This procedure + * traverses the query tree recursively, + */ + private void pushJoinFilters(QB qb, QBJoinTree joinTree, HashMap map) throws SemanticException { + Vector> filters = joinTree.getFilters(); + if (joinTree.getJoinSrc() != null) + pushJoinFilters(qb, joinTree.getJoinSrc(), map); + + int pos = 0; + for (String src : joinTree.getBaseSrc()) { + if (src != null) { + Operator srcOp = map.get(src); + Vector filter = filters.get(pos); + for (CommonTree cond: filter) + srcOp = genFilterPlan(qb, cond, srcOp); + map.put(src, srcOp); + } + pos++; + } + } + private QBJoinTree genJoinTree(CommonTree joinParseTree) throws SemanticException { QBJoinTree joinTree = new QBJoinTree(); @@ -1807,9 +2287,9 @@ if ((left.getToken().getType() == HiveParser.TOK_TABREF) || (left.getToken().getType() == HiveParser.TOK_SUBQUERY)) { - String table_name = left.getChild(0).getText(); - String alias = left.getChildCount() == 1 ? table_name : left.getChild(1) - .getText(); + String table_name = unescapeIdentifier(left.getChild(0).getText()); + String alias = left.getChildCount() == 1 ? table_name : + unescapeIdentifier(left.getChild(1).getText().toLowerCase()); joinTree.setLeftAlias(alias); String[] leftAliases = new String[1]; leftAliases[0] = alias; @@ -1832,9 +2312,9 @@ if ((right.getToken().getType() == HiveParser.TOK_TABREF) || (right.getToken().getType() == HiveParser.TOK_SUBQUERY)) { - String table_name = right.getChild(0).getText(); - String alias = right.getChildCount() == 1 ? table_name : right.getChild(1) - .getText(); + String table_name = unescapeIdentifier(right.getChild(0).getText()); + String alias = right.getChildCount() == 1 ? 
table_name : + unescapeIdentifier(right.getChild(1).getText().toLowerCase()); String[] rightAliases = new String[1]; rightAliases[0] = alias; joinTree.setRightAliases(rightAliases); @@ -1850,10 +2330,16 @@ expressions.add(new Vector()); expressions.add(new Vector()); joinTree.setExpressions(expressions); + + Vector> filters = new Vector>(); + filters.add(new Vector()); + filters.add(new Vector()); + joinTree.setFilters(filters); + CommonTree joinCond = (CommonTree) joinParseTree.getChild(2); assert joinCond != null; Vector leftSrc = new Vector(); - parseJoinCondition(joinParseTree, joinTree, joinCond, leftSrc); + parseJoinCondition(joinTree, joinCond, leftSrc); if (leftSrc.size() == 1) joinTree.setLeftAlias(leftSrc.get(0)); @@ -1887,6 +2373,15 @@ for (int i = 0; i < nodeRightAliases.length; i++) expr.add(node.getExpressions().get(i + 1)); + Vector> filter = target.getFilters(); + for (int i = 0; i < nodeRightAliases.length; i++) + filter.add(node.getFilters().get(i + 1)); + + if (node.getFilters().get(0).size() != 0) { + Vector filterPos = filter.get(pos); + filterPos.addAll(node.getFilters().get(0)); + } + if (qb.getQbJoinTree() == node) qb.setQbJoinTree(node.getJoinSrc()); else @@ -1988,7 +2483,7 @@ } @SuppressWarnings("nls") - private OperatorInfo genBodyPlan(QB qb, OperatorInfo input) + private Operator genBodyPlan(QB qb, Operator input) throws SemanticException { QBParseInfo qbp = qb.getParseInfo(); @@ -1996,71 +2491,24 @@ TreeSet ks = new TreeSet(); ks.addAll(qbp.getClauseNames()); - String distinctText = null; - CommonTree distn = null; - OperatorInfo op = null; - boolean grpBy = false; - int numGrpBy = 0; - - // In case of a multiple group bys, all of them should have the same distinct key - for (String dest : ks) { - // is it a group by - if ((qbp.getAggregationExprsForClause(dest).size() != 0) - || (getGroupByForClause(qbp, dest).size() > 0)) { - grpBy = true; - numGrpBy++; - - // If there is a distinctFuncExp, add all parameters to the reduceKeys. - if (qbp.getDistinctFuncExprForClause(dest) != null) { - CommonTree value = qbp.getDistinctFuncExprForClause(dest); - if (value.getChildCount() != 2) - throw new SemanticException(ErrorMsg.UNSUPPORTED_MULTIPLE_DISTINCTS.getMsg(value)); - distn = (CommonTree)value.getChild(1); - String dist = distn.toStringTree();; - if (distinctText == null) - distinctText = dist; - if (!distinctText.equals(dist)) - throw new SemanticException(ErrorMsg.UNSUPPORTED_MULTIPLE_DISTINCTS.getMsg(value)); - } - } - } - - // In the first stage, copy the input and all the group by expressions - // and aggregate paramaters. This can be optimized in the future to only - // evaluate expressions that occur frequently. For a single groupby, no need to do so - if (grpBy && (numGrpBy > 1)) { - OperatorInfo reduceSinkOperatorInfo = - genGroupByPlanReduceSinkOperator(qbp, input, distn, ks); - - // ////// 2. 
Generate GroupbyOperator
-      OperatorInfo forwardOperatorInfo = genGroupByPlanForwardOperator(qbp, reduceSinkOperatorInfo);
-      op = forwardOperatorInfo;
-    }
-
     // Go over all the destination tables
-    OperatorInfo curr = null;
+    Operator curr = null;
     for (String dest : ks) {
-      boolean groupByExpr = false;
-      if (qbp.getAggregationExprsForClause(dest).size() != 0
-          || getGroupByForClause(qbp, dest).size() > 0)
-        groupByExpr = true;
-
       curr = input;
-      if (groupByExpr && (numGrpBy > 1))
-        curr = op;

       if (qbp.getWhrForClause(dest) != null) {
         curr = genFilterPlan(dest, qb, curr);
       }

       if (qbp.getAggregationExprsForClause(dest).size() != 0
-          || getGroupByForClause(qbp, dest).size() > 0) {
-        if (numGrpBy > 1)
-          curr = genGroupByPlan3MR(dest, qb, curr);
+          || getGroupByForClause(qbp, dest).size() > 0)
+      {
+        if (conf.getVar(HiveConf.ConfVars.HIVEMAPSIDEAGGREGATE).equalsIgnoreCase("true"))
+          curr = genGroupByPlan4MR(dest, qb, curr);
         else
           curr = genGroupByPlan2MR(dest, qb, curr);
       }
-
+
       curr = genSelectPlan(dest, qb, curr);

       Integer limit = qbp.getDestLimit(dest);
@@ -2087,12 +2535,12 @@
   }

   @SuppressWarnings("nls")
-  private OperatorInfo genUnionPlan(String unionalias, String leftalias,
-      OperatorInfo leftOp, String rightalias, OperatorInfo rightOp)
+  private Operator genUnionPlan(String unionalias, String leftalias,
+      Operator leftOp, String rightalias, Operator rightOp)
     throws SemanticException {

-    RowResolver leftRR = leftOp.getRowResolver();
-    RowResolver rightRR = rightOp.getRowResolver();
+    RowResolver leftRR = opParseCtx.get(leftOp).getRR();
+    RowResolver rightRR = opParseCtx.get(rightOp).getRR();
     HashMap leftmap = leftRR.getFieldMap(leftalias);
     HashMap rightmap = rightRR.getFieldMap(rightalias);
     // make sure the schemas of both sides are the same
@@ -2126,15 +2574,41 @@
     // set forward operator as child of each of leftOp and rightOp
     List> child = new ArrayList>();
     child.add(unionforward);
-    rightOp.getOp().setChildOperators(child);
-    leftOp.getOp().setChildOperators(child);
+    rightOp.setChildOperators(child);
+    leftOp.setChildOperators(child);
+    List> parent = new ArrayList>();
+    parent.add(leftOp);
+    parent.add(rightOp);
+    unionforward.setParentOperators(parent);

     // create operator info list to return
-    OperatorInfo unionout = new OperatorInfo(unionforward, unionoutRR);
-    return unionout;
+    return putOpInsertMap(unionforward, unionoutRR);
   }

-  private exprNodeDesc genSamplePredicate(TableSample ts) {
-    // ((default_sample_hashfn(cols) & Integer.MAX_VALUE) % denominator) == numerator
+  /**
+   * Generates the sampling predicate from the TABLESAMPLE clause information. This function uses the
+   * bucket column list to decide the expression inputs to the predicate hash function in case useBucketCols
+   * is set to true, otherwise the expression list stored in the TableSample is used. The bucket columns of
+   * the table are used to generate this predicate in case no expressions are provided on the TABLESAMPLE
+   * clause and the table has clustering columns defined in its metadata.
+ * The predicate created has the following structure: + * + * ((default_sample_hashfn(expressions) & Integer.MAX_VALUE) % denominator) == numerator + * + * @param ts TABLESAMPLE clause information + * @param bucketCols The clustering columns of the table + * @param useBucketCols Flag to indicate whether the bucketCols should be used as input to the hash + * function + * @param alias The alias used for the table in the row resolver + * @param rwsch The row resolver used to resolve column references + * @param qbm The metadata information for the query block which is used to resolve unaliased columns + * @return exprNodeDesc + * @exception SemanticException + */ + private exprNodeDesc genSamplePredicate(TableSample ts, List bucketCols, + boolean useBucketCols, String alias, + RowResolver rwsch, QBMetaData qbm) + throws SemanticException { + exprNodeDesc numeratorExpr = new exprNodeConstantDesc( TypeInfoFactory.getPrimitiveTypeInfo(Integer.class), Integer.valueOf(ts.getNumerator() - 1)); @@ -2146,11 +2620,20 @@ exprNodeDesc intMaxExpr = new exprNodeConstantDesc( TypeInfoFactory.getPrimitiveTypeInfo(Integer.class), Integer.valueOf(Integer.MAX_VALUE)); + ArrayList args = new ArrayList(); - for (String col: ts.getCols()) { - // TODO: change type to the one in the table schema - args.add(new exprNodeColumnDesc(String.class, col)); + if (useBucketCols) { + for (String col : bucketCols) { + ColumnInfo ci = rwsch.get(alias, col); + // TODO: change type to the one in the table schema + args.add(new exprNodeColumnDesc(ci.getType().getPrimitiveClass(), col)); + } } + else { + for(CommonTree expr: ts.getExprs()) { + args.add(genExprNodeDesc(qbm, expr, rwsch)); + } + } exprNodeDesc hashfnExpr = getFuncExprNodeDesc("default_sample_hashfn", args); assert(hashfnExpr != null); @@ -2169,92 +2652,100 @@ } @SuppressWarnings("nls") - private OperatorInfo genTablePlan(String alias, QB qb) - throws SemanticException { + private Operator genTablePlan(String alias, QB qb) throws SemanticException { + String alias_id = (qb.getId() == null ? alias : qb.getId() + ":" + alias); Table tab = qb.getMetaData().getSrcForAlias(alias); + RowResolver rwsch; + + // is the table already present + Operator top = this.topOps.get(alias_id); + Operator dummySel = this.topSelOps.get(alias_id); + if (dummySel != null) + top = dummySel; + + if (top == null) { + rwsch = new RowResolver(); + try { + StructObjectInspector rowObjectInspector = (StructObjectInspector)tab.getDeserializer().getObjectInspector(); + List fields = rowObjectInspector.getAllStructFieldRefs(); + for (int i=0; i fields = rowObjectInspector.getAllStructFieldRefs(); - for (int i=0; i top = OperatorFactory.get(forwardDesc.class, - new RowSchema(rwsch.getColumnInfos())); - String alias_id = (qb.getId() == null ? 
alias : qb.getId() + ":" + alias); - - // Add this to the list of top operators - we always start from a table scan - this.topOps.put(alias_id, top); // check if this table is sampled and needs more than input pruning - Operator tableOp = top; - TableSample ts = qb.getParseInfo().getTabSample(alias); + Operator tableOp = top; + TableSample ts = qb.getParseInfo().getTabSample(alias); if (ts != null) { int num = ts.getNumerator(); int den = ts.getDenominator(); - ArrayList sampleCols = ts.getCols(); - List tabCols = tab.getCols(); - // check if sampleCols are present in the table - for (String col: sampleCols) { - boolean found = false; - for (FieldSchema s: tabCols) { - if (col.equalsIgnoreCase(s.getName())) { - found = true; - } - } - if (!found) { - throw new SemanticException(ErrorMsg.SAMPLE_COLUMN_NOT_FOUND.getMsg( - qb.getParseInfo().getSrcForAlias(alias), "Sampling column " + - col + " not found in table " + tab.getName())); - } - } + ArrayList sampleExprs = ts.getExprs(); + + // TODO: Do the type checking of the expressions List tabBucketCols = tab.getBucketCols(); int numBuckets = tab.getNumBuckets(); + + // If there are no sample cols and no bucket cols then throw an error + if (tabBucketCols.size() == 0 && sampleExprs.size() == 0) { + throw new SemanticException(ErrorMsg.NON_BUCKETED_TABLE.getMsg() + " " + tab.getName()); + } + // check if a predicate is needed // predicate is needed if either input pruning is not enough // or if input pruning is not possible // check if the sample columns are the same as the table bucket columns - // and if they are, create a new array of column names which is in the - // same order as tabBucketCols. - // if sample cols is not specified then default is bucket cols boolean colsEqual = true; - if ( (sampleCols.size() != tabBucketCols.size()) && (sampleCols.size() != 0) ) { + if ( (sampleExprs.size() != tabBucketCols.size()) && (sampleExprs.size() != 0) ) { colsEqual = false; } - for (int i = 0; i < sampleCols.size() && colsEqual; i++) { + + for (int i = 0; i < sampleExprs.size() && colsEqual; i++) { boolean colFound = false; for (int j = 0; j < tabBucketCols.size() && !colFound; j++) { - if (sampleCols.get(i).equalsIgnoreCase(tabBucketCols.get(j))) { + if (sampleExprs.get(i).getToken().getType() != HiveParser.TOK_COLREF) { + break; + } + + if (sampleExprs.get(i).getChildCount() != 1) { + throw new SemanticException(ErrorMsg.TABLE_ALIAS_NOT_ALLOWED.getMsg()); + } + + if (((CommonTree)sampleExprs.get(i).getChild(0)).getText().equalsIgnoreCase(tabBucketCols.get(j))) { colFound = true; } } - colsEqual = colFound; + colsEqual = (colsEqual && colFound); } - // if the sample columns are the same, we need them in the same order - // as tabBucketCols - if (colsEqual) { - ts.setCols(new ArrayList(tabBucketCols)); - } + // Check if input can be pruned + ts.setInputPruning((sampleExprs == null || sampleExprs.size() == 0 || colsEqual)); + // check if input pruning is enough - if ((sampleCols == null || sampleCols.size() == 0 || colsEqual) + if ((sampleExprs == null || sampleExprs.size() == 0 || colsEqual) && (num == den || den <= numBuckets && numBuckets % den == 0)) { // input pruning is enough; no need for filter LOG.info("No need for sample filter"); @@ -2263,26 +2754,27 @@ // need to add filter // create tableOp to be filterDesc and set as child to 'top' LOG.info("Need sample filter"); - exprNodeDesc samplePredicate = genSamplePredicate(ts); + exprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols, colsEqual, alias, rwsch, qb.getMetaData()); 
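The genSamplePredicate call above materializes the predicate described in its javadoc, ((default_sample_hashfn(expressions) & Integer.MAX_VALUE) % denominator) == numerator - 1. A stand-alone sketch of that bucket test follows; String.hashCode() is an assumed stand-in for default_sample_hashfn, chosen only so the example runs, and the masking-and-modulo shape is the part that matches the patch.

public class SamplePredicateSketch {
  // TABLESAMPLE(BUCKET num OUT OF den ...): keep a row iff its hash lands
  // in bucket num - 1. The & Integer.MAX_VALUE clears the sign bit so the
  // modulo result is never negative.
  static boolean sampled(Object col, int numerator, int denominator) {
    int hash = col.hashCode(); // stand-in for default_sample_hashfn
    return ((hash & Integer.MAX_VALUE) % denominator) == (numerator - 1);
  }

  public static void main(String[] args) {
    int kept = 0, total = 100000;
    for (int i = 0; i < total; i++)
      if (sampled("row-" + i, 1, 32)) kept++;
    // Expect roughly total / 32 rows to survive the filter.
    System.out.println(kept + " of " + total);
  }
}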
tableOp = OperatorFactory.getAndMakeChild( new filterDesc(samplePredicate), top); } } - OperatorInfo output = new OperatorInfo(tableOp, rwsch); + + Operator output = putOpInsertMap(tableOp, rwsch); LOG.debug("Created Table Plan for " + alias + " " + tableOp.toString()); return output; } - private OperatorInfo genPlan(QBExpr qbexpr) throws SemanticException { + private Operator genPlan(QBExpr qbexpr) throws SemanticException { if (qbexpr.getOpcode() == QBExpr.Opcode.NULLOP) { return genPlan(qbexpr.getQB()); } if (qbexpr.getOpcode() == QBExpr.Opcode.UNION) { - OperatorInfo qbexpr1Ops = genPlan(qbexpr.getQBExpr1()); - OperatorInfo qbexpr2Ops = genPlan(qbexpr.getQBExpr2()); + Operator qbexpr1Ops = genPlan(qbexpr.getQBExpr1()); + Operator qbexpr2Ops = genPlan(qbexpr.getQBExpr2()); return genUnionPlan(qbexpr.getAlias(), qbexpr.getQBExpr1().getAlias(), qbexpr1Ops, qbexpr.getQBExpr2().getAlias(), qbexpr2Ops); @@ -2291,10 +2783,10 @@ } @SuppressWarnings("nls") - private OperatorInfo genPlan(QB qb) throws SemanticException { + public Operator genPlan(QB qb) throws SemanticException { // First generate all the opInfos for the elements in the from clause - HashMap aliasToOpInfo = new HashMap(); + HashMap aliasToOpInfo = new HashMap(); // Recurse over the subqueries to fill the subquery part of the plan for (String alias : qb.getSubqAliases()) { @@ -2308,7 +2800,7 @@ aliasToOpInfo.put(alias, genTablePlan(alias, qb)); } - OperatorInfo srcOpInfo = null; + Operator srcOpInfo = null; // process join if (qb.getParseInfo().getJoinExpr() != null) { @@ -2316,6 +2808,9 @@ QBJoinTree joinTree = genJoinTree(joinExpr); qb.setQbJoinTree(joinTree); mergeJoinTree(qb); + + // if any filters are present in the join tree, push them on top of the table + pushJoinFilters(qb, qb.getQbJoinTree(), aliasToOpInfo); srcOpInfo = genJoinPlan(qb, aliasToOpInfo); } else @@ -2323,13 +2818,10 @@ // later we can extend this to the union all case as well srcOpInfo = aliasToOpInfo.values().iterator().next(); - OperatorInfo bodyOpInfo = genBodyPlan(qb, srcOpInfo); + Operator bodyOpInfo = genBodyPlan(qb, srcOpInfo); LOG.debug("Created Plan for Query Block " + qb.getId()); - - // is it a top level QB, and can it be optimized ? 
For eg: select * from T does not need a map-reduce job - QBParseInfo qbp = qb.getParseInfo(); - qbp.setCanOptTopQ(qb.isSelectStarQuery()); - + + this.qb = qb; return bodyOpInfo; } @@ -2363,15 +2855,43 @@ Task mvTask = null; Task fetchTask = null; - if (qb.getParseInfo().getCanOptTopQ()) { + if (qb.isSelectStarQuery()) { Iterator> iter = qb.getMetaData().getAliasToTable().entrySet().iterator(); Table tab = ((Map.Entry)iter.next()).getValue(); - fetch = new fetchWork(tab.getPath(), tab.getDeserializer().getClass(), - tab.getInputFormatClass(), tab.getSchema(), qb.getParseInfo().getOuterQueryLimit()); - - fetchTask = TaskFactory.get(fetch, this.conf); - setFetchTask(fetchTask); - return; + if (!tab.isPartitioned()) { + if (qb.getParseInfo().getDestToWhereExpr().isEmpty()) + fetch = new fetchWork(tab.getPath(), Utilities.getTableDesc(tab), qb.getParseInfo().getOuterQueryLimit()); + } + else { + if (aliasToPruner.size() == 1) { + Iterator> iterP = aliasToPruner.entrySet().iterator(); + PartitionPruner pr = ((Map.Entry)iterP.next()).getValue(); + if (pr.containsPartitionCols()) { + List listP = new ArrayList(); + List partP = new ArrayList(); + Set parts = null; + try { + parts = pr.prune(); + Iterator iterParts = parts.iterator(); + while (iterParts.hasNext()) { + Partition part = iterParts.next(); + listP.add(part.getPartitionPath()); + partP.add(Utilities.getPartitionDesc(part)); + } + fetch = new fetchWork(listP, partP, qb.getParseInfo().getOuterQueryLimit()); + } catch (HiveException e) { + // Has to use full name to make sure it does not conflict with org.apache.commons.lang.StringUtils + LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); + throw new SemanticException(e.getMessage(), e); + } + } + } + } + if (fetch != null) { + fetchTask = TaskFactory.get(fetch, this.conf); + setFetchTask(fetchTask); + return; + } } // In case of a select, use a fetch task instead of a move task @@ -2379,9 +2899,14 @@ if ((!loadTableWork.isEmpty()) || (loadFileWork.size() != 1)) throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg()); String cols = loadFileWork.get(0).getColumns(); - fetch = new fetchWork(new Path(loadFileWork.get(0).getSourceDir()), - MetadataTypedColumnsetSerDe.class, TextInputFormat.class, - Utilities.makeProperties("columns", cols), qb.getParseInfo().getOuterQueryLimit()); + + fetch = new fetchWork(new Path(loadFileWork.get(0).getSourceDir()), + new tableDesc(MetadataTypedColumnsetSerDe.class, TextInputFormat.class, + IgnoreKeyTextOutputFormat.class, + Utilities.makeProperties( + org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "" + Utilities.ctrlaCode, + "columns", cols)), + qb.getParseInfo().getOuterQueryLimit()); fetchTask = TaskFactory.get(fetch, this.conf); setFetchTask(fetchTask); @@ -2458,10 +2983,30 @@ } } plan.getAliasToWork().put(alias_id, topOp); + setKeyAndValueDesc(plan, topOp); LOG.debug("Created Map Work for " + alias_id); } } + private void setKeyAndValueDesc(mapredWork plan, Operator topOp) { + if (topOp instanceof ReduceSinkOperator) { + ReduceSinkOperator rs = (ReduceSinkOperator)topOp; + plan.setKeyDesc(rs.getConf().getKeySerializeInfo()); + int tag = Math.max(0, rs.getConf().getTag()); + List tagToSchema = plan.getTagToValueDesc(); + while (tag + 1 > tagToSchema.size()) { + tagToSchema.add(null); + } + tagToSchema.set(tag, rs.getConf().getValueSerializeInfo()); + } else { + List> children = topOp.getChildOperators(); + if (children != null) { + for(Operator op: children) { + setKeyAndValueDesc(plan, op); + } + } + } + } 
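// ----------------------------------------------------------------------
// Illustrative sketch (not part of the patch): the list bookkeeping that
// setKeyAndValueDesc above performs for tagToValueDesc, shown standalone
// with plain Strings standing in for tableDesc. The list is padded with
// null placeholders so a value descriptor can be filed under its tag even
// when lower-numbered tags have not been seen yet.
// ----------------------------------------------------------------------
import java.util.ArrayList;
import java.util.List;

public class TagToValueDescSketch {
  public static void main(String[] args) {
    List<String> tagToValueDesc = new ArrayList<String>();

    int tag = 2; // e.g. the third input of a join is encountered first
    while (tag + 1 > tagToValueDesc.size()) {
      tagToValueDesc.add(null); // placeholders for tags not yet seen
    }
    tagToValueDesc.set(tag, "valueDesc-for-tag-2");

    System.out.println(tagToValueDesc); // [null, null, valueDesc-for-tag-2]
  }
}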
@SuppressWarnings("nls") private void genTaskPlan(Operator op, Task currTask, HashMap, Task> redTaskMap, @@ -2536,15 +3081,19 @@ isfirst = false; } - tableDesc tt_desc = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode), - sb.toString()); + tableDesc tt_desc = PlanUtils.getBinaryTableDesc( + PlanUtils.getFieldSchemasFromRowSchema(op.getSchema(), "temporarycol")); // Create a file sink operator for this file name - Operator fs_op = OperatorFactory.get(new fileSinkDesc(taskTmpDir, tt_desc), - op.getSchema()); + Operator fs_op = putOpInsertMap(OperatorFactory.get(new fileSinkDesc(taskTmpDir, tt_desc), + op.getSchema()), null); // replace the reduce child with this operator childOps.set(i, fs_op); + + List> parent = new ArrayList>(); + parent.add(op); + fs_op.setParentOperators(parent); // Add the path to alias mapping if (cplan.getPathToAliases().get(taskTmpDir) == null) { @@ -2564,6 +3113,7 @@ new partitionDesc(tt_desc, null)); cplan.getAliasToWork().put(streamDesc, child); + setKeyAndValueDesc(cplan, child); // Make this task dependent on the current task currTask.addDependentTask(ctask); @@ -2584,7 +3134,7 @@ } @SuppressWarnings("nls") - private Phase1Ctx initPhase1Ctx() { + public Phase1Ctx initPhase1Ctx() { Phase1Ctx ctx_1 = new Phase1Ctx(); ctx_1.nextNum = 0; @@ -2599,11 +3149,17 @@ work.setPathToAliases(new LinkedHashMap>()); work.setPathToPartitionInfo(new LinkedHashMap()); work.setAliasToWork(new HashMap>()); + work.setTagToValueDesc(new ArrayList()); work.setReducer(null); return work; } + private boolean pushSelect(Operator op, List colNames) { + if (opParseCtx.get(op).getRR().getColumnInfos().size() == colNames.size()) return false; + return true; + } + @Override @SuppressWarnings("nls") public void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException { @@ -2611,6 +3167,8 @@ reset(); QB qb = new QB(null, null, false); + this.qb = qb; + this.ast = ast; LOG.info("Starting Semantic Analysis"); doPhase1(ast, qb, initPhase1Ctx()); @@ -2621,6 +3179,16 @@ genPlan(qb); + ParseContext pCtx = new ParseContext(conf, qb, ast, aliasToPruner, aliasToSamplePruner, topOps, + topSelOps, opParseCtx, loadTableWork, loadFileWork, ctx); + + Optimizer optm = new Optimizer(); + optm.setPctx(pCtx); + optm.initialize(); + pCtx = optm.optimize(); + init(pCtx); + qb = pCtx.getQB(); + // Do any partition pruning genPartitionPruners(qb); LOG.info("Completed partition pruning"); @@ -2735,8 +3303,18 @@ } + /** + * Generates an expression node descriptor for the expression passed in the arguments. This + * function uses the row resolver and the metadata information that are passed as arguments + * to resolve the column names to internal names. + * @param qbm The metadata information for the query block + * @param expr The expression + * @param input The row resolver + * @return exprNodeDesc + * @throws SemanticException + */ @SuppressWarnings("nls") - private exprNodeDesc genExprNodeDesc(CommonTree expr, String alias, RowResolver input) + private exprNodeDesc genExprNodeDesc(QBMetaData qbm, CommonTree expr, RowResolver input) throws SemanticException { // We recursively create the exprNodeDesc.
Base cases: when we encounter // a column ref, we convert that into an exprNodeColumnDesc; when we encounter @@ -2762,25 +3340,29 @@ switch (tokType) { case HiveParser.TOK_COLREF: { - // For now only allow columns of the form tab.col - if (expr.getChildCount() == 1) { - throw new SemanticException(ErrorMsg.NO_TABLE_ALIAS.getMsg(expr.getChild(0))); + String tabAlias = null; + String colName = null; + if (expr.getChildCount() != 1) { + tabAlias = unescapeIdentifier(expr.getChild(0).getText()); + colName = unescapeIdentifier(expr.getChild(1).getText()); } - - String tabAlias = expr.getChild(0).getText(); - String colName = expr.getChild(1).getText(); - if (tabAlias == null || colName == null) { + else { + colName = unescapeIdentifier(expr.getChild(0).getText()); + } + + if (colName == null) { throw new SemanticException(ErrorMsg.INVALID_XPATH.getMsg(expr)); } + colInfo = input.get(tabAlias, colName); if (colInfo == null && input.getIsExprResolver()) { throw new SemanticException(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY.getMsg(expr)); } - else if (!input.hasTableAlias(tabAlias)) { + else if (tabAlias != null && !input.hasTableAlias(tabAlias)) { throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(expr.getChild(0))); } else if (colInfo == null) { - throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1))); + throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(tabAlias == null? expr.getChild(0) : expr.getChild(1))); } desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()); @@ -2794,7 +3376,7 @@ int childrenBegin = (isFunction ? 1 : 0); ArrayList children = new ArrayList(expr.getChildCount() - childrenBegin); for (int ci=childrenBegin; ci ent: qbm.getAliasToTable().entrySet()) { + for(FieldSchema field: ent.getValue().getAllCols()) { + if (colName.equalsIgnoreCase(field.getName())) { + if (found) { + throw new SemanticException(ErrorMsg.AMBIGOUS_COLUMN.getMsg(pt)); + } + + found = true; + tabAlias = ent.getKey(); + } + } + } + return tabAlias; + } } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SamplePruner.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SamplePruner.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SamplePruner.java (working copy) @@ -22,43 +22,96 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.fs.Path; -import java.util.*; +/** + * + * This class stores the mapping from table alias to the parse tree information of the table + * sample clause (stored in the TableSample class). + * + */ +public class SamplePruner { -public class SamplePruner { + /** + * Table alias for the table, e.g. in case of FROM t TABLESAMPLE(1 OUT OF 2 ON rand()) a, + * "a" is the table alias + */ private String tabAlias; + + /** + * The parse tree corresponding to the TABLESAMPLE clause. e.g.
in case of + * FROM t TABLESAMPLE(1 OUT OF 2 ON rand()) a, the parse tree of + * "TABLESAMPLE(1 OUT OF 2 ON rand())" is parsed out and stored in tableSample + */ private TableSample tableSample; - // The log - @SuppressWarnings("nls") - private static final Log LOG = LogFactory.getLog("hive.ql.parse.SamplePruner"); + + /** + * The log handle for this class + */ + @SuppressWarnings("nls") + private static final Log LOG = LogFactory.getLog("hive.ql.parse.SamplePruner"); + /** + * Constructs the SamplePruner given the table alias and the table sample + * + * @param alias The alias of the table specified in the query + * @param tableSample The parse information of the TABLESAMPLE clause + */ public SamplePruner(String alias, TableSample tableSample) { this.tabAlias = alias; this.tableSample = tableSample; } + + /** + * Gets the table alias + * + * @return String + */ public String getTabAlias() { return this.tabAlias; } + + /** + * Sets the table alias + * + * @param tabAlias The table alias as specified in the query + */ public void setTabAlias(String tabAlias) { this.tabAlias = tabAlias; } + + /** + * Gets the parse information of the associated table sample clause + * + * @return TableSample + */ public TableSample getTableSample() { return this.tableSample; } + + /** + * Sets the parse information of the associated table sample clause + * + * @param tableSample Information related to the table sample clause + */ public void setTableSample(TableSample tableSample) { this.tableSample = tableSample; } + /** + * Prunes to get all the files in the partition that satisfy the TABLESAMPLE clause + * + * @param part The partition to prune + * @return Path[] + * @throws SemanticException + */ @SuppressWarnings("nls") public Path[] prune(Partition part) throws SemanticException { int num = this.tableSample.getNumerator(); int den = this.tableSample.getDenominator(); int bucketCount = part.getBucketCount(); - List tabBucketCols = part.getBucketCols(); - ArrayList sampleCols = this.tableSample.getCols(); String fullScanMsg = ""; // check if input pruning is possible - if (sampleCols == null || sampleCols.size() == 0 || tabBucketCols.equals(sampleCols)) { + if (this.tableSample.getInputPruning()) { LOG.trace("numerator = " + num); LOG.trace("denominator = " + den); LOG.trace("bucket count = " + bucketCount); Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/OpParseContext.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/OpParseContext.java (revision 0) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/OpParseContext.java (revision 0) @@ -0,0 +1,69 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.parse; + +import java.util.List; + +/** + * Implementation of the Operator Parse Context. It maintains the parse context + * that may be needed by an operator. Currently, it only maintains the row + * resolver and the list of columns used by the operator. + **/ + +public class OpParseContext { + private RowResolver rr; // row resolver for the operator + + // list of internal column names used + private List colNames; + + /** + * @param rr row resolver + */ + public OpParseContext(RowResolver rr) { + this.rr = rr; + } + + /** + * @return the row resolver + */ + public RowResolver getRR() { + return rr; + } + + /** + * @param rr the row resolver to set + */ + public void setRR(RowResolver rr) { + this.rr = rr; + } + + /** + * @return the column names desired + */ + public List getColNames() { + return colNames; + } + + /** + * @param colNames the column names to set + */ + public void setColNames(List colNames) { + this.colNames = colNames; + } +} Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (revision 0) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (revision 0) @@ -0,0 +1,264 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.parse; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.List; + +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.plan.loadFileDesc; +import org.apache.hadoop.hive.ql.plan.loadTableDesc; +import org.antlr.runtime.tree.CommonTree; +import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.conf.HiveConf; + +/** + * Parse Context: The current parse context. This is passed to the optimizer + * which then transforms the operator tree using the parse context. All the + * optimizations are performed sequentially and then the new parse context is + * populated. Note that since the parse context contains the operator tree, it + * can be easily retrieved by the next optimization step or finally for task + * generation after the plan has been completely optimized.
+ * + **/ + +public class ParseContext { + private QB qb; + private CommonTree ast; + private HashMap aliasToPruner; + private HashMap aliasToSamplePruner; + private HashMap> topOps; + private HashMap> topSelOps; + private HashMap, OpParseContext> opParseCtx; + private List loadTableWork; + private List loadFileWork; + private Context ctx; + private HiveConf conf; + + /** + * @param qb + * current QB + * @param ast + * current parse tree + * @param aliasToPruner + * partition pruner list + * @param aliasToSamplePruner + * sample pruner list + * @param loadFileWork + * list of destination files being loaded + * @param loadTableWork + * list of destination tables being loaded + * @param opParseCtx + * operator parse context - contains a mapping from operator to + * operator parse state (row resolver etc.) + * @param topOps + * list of operators for the top query + * @param topSelOps + * list of operators for the selects introduced for column pruning + */ + public ParseContext(HiveConf conf, QB qb, CommonTree ast, + HashMap aliasToPruner, + HashMap aliasToSamplePruner, + HashMap> topOps, + HashMap> topSelOps, + HashMap, OpParseContext> opParseCtx, + List loadTableWork, List loadFileWork, + Context ctx) { + this.conf = conf; + this.qb = qb; + this.ast = ast; + this.aliasToPruner = aliasToPruner; + this.aliasToSamplePruner = aliasToSamplePruner; + this.loadFileWork = loadFileWork; + this.loadTableWork = loadTableWork; + this.opParseCtx = opParseCtx; + this.topOps = topOps; + this.topSelOps = topSelOps; + this.ctx = ctx; + } + + /** + * @return the qb + */ + public QB getQB() { + return qb; + } + + /** + * @param qb + * the qb to set + */ + public void setQB(QB qb) { + this.qb = qb; + } + + /** + * @return the context + */ + public Context getContext() { + return ctx; + } + + /** + * @param ctx + * the context to set + */ + public void setContext(Context ctx) { + this.ctx = ctx; + } + + /** + * @return the hive conf + */ + public HiveConf getConf() { + return conf; + } + + /** + * @param conf + * the conf to set + */ + public void setConf(HiveConf conf) { + this.conf = conf; + } + + /** + * @return the ast + */ + public CommonTree getParseTree() { + return ast; + } + + /** + * @param ast + * the parsetree to set + */ + public void setParseTree(CommonTree ast) { + this.ast = ast; + } + + /** + * @return the aliasToPruner + */ + public HashMap getAliasToPruner() { + return aliasToPruner; + } + + /** + * @param aliasToPruner + * the aliasToPruner to set + */ + public void setAliasToPruner(HashMap aliasToPruner) { + this.aliasToPruner = aliasToPruner; + } + + /** + * @return the aliasToSamplePruner + */ + public HashMap getAliasToSamplePruner() { + return aliasToSamplePruner; + } + + /** + * @param aliasToSamplePruner + * the aliasToSamplePruner to set + */ + public void setAliasToSamplePruner( + HashMap aliasToSamplePruner) { + this.aliasToSamplePruner = aliasToSamplePruner; + } + + /** + * @return the topOps + */ + public HashMap> getTopOps() { + return topOps; + } + + /** + * @param topOps + * the topOps to set + */ + public void setTopOps(HashMap> topOps) { + this.topOps = topOps; + } + + /** + * @return the topSelOps + */ + public HashMap> getTopSelOps() { + return topSelOps; + } + + /** + * @param topSelOps + * the topSelOps to set + */ + public void setTopSelOps( + HashMap> topSelOps) { + this.topSelOps = topSelOps; + } + + /** + * @return the opParseCtx + */ + public HashMap, OpParseContext> getOpParseCtx() { + return opParseCtx; + } + + /** + * @param opParseCtx + * the opParseCtx to set + 
*/ + public void setOpParseCtx( + HashMap, OpParseContext> opParseCtx) { + this.opParseCtx = opParseCtx; + } + + /** + * @return the loadTableWork + */ + public List getLoadTableWork() { + return loadTableWork; + } + + /** + * @param loadTableWork + * the loadTableWork to set + */ + public void setLoadTableWork(List loadTableWork) { + this.loadTableWork = loadTableWork; + } + + /** + * @return the loadFileWork + */ + public List getLoadFileWork() { + return loadFileWork; + } + + /** + * @param loadFileWork + * the loadFileWork to set + */ + public void setLoadFileWork(List loadFileWork) { + this.loadFileWork = loadFileWork; + } +} Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (working copy) @@ -77,14 +77,49 @@ return rslvMap.get(tab_alias.toLowerCase()) != null; } - public ColumnInfo get(String tab_alias, String col_alias) { - tab_alias = tab_alias.toLowerCase(); + /** + * Gets the ColumnInfo for a tab_alias.col_alias style column reference. If the tab_alias is not + * provided, as can be the case with a non-aliased column, this function looks up the column in all + * the table aliases in this row resolver and returns the match. It also throws an exception if + * the column is found in multiple table aliases. If no match is found, a null value is returned. + * + * This allows us to interpret both select t.c1 type of references and select c1 kind of references. + * The latter kind are what we call non-aliased column references in the query. + * + * @param tab_alias The table alias to match (this is null if the column reference is non-aliased) + * @param col_alias The column name that is being searched for + * @return ColumnInfo + * @throws SemanticException + */ + public ColumnInfo get(String tab_alias, String col_alias) + throws SemanticException { col_alias = col_alias.toLowerCase(); - HashMap f_map = rslvMap.get(tab_alias); - if (f_map == null) { - return null; + ColumnInfo ret = null; + + if (tab_alias != null) { + tab_alias = tab_alias.toLowerCase(); + HashMap f_map = rslvMap.get(tab_alias); + if (f_map == null) { + return null; + } + ret = f_map.get(col_alias); } - return f_map.get(col_alias); + else { + boolean found = false; + for(LinkedHashMap cmap: rslvMap.values()) { + for(Map.Entry cmapEnt: cmap.entrySet()) { + if (col_alias.equalsIgnoreCase((String)cmapEnt.getKey())) { + if (found) { + throw new SemanticException("Column " + col_alias + " Found in more than One Tables/Subqueries"); + } + found = true; + ret = (ColumnInfo)cmapEnt.getValue(); + } + } + } + } + + return ret; } public Vector getColumnInfos() { Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java (working copy) @@ -19,34 +19,127 @@ package org.apache.hadoop.hive.ql.parse; import java.util.ArrayList; +import org.antlr.runtime.tree.CommonTree; +/** + * + * This class stores all the information specified in the TABLESAMPLE clause. e.g.
+ * for the clause "FROM t TABLESAMPLE(1 OUT OF 2 ON c1)" it will store the numerator + * 1, the denominator 2 and the list of expressions (in this case c1) in the appropriate + * fields. The aforementioned sampling clause causes the 1st bucket to be picked out of + * the 2 buckets created by hashing on c1. + * + */ public class TableSample { + + /** + * The numerator of the TABLESAMPLE clause + */ private int numerator; + + /** + * The denominator of the TABLESAMPLE clause + */ private int denominator; - private ArrayList cols; - public TableSample(String num, String den, ArrayList cols) { + /** + * The list of expressions following the ON part of the TABLESAMPLE clause. This list is + * empty in case there are no expressions, such as in the clause + * "FROM t TABLESAMPLE(1 OUT OF 2)". For this expression the sampling is done + * on the table's clustering column (as specified when the table was created). In case + * the table does not have any clustering column, the usage of a table sample clause + * without an ON part is disallowed by the compiler + */ + private ArrayList exprs; + + /** + * Flag to indicate that input files can be pruned + */ + private boolean inputPruning; + + /** + * Constructs the TableSample given the numerator, denominator and the list of + * ON clause expressions + * + * @param num The numerator + * @param den The denominator + * @param exprs The list of expressions in the ON part of the TABLESAMPLE clause + */ + public TableSample(String num, String den, ArrayList exprs) { this.numerator = Integer.valueOf(num).intValue(); this.denominator = Integer.valueOf(den).intValue(); - this.cols = cols; + this.exprs = exprs; } + + /** + * Gets the numerator + * + * @return int + */ public int getNumerator() { return this.numerator; } + + /** + * Sets the numerator + * + * @param num The numerator + */ public void setNumerator(int num) { this.numerator = num; } + + /** + * Gets the denominator + * + * @return int + */ public int getDenominator() { return this.denominator; } + + /** + * Sets the denominator + * + * @param den The denominator + */ public void setDenominator(int den) { this.denominator = den; } - public ArrayList getCols() { - return this.cols; + + /** + * Gets the ON part's expression list + * + * @return ArrayList + */ + public ArrayList getExprs() { + return this.exprs; } - public void setCols(ArrayList cols) { - this.cols = cols; + + /** + * Sets the expression list + * + * @param exprs The expression list + */ + public void setExprs(ArrayList exprs) { + this.exprs = exprs; } + /** + * Gets the flag that indicates whether input pruning is possible + * + * @return boolean + */ + public boolean getInputPruning() { + return this.inputPruning; + } + + /** + * Sets the flag that indicates whether input pruning is possible or not + * + * @param inputPruning true if input pruning is possible + */ + public void setInputPruning(boolean inputPruning) { + this.inputPruning = inputPruning; + } } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy) @@ -27,6 +27,7 @@ import org.antlr.runtime.tree.CommonTree; import org.antlr.runtime.tree.Tree; +import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import
org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; @@ -45,12 +46,15 @@ import org.apache.hadoop.hive.ql.plan.showTablesDesc; import org.apache.hadoop.hive.ql.plan.alterTableDesc.alterTableTypes; import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde2.SerDeUtils; public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer { private static final Log LOG = LogFactory.getLog("hive.ql.parse.DDLSemanticAnalyzer"); public static final Map TokenToTypeName = new HashMap(); static { + TokenToTypeName.put(HiveParser.TOK_BOOLEAN, Constants.BOOLEAN_TYPE_NAME); TokenToTypeName.put(HiveParser.TOK_TINYINT, Constants.TINYINT_TYPE_NAME); + TokenToTypeName.put(HiveParser.TOK_SMALLINT, Constants.SMALLINT_TYPE_NAME); TokenToTypeName.put(HiveParser.TOK_INT, Constants.INT_TYPE_NAME); TokenToTypeName.put(HiveParser.TOK_BIGINT, Constants.BIGINT_TYPE_NAME); TokenToTypeName.put(HiveParser.TOK_FLOAT, Constants.FLOAT_TYPE_NAME); @@ -96,18 +100,26 @@ analyzeAlterTableModifyCols(ast, alterTableTypes.REPLACECOLS); else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) analyzeAlterTableDropParts(ast); + else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_PROPERTIES) + analyzeAlterTableProps(ast); + else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES) + analyzeAlterTableSerdeProps(ast); + else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) + analyzeAlterTableSerde(ast); else if (ast.getToken().getType() == HiveParser.TOK_SHOWPARTITIONS) { ctx.setResFile(new Path(getTmpFileName())); analyzeShowPartitions(ast); } + else { + throw new SemanticException("Unsupported command."); + } } private void analyzeCreateTable(CommonTree ast, boolean isExt) throws SemanticException { - String tableName = ast.getChild(0).getText(); - CommonTree colList = (CommonTree)ast.getChild(1); - List cols = getColumns(colList); + String tableName = unescapeIdentifier(ast.getChild(0).getText()); + List cols = null; List partCols = null; List bucketCols = null; List sortCols = null; @@ -117,19 +129,23 @@ String mapKeyDelim = null; String lineDelim = null; String comment = null; - boolean isSequenceFile = false; + boolean isSequenceFile = + "SequenceFile".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT)); String location = null; String serde = null; Map mapProp = null; LOG.info("Creating table" + tableName); int numCh = ast.getChildCount(); - for (int num = 2; num < numCh; num++) + for (int num = 1; num < numCh; num++) { CommonTree child = (CommonTree)ast.getChild(num); switch (child.getToken().getType()) { + case HiveParser.TOK_TABCOLLIST: + cols = getColumns(child); + break; case HiveParser.TOK_TABLECOMMENT: - comment = child.getChild(0).getText(); + comment = unescapeSQLString(child.getChild(0).getText()); break; case HiveParser.TOK_TABLEPARTCOLS: partCols = getColumns((CommonTree)child.getChild(0)); @@ -181,6 +197,9 @@ case HiveParser.TOK_TBLSEQUENCEFILE: isSequenceFile = true; break; + case HiveParser.TOK_TBLTEXTFILE: + isSequenceFile = false; + break; case HiveParser.TOK_TABLELOCATION: location = unescapeSQLString(child.getChild(0).getText()); break; @@ -203,6 +222,15 @@ // no duplicate column names // currently, it is a simple n*n algorithm - this can be optimized later if need be // but it should not be a major bottleneck as the number of columns are anyway not so big + + if((crtTblDesc.getCols() == null) || (crtTblDesc.getCols().size() == 0)) { + // for now make sure that serde exists + 
if(StringUtils.isEmpty(crtTblDesc.getSerName()) || SerDeUtils.isNativeSerDe(crtTblDesc.getSerName())) { + throw new SemanticException(ErrorMsg.INVALID_TBL_DDL_SERDE.getMsg()); + } + return; + } + Iterator iterCols = crtTblDesc.getCols().iterator(); List colNames = new ArrayList(); while (iterCols.hasNext()) { @@ -264,9 +292,9 @@ String partCol = partColsIter.next().getName(); Iterator colNamesIter = colNames.iterator(); while (colNamesIter.hasNext()) { - String colName = colNamesIter.next(); + String colName = unescapeIdentifier(colNamesIter.next()); if (partCol.equalsIgnoreCase(colName)) - throw new SemanticException(ErrorMsg.COLUMN_REPAEATED_IN_PARTITIONING_COLS.getMsg()); + throw new SemanticException(ErrorMsg.COLUMN_REPEATED_IN_PARTITIONING_COLS.getMsg()); } } } @@ -274,11 +302,52 @@ private void analyzeDropTable(CommonTree ast) throws SemanticException { - String tableName = ast.getChild(0).getText(); + String tableName = unescapeIdentifier(ast.getChild(0).getText()); dropTableDesc dropTblDesc = new dropTableDesc(tableName); rootTasks.add(TaskFactory.get(new DDLWork(dropTblDesc), conf)); } + private void analyzeAlterTableProps(CommonTree ast) throws SemanticException { + String tableName = unescapeIdentifier(ast.getChild(0).getText()); + HashMap mapProp = getProps((CommonTree)(ast.getChild(1)).getChild(0)); + alterTableDesc alterTblDesc = new alterTableDesc(alterTableTypes.ADDPROPS); + alterTblDesc.setProps(mapProp); + alterTblDesc.setOldName(tableName); + rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf)); + } + + private void analyzeAlterTableSerdeProps(CommonTree ast) throws SemanticException { + String tableName = unescapeIdentifier(ast.getChild(0).getText()); + HashMap mapProp = getProps((CommonTree)(ast.getChild(1)).getChild(0)); + alterTableDesc alterTblDesc = new alterTableDesc(alterTableTypes.ADDSERDEPROPS); + alterTblDesc.setProps(mapProp); + alterTblDesc.setOldName(tableName); + rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf)); + } + + private void analyzeAlterTableSerde(CommonTree ast) throws SemanticException { + String tableName = unescapeIdentifier(ast.getChild(0).getText()); + String serdeName = unescapeSQLString(ast.getChild(1).getText()); + alterTableDesc alterTblDesc = new alterTableDesc(alterTableTypes.ADDSERDE); + if(ast.getChildCount() > 2) { + HashMap mapProp = getProps((CommonTree)(ast.getChild(2)).getChild(0)); + alterTblDesc.setProps(mapProp); + } + alterTblDesc.setOldName(tableName); + alterTblDesc.setSerdeName(serdeName); + rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf)); + } + + private HashMap getProps(CommonTree prop) { + HashMap mapProp = new HashMap(); + for (int propChild = 0; propChild < prop.getChildCount(); propChild++) { + String key = unescapeSQLString(prop.getChild(propChild).getChild(0).getText()); + String value = unescapeSQLString(prop.getChild(propChild).getChild(1).getText()); + mapProp.put(key,value); + } + return mapProp; + } + private List getColumns(CommonTree ast) { List colList = new ArrayList(); @@ -286,7 +355,7 @@ for (int i = 0; i < numCh; i++) { FieldSchema col = new FieldSchema(); CommonTree child = (CommonTree)ast.getChild(i); - col.setName(child.getChild(0).getText()); + col.setName(unescapeIdentifier(child.getChild(0).getText())); CommonTree typeChild = (CommonTree)(child.getChild(1)); if (typeChild.getToken().getType() == HiveParser.TOK_LIST) { @@ -303,7 +372,7 @@ col.setType(getTypeName(typeChild.getToken().getType())); if (child.getChildCount() == 3) - 
col.setComment(child.getChild(2).getText()); + col.setComment(unescapeSQLString(child.getChild(2).getText())); colList.add(col); } return colList; @@ -315,7 +384,7 @@ int numCh = ast.getChildCount(); for (int i = 0; i < numCh; i++) { CommonTree child = (CommonTree)ast.getChild(i); - colList.add(child.getText()); + colList.add(unescapeIdentifier(child.getText())); } return colList; } @@ -327,9 +396,9 @@ for (int i = 0; i < numCh; i++) { CommonTree child = (CommonTree)ast.getChild(i); if (child.getToken().getType() == HiveParser.TOK_TABSORTCOLNAMEASC) - colList.add(new Order(child.getChild(0).getText(), 1)); + colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()), 1)); else - colList.add(new Order(child.getChild(0).getText(), 0)); + colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()), 0)); } return colList; } @@ -359,7 +428,7 @@ private void analyzeShowPartitions(CommonTree ast) throws SemanticException { showPartitionsDesc showPartsDesc; - String tableName = ast.getChild(0).getText(); + String tableName = unescapeIdentifier(ast.getChild(0).getText()); showPartsDesc = new showPartitionsDesc(tableName, ctx.getResFile()); rootTasks.add(TaskFactory.get(new DDLWork(showPartsDesc), conf)); } @@ -379,13 +448,15 @@ private void analyzeAlterTableRename(CommonTree ast) throws SemanticException { - alterTableDesc alterTblDesc = new alterTableDesc(ast.getChild(0).getText(), ast.getChild(1).getText()); + alterTableDesc alterTblDesc = new alterTableDesc( + unescapeIdentifier(ast.getChild(0).getText()), + unescapeIdentifier(ast.getChild(1).getText())); rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf)); } private void analyzeAlterTableModifyCols(CommonTree ast, alterTableTypes alterType) throws SemanticException { - String tblName = ast.getChild(0).getText(); + String tblName = unescapeIdentifier(ast.getChild(0).getText()); List newCols = getColumns((CommonTree)ast.getChild(1)); alterTableDesc alterTblDesc = new alterTableDesc(tblName, newCols, alterType); rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf)); @@ -396,7 +467,7 @@ List> partSpecs = new ArrayList>(); int childIndex = 0; // get table metadata - tblName = ast.getChild(0).getText(); + tblName = unescapeIdentifier(ast.getChild(0).getText()); // get partition metadata if partition specified for( childIndex = 1; childIndex < ast.getChildCount(); childIndex++) { CommonTree partspec = (CommonTree) ast.getChild(childIndex); Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (working copy) @@ -19,20 +19,22 @@ package org.apache.hadoop.hive.ql.plan; import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; import org.apache.hadoop.hive.ql.typeinfo.TypeInfo; import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory; +import org.apache.hadoop.hive.ql.parse.RowResolver; +import org.apache.hadoop.hive.ql.exec.ColumnInfo; public class exprNodeColumnDesc extends exprNodeDesc implements Serializable { private static final long serialVersionUID = 1L; private String column; - private boolean isVirtual; public exprNodeColumnDesc() {} public exprNodeColumnDesc(TypeInfo typeInfo, String column) { super(typeInfo); this.column = column; - this.isVirtual = isVirtual; } public 
exprNodeColumnDesc(Class c, String column) { super(TypeInfoFactory.getPrimitiveTypeInfo(c)); @@ -54,4 +56,11 @@ public String getExprString() { return getColumn(); } + + public List getCols() { + List lst = new ArrayList(); + lst.add(column); + return lst; + } + } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java (working copy) @@ -19,8 +19,13 @@ package org.apache.hadoop.hive.ql.plan; import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; import org.apache.hadoop.hive.ql.typeinfo.TypeInfo; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.parse.RowResolver; + public class exprNodeIndexDesc extends exprNodeDesc implements Serializable { @@ -61,5 +66,15 @@ @Override public String getExprString() { return this.desc.getExprString() + "[" + this.index.getExprString() + "]"; - } -} \ No newline at end of file + } + + public List getCols() { + List colList = new ArrayList(); + if (desc != null) + colList = Utilities.mergeUniqElems(colList, desc.getCols()); + if (index != null) + colList = Utilities.mergeUniqElems(colList, index.getCols()); + + return colList; + } +} Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java (working copy) @@ -39,10 +39,10 @@ // map<->reduce interface // schema of the map-reduce 'key' object - this is homogeneous - private schemaDesc keySchema; + private tableDesc keyDesc; // schema of the map-reduce 'val' object - this is heterogeneous - private HashMap aliasToSchema; + private List tagToValueDesc; private Operator reducer; @@ -57,16 +57,16 @@ final LinkedHashMap> pathToAliases, final LinkedHashMap pathToPartitionInfo, final HashMap> aliasToWork, - final schemaDesc keySchema, - HashMap aliasToSchema, + final tableDesc keyDesc, + List tagToValueDesc, final Operator reducer, final Integer numReduceTasks) { this.command = command; this.pathToAliases = pathToAliases; this.pathToPartitionInfo = pathToPartitionInfo; this.aliasToWork = aliasToWork; - this.keySchema = keySchema; - this.aliasToSchema = aliasToSchema; + this.keyDesc = keyDesc; + this.tagToValueDesc = tagToValueDesc; this.reducer = reducer; this.numReduceTasks = numReduceTasks; } @@ -100,17 +100,17 @@ public void setAliasToWork(final HashMap> aliasToWork) { this.aliasToWork=aliasToWork; } - public schemaDesc getKeySchema() { - return this.keySchema; + public tableDesc getKeyDesc() { + return this.keyDesc; } - public void setKeySchema(final schemaDesc keySchema) { - this.keySchema = keySchema; + public void setKeyDesc(final tableDesc keyDesc) { + this.keyDesc = keyDesc; } - public HashMap getAliasToSchema() { - return this.aliasToSchema; + public List getTagToValueDesc() { + return tagToValueDesc; } - public void setAliasToSchema(final HashMap aliasToSchema) { - this.aliasToSchema = aliasToSchema; + public void setTagToValueDesc(final List tagToValueDesc) { + this.tagToValueDesc = tagToValueDesc; } @explain(displayName="Reduce Operator Tree") Index: 
src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java (working copy) @@ -24,11 +24,18 @@ public class selectDesc implements Serializable { private static final long serialVersionUID = 1L; private java.util.ArrayList colList; + private boolean selectStar; public selectDesc() { } + public selectDesc(final java.util.ArrayList colList) { + this(colList, false); + } + public selectDesc( - final java.util.ArrayList colList) { + final java.util.ArrayList colList, final boolean selectStar) { this.colList = colList; + this.selectStar = selectStar; } + @explain(displayName="expressions") public java.util.ArrayList getColList() { return this.colList; @@ -36,4 +43,17 @@ public void setColList(final java.util.ArrayList colList) { this.colList=colList; } + + /** + * @return the selectStar + */ + public boolean isSelectStar() { + return selectStar; + } + /** + * @param selectStar the selectStar to set + */ + public void setSelectStar(boolean selectStar) { + this.selectStar = selectStar; + } } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java (working copy) @@ -20,6 +20,7 @@ import java.io.Serializable; import java.util.List; +import java.util.Map; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -28,12 +29,14 @@ public class alterTableDesc extends ddlDesc implements Serializable { private static final long serialVersionUID = 1L; - public static enum alterTableTypes {RENAME, ADDCOLS, REPLACECOLS}; + public static enum alterTableTypes {RENAME, ADDCOLS, REPLACECOLS, ADDPROPS, ADDSERDE, ADDSERDEPROPS}; alterTableTypes op; String oldName; String newName; List newCols; + String serdeName; + Map props; /** * @param oldName old name of the table @@ -54,6 +57,13 @@ this.oldName = name; this.newCols = newCols; } + + /** + * @param alterType type of alter op + */ + public alterTableDesc(alterTableTypes alterType) { + this.op = alterType; + } /** * @return the old name of the table @@ -130,4 +140,34 @@ this.newCols = newCols; } + /** + * @return the serdeName + */ + @explain(displayName="deserializer library") + public String getSerdeName() { + return serdeName; + } + + /** + * @param serdeName the serdeName to set + */ + public void setSerdeName(String serdeName) { + this.serdeName = serdeName; + } + + /** + * @return the props + */ + @explain(displayName="properties") + public Map getProps() { + return props; + } + + /** + * @param props the props to set + */ + public void setProps(Map props) { + this.props = props; + } + } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java (working copy) @@ -19,9 +19,11 @@ package org.apache.hadoop.hive.ql.plan; import java.io.Serializable; +import java.util.List; import 
org.apache.hadoop.hive.ql.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.hive.ql.parse.RowResolver; public class exprNodeDesc implements Serializable { private static final long serialVersionUID = 1L; @@ -51,4 +53,9 @@ public String getTypeString() { return typeInfo.getTypeName(); } + + public List getCols() { + return null; + } + } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java (working copy) @@ -23,11 +23,12 @@ /** Group-by Mode: * COMPLETE: complete 1-phase aggregation: aggregate, evaluate * PARTIAL1: partial aggregation - first phase: aggregate, evaluatePartial - * PARTIAL2: partial aggregation - second phase: aggregatePartial, evaluate + * PARTIAL2: partial aggregation - second phase: aggregatePartial, evaluatePartial + * FINAL: partial aggregation - final phase: aggregatePartial, evaluate * HASH: the same as PARTIAL1 but use hash-table-based aggregation */ private static final long serialVersionUID = 1L; - public static enum Mode { COMPLETE, PARTIAL1, PARTIAL2, HASH }; + public static enum Mode { COMPLETE, PARTIAL1, PARTIAL2, FINAL, HASH }; private Mode mode; private java.util.ArrayList keys; private java.util.ArrayList aggregators; Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java (working copy) @@ -21,12 +21,15 @@ import java.io.Serializable; import java.lang.reflect.Method; import java.util.ArrayList; +import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.typeinfo.TypeInfo; import org.apache.hadoop.hive.ql.exec.FunctionInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.parse.RowResolver; /** * The reason that we have to store UDFClass as well as UDFMethod is because @@ -133,4 +136,18 @@ return sb.toString(); } + + public List getCols() { + List colList = new ArrayList(); + if (children != null) { + int pos = 0; + while (pos < children.size()) { + List colCh = children.get(pos).getCols(); + colList = Utilities.mergeUniqElems(colList, colCh); + pos++; + } + } + + return colList; + } } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (working copy) @@ -20,13 +20,27 @@ import java.util.*; import java.io.*; + +import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.RowSchema; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; +import 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; +import org.apache.hadoop.hive.ql.typeinfo.TypeInfoUtils; +import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; +import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol; +import org.apache.hadoop.mapred.SequenceFileInputFormat; +import org.apache.hadoop.mapred.SequenceFileOutputFormat; import org.apache.hadoop.mapred.TextInputFormat; +import com.facebook.thrift.protocol.TBinaryProtocol; + public class PlanUtils { public static enum ExpressionTypes {FIELD, JEXL}; @@ -37,21 +51,45 @@ new LinkedHashMap> (), new LinkedHashMap (), new HashMap> (), - new schemaDesc(), - new HashMap (), + new tableDesc(), + new ArrayList (), null, Integer.valueOf (1)); } + /** + * Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode + * and column names (comma separated string). + */ public static tableDesc getDefaultTableDesc(String separatorCode, String columns) { + return getDefaultTableDesc(separatorCode, columns, false); + } + + /** + * Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode + * and column names (comma separated string), and whether the last column should take + * the rest of the line. + */ + public static tableDesc getDefaultTableDesc(String separatorCode, String columns, + boolean lastColumnTakesRestOfTheLine) { + Properties properties = Utilities.makeProperties( + Constants.SERIALIZATION_FORMAT, separatorCode, + "columns", columns); + if (lastColumnTakesRestOfTheLine) { + properties.setProperty( + Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST, + "true"); + } return new tableDesc( MetadataTypedColumnsetSerDe.class, TextInputFormat.class, IgnoreKeyTextOutputFormat.class, - Utilities.makeProperties( - org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, separatorCode, - "columns", columns)); + properties); } + + /** + * Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode + */ public static tableDesc getDefaultTableDesc(String separatorCode) { return new tableDesc( MetadataTypedColumnsetSerDe.class, @@ -61,26 +99,131 @@ org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, separatorCode)); } + /** + * Generate the table descriptor of DynamicSerDe and TBinarySortableProtocol. + */ + public static tableDesc getBinarySortableTableDesc(List fieldSchemas) { + String structName = "binary_sortable_table"; + return new tableDesc( + DynamicSerDe.class, + SequenceFileInputFormat.class, + SequenceFileOutputFormat.class, + Utilities.makeProperties( + "name", structName, + org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, TBinarySortableProtocol.class.getName(), + org.apache.hadoop.hive.serde.Constants.SERIALIZATION_DDL, + MetaStoreUtils.getDDLFromFieldSchema(structName, fieldSchemas) + )); + } + + /** + * Generate the table descriptor of DynamicSerDe and TBinaryProtocol. 
+ */ + public static tableDesc getBinaryTableDesc(List fieldSchemas) { + String structName = "binary_table"; + return new tableDesc( + DynamicSerDe.class, + SequenceFileInputFormat.class, + SequenceFileOutputFormat.class, + Utilities.makeProperties( + "name", structName, + org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName(), + org.apache.hadoop.hive.serde.Constants.SERIALIZATION_DDL, + MetaStoreUtils.getDDLFromFieldSchema(structName, fieldSchemas) + )); + } - // We will make reduce key and reduce value TableDesc with configurable SerDes + + /** + * Convert the ColumnList to FieldSchema list. + */ + public static List getFieldSchemasFromColumnList(ArrayList cols, + String fieldPrefix) { + List schemas = new ArrayList(cols.size()); + for (int i=0; i getFieldSchemasFromRowSchema(RowSchema row, String fieldPrefix) { + Vector c = row.getSignature(); + return getFieldSchemasFromColumnInfo(c, fieldPrefix); + } + + /** + * Convert the ColumnInfo to FieldSchema. + */ + public static List getFieldSchemasFromColumnInfo(Vector cols, String fieldPrefix) { + List schemas = new ArrayList(cols.size()); + for (int i=0; i keyCols, ArrayList valueCols, - int tag, int numPartitionFields, + int tag, + ArrayList partitionCols, int numReducers, boolean inferNumReducers) { - - return new reduceSinkDesc(keyCols, valueCols, tag, numPartitionFields, numReducers, inferNumReducers, - getDefaultTableDesc("" + Utilities.ctrlaCode, ObjectInspectorUtils.getIntegerCSV(keyCols.size())), - getDefaultTableDesc("" + Utilities.ctrlaCode, ObjectInspectorUtils.getIntegerCSV(valueCols.size()))); + + return new reduceSinkDesc(keyCols, valueCols, tag, partitionCols, numReducers, inferNumReducers, + getBinarySortableTableDesc(getFieldSchemasFromColumnList(keyCols, "reducesinkkey")), + getBinaryTableDesc(getFieldSchemasFromColumnList(valueCols, "reducesinkvalue"))); } - // We should read the TableDesc from gWork when it is available. - public static tableDesc getReduceKeyDesc(mapredWork gWork) { - return getDefaultTableDesc("" + Utilities.ctrlaCode); - } + /** + * Create the reduce sink descriptor. + * @param keyCols The columns to be stored in the key + * @param valueCols The columns to be stored in the value + * @param tag The tag for this reducesink + * @param numPartitionFields The first numPartitionFields of keyCols will be partition columns. + * If numPartitionFields=-1, then partition randomly. + * @param numReducers The number of reducers. + * @param inferNumReducers whether we should try to infer the number of reducers. + * @return The reduceSinkDesc object. + */ + public static reduceSinkDesc getReduceSinkDesc(ArrayList keyCols, + ArrayList valueCols, + int tag, + int numPartitionFields, + int numReducers, boolean inferNumReducers) { + ArrayList partitionCols = null; - // We should read the TableDesc from gWork when it is available. 
- public static tableDesc getReduceValueDesc(mapredWork gWork, int tag) { - return getDefaultTableDesc("" + Utilities.ctrlaCode); + if (numPartitionFields >= keyCols.size()) { + partitionCols = keyCols; + } else if (numPartitionFields >= 0) { + partitionCols = new ArrayList(numPartitionFields); + for (int i=0; i(1); + partitionCols.add(SemanticAnalyzer.getFuncExprNodeDesc("rand")); + } + + return getReduceSinkDesc(keyCols, valueCols, tag, partitionCols, numReducers, inferNumReducers); } + } + \ No newline at end of file Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java (working copy) @@ -19,8 +19,13 @@ package org.apache.hadoop.hive.ql.plan; import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; import org.apache.hadoop.hive.ql.typeinfo.TypeInfo; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.parse.RowResolver; + public class exprNodeFieldDesc extends exprNodeDesc implements Serializable { private static final long serialVersionUID = 1L; @@ -67,5 +72,12 @@ @Override public String getExprString() { return this.desc.getExprString() + "." + this.fieldName; - } + } + + public List getCols() { + List colList = new ArrayList(); + if (desc != null) + colList = Utilities.mergeUniqElems(colList, desc.getCols()); + return colList; + } } Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java =================================================================== --- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java (revision 712243) +++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java (working copy) @@ -23,20 +23,34 @@ @explain(displayName="Reduce Output Operator") public class reduceSinkDesc implements Serializable { private static final long serialVersionUID = 1L; - // these are the expressions that go into the reduce key + /** + * Key columns are passed to reducer in the "key". + */ private java.util.ArrayList keyCols; + /** + * Value columns are passed to reducer in the "value". + */ private java.util.ArrayList valueCols; - // Describe how to serialize the key + /** + * Describe how to serialize the key. + */ private tableDesc keySerializeInfo; - // Describe how to serialize the value + /** + * Describe how to serialize the value. + */ private tableDesc valueSerializeInfo; + /** + * The tag for this reducesink descriptor. + */ private int tag; - // The partition key will be the first #numPartitionFields of keyCols - // If the value is 0, then all data will go to a single reducer - // If the value is -1, then data will go to a random reducer - private int numPartitionFields; + /** + * The partition columns (CLUSTER BY or DISTRIBUTE BY in Hive language). + * Partition columns decide the reducer that the current row goes to. + * Partition columns are not passed to reducer. 
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java	(working copy)
@@ -19,99 +19,91 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
-import java.util.Properties;
+import java.util.List;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.hive.ql.plan.tableDesc;
 
 @explain(displayName="Fetch Operator")
 public class fetchWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  // private loadFileDesc loadFileWork;
-  // private tableDesc tblDesc;
-  private Path srcDir;
-  private Properties schema;
-  private Class<? extends Deserializer> deserializerClass;
-  private Class<? extends InputFormat> inputFormatClass;
+  private Path tblDir;
+  private tableDesc tblDesc;
+
+  private List<Path> partDir;
+  private List<partitionDesc> partDesc;
+
   private int limit;
 
   public fetchWork() { }
 
-  /**
-   * @param deserializer
-   * @param deserializerClass
-   * @param inputFormatClass
-   * @param schema
-   * @param srcDir
-   */
-  public fetchWork(Path srcDir,
-      Class<? extends Deserializer> deserializerClass,
-      Class<? extends InputFormat> inputFormatClass, Properties schema,
-      int limit) {
-    this.srcDir = srcDir;
-    this.deserializerClass = deserializerClass;
-    this.inputFormatClass = inputFormatClass;
-    this.schema = schema;
+  public fetchWork(Path tblDir, tableDesc tblDesc, int limit) {
+    this.tblDir = tblDir;
+    this.tblDesc = tblDesc;
     this.limit = limit;
   }
 
+  public fetchWork(List<Path> partDir, List<partitionDesc> partDesc, int limit) {
+    this.partDir = partDir;
+    this.partDesc = partDesc;
+    this.limit = limit;
+  }
+
   /**
-   * @return the srcDir
+   * @return the tblDir
    */
-  @explain(displayName="source")
-  public Path getSrcDir() {
-    return srcDir;
+  public Path getTblDir() {
+    return tblDir;
   }
 
   /**
-   * @param srcDir the srcDir to set
+   * @param tblDir the tblDir to set
    */
-  public void setSrcDir(Path srcDir) {
-    this.srcDir = srcDir;
+  public void setTblDir(Path tblDir) {
+    this.tblDir = tblDir;
   }
 
   /**
-   * @return the schema
+   * @return the tblDesc
    */
-  public Properties getSchema() {
-    return schema;
+  public tableDesc getTblDesc() {
+    return tblDesc;
   }
 
   /**
-   * @param schema the schema to set
+   * @param tblDesc the tblDesc to set
    */
-  public void setSchema(Properties schema) {
-    this.schema = schema;
+  public void setTblDesc(tableDesc tblDesc) {
+    this.tblDesc = tblDesc;
   }
 
   /**
-   * @return the deserializerClass
+   * @return the partDir
   */
-  public Class<? extends Deserializer> getDeserializerClass() {
-    return deserializerClass;
+  public List<Path> getPartDir() {
+    return partDir;
   }
 
   /**
-   * @param deserializerClass the deserializerClass to set
+   * @param partDir the partDir to set
   */
-  public void setDeserializerClass(Class<? extends Deserializer> deserializerClass) {
-    this.deserializerClass = deserializerClass;
+  public void setPartDir(List<Path> partDir) {
+    this.partDir = partDir;
   }
 
   /**
-   * @return the inputFormatClass
+   * @return the partDesc
   */
-  public Class<? extends InputFormat> getInputFormatClass() {
-    return inputFormatClass;
+  public List<partitionDesc> getPartDesc() {
+    return partDesc;
  }
 
   /**
-   * @param inputFormatClass the inputFormatClass to set
+   * @param partDesc the partDesc to set
   */
-  public void setInputFormatClass(Class<? extends InputFormat> inputFormatClass) {
-    this.inputFormatClass = inputFormatClass;
+  public void setPartDesc(List<partitionDesc> partDesc) {
+    this.partDesc = partDesc;
   }
 
   /**
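The reworked fetchWork carries either a single table directory with its tableDesc, or parallel lists of partition directories and partition descriptors. A hypothetical construction of both shapes (paths invented; the desc objects are assumed to be built from table metadata elsewhere, so they are left null here):

    import java.util.Arrays;
    import org.apache.hadoop.fs.Path;

    public class FetchWorkSketch {
      fetchWork tableFetch() {
        // Unpartitioned table: one directory plus its tableDesc.
        return new fetchWork(new Path("/warehouse/src"), /* tblDesc */ null, 100);
      }
      fetchWork partitionFetch() {
        // Partitioned table: parallel lists of partition dirs and partitionDescs.
        return new fetchWork(
            Arrays.asList(new Path("/warehouse/srcpart/ds=2008-04-08")),
            Arrays.asList((partitionDesc) null), 100);
      }
    }
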
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(working copy)
@@ -218,7 +218,7 @@
 
   public boolean getResults(Vector<String> res) {
-    if (sem.getFetchTask() != null) {
+    if (sem != null && sem.getFetchTask() != null) {
       if (!sem.getFetchTaskInit()) {
         sem.setFetchTaskInit(true);
         sem.getFetchTask().initialize(conf);
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java	(working copy)
@@ -43,7 +43,7 @@
   public PrimitiveTypeInfo() {}
 
   public String getTypeName() {
-    return ObjectInspectorUtils.getClassShortName(primitiveClass.getName());
+    return ObjectInspectorUtils.getClassShortName(primitiveClass);
   }
 
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java	(working copy)
@@ -3,11 +3,16 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -109,4 +114,67 @@
     return result;
   }
 
+  public static String getFieldSchemaTypeFromTypeInfo(TypeInfo typeInfo) {
+    switch(typeInfo.getCategory()) {
+      case PRIMITIVE: {
+        return ObjectInspectorUtils.getClassShortName(typeInfo.getPrimitiveClass());
+      }
+      case LIST: {
+        String elementType = getFieldSchemaTypeFromTypeInfo(typeInfo.getListElementTypeInfo());
+        return org.apache.hadoop.hive.serde.Constants.LIST_TYPE_NAME + "<" + elementType + ">";
+      }
+      case MAP: {
+        String keyType = getFieldSchemaTypeFromTypeInfo(typeInfo.getMapKeyTypeInfo());
+        String valueType = getFieldSchemaTypeFromTypeInfo(typeInfo.getMapValueTypeInfo());
+        return org.apache.hadoop.hive.serde.Constants.MAP_TYPE_NAME + "<" +
+          keyType + "," + valueType + ">";
+      }
+      case STRUCT: {
+        throw new RuntimeException("Complex struct type not supported!");
+      }
+      default: {
+        throw new RuntimeException("Unknown type!");
+      }
+    }
+  }
+
+  /**
+   * Convert TypeInfo to FieldSchema.
+   */
+  public static FieldSchema getFieldSchemaFromTypeInfo(String fieldName, TypeInfo typeInfo) {
+    return new FieldSchema(
+      fieldName, getFieldSchemaTypeFromTypeInfo(typeInfo), "generated by TypeInfoUtils.getFieldSchemaFromTypeInfo"
+    );
+  }
+
+  /**
+   * The mapping from type name in DDL to the Java class.
+   */
+  public static final Map<String, Class<?>> TypeNameToClass = new HashMap<String, Class<?>>();
+  static {
+    TypeNameToClass.put(Constants.BOOLEAN_TYPE_NAME, Boolean.class);
+    TypeNameToClass.put(Constants.TINYINT_TYPE_NAME, Byte.class);
+    TypeNameToClass.put(Constants.SMALLINT_TYPE_NAME, Short.class);
+    TypeNameToClass.put(Constants.INT_TYPE_NAME, Integer.class);
+    TypeNameToClass.put(Constants.BIGINT_TYPE_NAME, Long.class);
+    TypeNameToClass.put(Constants.FLOAT_TYPE_NAME, Float.class);
+    TypeNameToClass.put(Constants.DOUBLE_TYPE_NAME, Double.class);
+    TypeNameToClass.put(Constants.STRING_TYPE_NAME, String.class);
+    TypeNameToClass.put(Constants.DATE_TYPE_NAME, java.sql.Date.class);
+    // These types are not supported yet.
+    // TypeNameToClass.put(Constants.DATETIME_TYPE_NAME);
+    // TypeNameToClass.put(Constants.TIMESTAMP_TYPE_NAME);
+  }
+
+  /**
+   * Return the primitive type corresponding to the field schema
+   * @param field The field schema
+   * @return The TypeInfo object, or null if the field is not a primitive type.
+   */
+  public static TypeInfo getPrimitiveTypeInfoFromFieldSchema(FieldSchema field) {
+    String type = field.getType();
+
+    Class<?> c = TypeNameToClass.get(type);
+    return c == null ? null : TypeInfoFactory.getPrimitiveTypeInfo(c);
+  }
 }
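A quick illustration of the new conversions, using only methods added in this hunk; the column name and comment are arbitrary, and the round trip only holds for primitive types (TypeNameToClass covers nothing else):

    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde.Constants;

    public class TypeNameRoundTrip {
      public static void main(String[] args) {
        // DDL name -> TypeInfo (primitives only) -> DDL name.
        FieldSchema fs = new FieldSchema("c0", Constants.INT_TYPE_NAME, "illustrative");
        TypeInfo ti = TypeInfoUtils.getPrimitiveTypeInfoFromFieldSchema(fs);
        System.out.println(TypeInfoUtils.getFieldSchemaTypeFromTypeInfo(ti)); // "int"
        // List and map names nest recursively, e.g. MAP_TYPE_NAME + "<string,int>".
      }
    }
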
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java	(working copy)
@@ -37,9 +37,9 @@
     mCount = 0;
   }
 
-  public boolean aggregate(String o) {
-    if (o != null && !o.isEmpty()) {
-      mSum += Double.parseDouble(o);
+  public boolean aggregate(Double o) {
+    if (o != null) {
+      mSum += o;
       mCount ++;
     }
     return true;
@@ -60,9 +60,9 @@
     return true;
   }
 
-  public String evaluate() {
+  public Double evaluate() {
     // This is SQL standard - average of zero items should be null.
-    return mCount == 0 ? null : String.valueOf(mSum / mCount);
+    return mCount == 0 ? null : Double.valueOf(mSum / mCount);
   }
 }
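This and the following UDAF changes move the aggregates from String to Double arguments, so NULL handling becomes a null check instead of an empty-string check, and no parsing is needed. A sketch of the resulting semantics, assuming the evaluator can be driven directly (Hive normally drives UDAFs itself, and a usable no-arg constructor is assumed):

    public class UdafSemanticsSketch {
      public static void main(String[] args) {
        UDAFAvg avg = new UDAFAvg();        // assumption: direct instantiation works
        avg.aggregate(Double.valueOf(1.0));
        avg.aggregate(null);                // NULL input is skipped, not counted
        avg.aggregate(Double.valueOf(2.0));
        System.out.println(avg.evaluate()); // 1.5; null when nothing was aggregated
      }
    }
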
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java	(working copy)
@@ -37,30 +37,30 @@
     mEmpty = true;
   }
 
-  public boolean aggregate(String o) {
-    if (o != null && !o.isEmpty()) {
-      mSum += Double.parseDouble(o);
+  public boolean aggregate(Double o) {
+    if (o != null) {
+      mSum += o;
       mEmpty = false;
     }
     return true;
   }
 
-  public String evaluatePartial() {
+  public Double evaluatePartial() {
     // This is SQL standard - sum of zero items should be null.
-    return mEmpty ? null : new Double(mSum).toString();
+    return mEmpty ? null : new Double(mSum);
   }
 
-  public boolean aggregatePartial(String o) {
-    if (o != null && !o.isEmpty()) {
-      mSum += Double.parseDouble(o);
+  public boolean aggregatePartial(Double o) {
+    if (o != null) {
+      mSum += o;
       mEmpty = false;
     }
     return true;
   }
 
-  public String evaluate() {
+  public Double evaluate() {
     // This is SQL standard - sum of zero items should be null.
-    return mEmpty ? null : new Double(mSum).toString();
+    return mEmpty ? null : new Double(mSum);
   }
 }
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java	(working copy)
@@ -78,12 +78,4 @@
     }
   }
 
-  public String evaluate(java.sql.Date i)  {
-    if (i == null) {
-      return null;
-    } else {
-      return i.toString();
-    }
-  }
-
 }
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java	(working copy)
@@ -37,28 +37,28 @@
     mEmpty = true;
   }
 
-  public boolean aggregate(String o) {
-    if (o != null && !o.isEmpty()) {
+  public boolean aggregate(Double o) {
+    if (o != null) {
       if (mEmpty) {
-        mMin = Double.parseDouble(o);
+        mMin = o;
         mEmpty = false;
       } else {
-        mMin = Math.min(mMin, Double.parseDouble(o));
+        mMin = Math.min(mMin, o);
       }
     }
     return true;
   }
 
-  public String evaluatePartial() {
-    return mEmpty ? null : String.valueOf(mMin);
+  public Double evaluatePartial() {
+    return mEmpty ? null : Double.valueOf(mMin);
   }
 
-  public boolean aggregatePartial(String o) {
+  public boolean aggregatePartial(Double o) {
     return aggregate(o);
   }
 
-  public String evaluate() {
-    return mEmpty ? null : String.valueOf(mMin);
+  public Double evaluate() {
+    return mEmpty ? null : Double.valueOf(mMin);
   }
 }
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java	(working copy)
@@ -37,28 +37,28 @@
     mEmpty = true;
   }
 
-  public boolean aggregate(String o) {
-    if (o != null && !o.isEmpty()) {
+  public boolean aggregate(Double o) {
+    if (o != null) {
       if (mEmpty) {
-        mMax = Double.parseDouble(o);
+        mMax = o;
         mEmpty = false;
       } else {
-        mMax = Math.max(mMax, Double.parseDouble(o));
+        mMax = Math.max(mMax, o);
       }
     }
     return true;
   }
 
-  public String evaluatePartial() {
-    return mEmpty ? null : String.valueOf(mMax);
+  public Double evaluatePartial() {
+    return mEmpty ? null : Double.valueOf(mMax);
  }
 
-  public boolean aggregatePartial(String o) {
+  public boolean aggregatePartial(Double o) {
     return aggregate(o);
   }
 
-  public String evaluate() {
-    return mEmpty ? null : String.valueOf(mMax);
+  public Double evaluate() {
+    return mEmpty ? null : Double.valueOf(mMax);
   }
 }
Index: src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java
===================================================================
--- src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java	(revision 712243)
+++ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java	(working copy)
@@ -42,17 +42,17 @@
     return true;
   }
 
-  public String evaluatePartial() {
-    return Long.valueOf(mCount).toString();
+  public Long evaluatePartial() {
+    return Long.valueOf(mCount);
   }
 
-  public boolean aggregatePartial(String count) {
-    mCount += Long.parseLong(count);
+  public boolean aggregatePartial(Long count) {
+    mCount += count;
     return true;
   }
 
-  public String evaluate() {
-    return Long.valueOf(mCount).toString();
+  public Long evaluate() {
+    return Long.valueOf(mCount);
   }
 
Index: src/contrib/hive/README
===================================================================
--- src/contrib/hive/README	(revision 712243)
+++ src/contrib/hive/README	(working copy)
@@ -118,6 +118,10 @@
 
 shows the list of columns
 
+hive> DESCRIBE EXTENDED invites;
+
+shows the list of columns plus any other metadata about the table
+
 Altering tables. Table names can be changed and additional columns can be added
 
 hive> ALTER TABLE pokes ADD COLUMNS (new_col INT);
@@ -258,28 +262,92 @@
 
 STREAMING
 ---------
 
-hive> FROM invites a INSERT OVERWRITE TABLE events SELECT TRANSFORM(a.foo, a.bar) AS (oof, rab) USING '/bin/cat' WHERE a.ds > '2008-08-09';
+hive> FROM invites a INSERT OVERWRITE TABLE events
+    > SELECT TRANSFORM(a.foo, a.bar) AS (oof, rab)
+    > USING '/bin/cat' WHERE a.ds > '2008-08-09';
 
-This streams the data in the map phase through the script /bin/cat (like hadoop streaming).
+This streams the data in the map phase through the script /bin/cat (like hadoop
+streaming). Similarly, streaming can be used on the reduce side (please see the
+Hive Tutorial for examples).
 
 KNOWN BUGS/ISSUES
 -----------------
 * hive cli may hang for a couple of minutes because of a bug in getting metadata
   from the derby database. let it run and you'll be fine!
-* hive cli does not support multi-line queries.
 * hive cli creates derby.log in the directory from which it has been invoked.
-* DESCRIBE table currently only shows columns in a table. Other metadata like
-  partitions, buckets etc are not shown.
-* LOAD FILE or INSERT INTO TABLE do not validate schemas of the destination tables.
 * COUNT(*) does not work for now. Use COUNT(1) instead.
-* String literals are indicated by single quotes(double quotes are not supported).
-  So 'is a valid string' while "is not a valid string" in the query language. Hive
-  does support escaping quotes and semi-colon similar to MySQL.
-* Multiple GROUP BYs are not supported in the multi-table table INSERT queries.
-* ORDER BY not supported.
+* ORDER BY not supported yet.
 * Only string and thrift types (http://developers.facebook.com/thrift) have been tested.
+* When doing a JOIN, put the table with the largest number of rows per join key
+rightmost in the JOIN clause, as in the example after this list. Otherwise you
+may see OutOfMemory errors.
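For example, with illustrative table names where big has many rows per join key and small has few:

    hive> SELECT s.foo, b.bar FROM small s JOIN big b ON (s.foo = b.foo);

The rightmost table in a join is streamed while rows from the earlier tables are buffered for each key, so putting the big table last keeps the buffered side small.
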
-
 
 FUTURE FEATURES
 ---------------
+* EXPLODE function to generate multiple rows from a column of list type.
+* Simpler syntax for running Map/Reduce scripts.
+* ORDER BY and SORT BY.
+* Table statistics for query optimization.
+
+Developing Hive using Eclipse
+------------------------
+1. Set up the hadoop development environment with Eclipse:
+http://wiki.apache.org/hadoop/EclipseEnvironment
+
+2. Download the Hive src code from:
+http://mirror.facebook.com/facebook/hive
+
+If the hadoop version is 0.17.x or 0.18.x, use
+http://mirror.facebook.com/facebook/hive/hadoop-0.17/
+
+If the hadoop version is 0.19.x or above, or trunk, use
+http://mirror.facebook.com/facebook/hive/hadoop-0.19/
+
+3. Extract the Hive src code to src/contrib/hive; make sure this file (README)
+is in src/contrib/hive.
+
+4. In src/contrib/hive, run "ant package".
+
+5. In src/contrib/hive, run "ant -logfile test.log test" to make sure
+everything works. This test may take 20 minutes.
+
+6. Add the following list to the Eclipse project's .classpath file:
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+7. Develop using Eclipse.
+
+
+Development Tips
+------------------------
+* You may change the first line in conf/hive-log4j.properties to the following
+line to see error messages on the console:
+hive.root.logger=INFO,console
+Otherwise you will see error messages in /tmp/
+* You may use the following lines to test a specific testcase with a specific
+query file:
+ant -Dtestcase=TestParse -Dqfile=udf4.q test
+ant -Dtestcase=TestParseNegative -Dqfile=invalid_dot.q test
+ant -Dtestcase=TestCliDriver -Dqfile=udf1.q test
+ant -Dtestcase=TestNegativeCliDriver -Dqfile=invalid_tbl_name.q test
Index: src/contrib/hive/build.xml
===================================================================
--- src/contrib/hive/build.xml	(revision 712243)
+++ src/contrib/hive/build.xml	(working copy)
@@ -161,10 +161,6 @@
 
-
-
-
-