Index: service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java
===================================================================
--- service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java	(revision 901519)
+++ service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java	(working copy)
@@ -1,48 +1,49 @@
 package org.apache.hadoop.hive.service;
 
-import java.util.*;
+import java.util.List;
+import java.util.Properties;
 
-import org.apache.hadoop.fs.Path;
 import junit.framework.TestCase;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
-import org.apache.hadoop.hive.service.HiveInterface;
-import org.apache.hadoop.hive.service.HiveClient;
-import org.apache.hadoop.hive.service.HiveServer;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
-import org.apache.hadoop.hive.serde.Constants;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.BytesWritable;
 
 public class TestHiveServer extends TestCase {
   private HiveInterface client;
   private final static String host = "localhost";
   private final static int port = 10000;
-  private Path dataFilePath;
+  private final Path dataFilePath;
   private static String tableName = "testhivedrivertable";
-  private HiveConf conf;
+  private final HiveConf conf;
   private boolean standAloneServer = false;
   private TTransport transport;
 
   public TestHiveServer(String name) {
     super(name);
 
     conf = new HiveConf(TestHiveServer.class);
-    String dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", "");
+    String dataFileDir = conf.get("test.data.files").replace('\\', '/')
+        .replace("c:", "");
     dataFilePath = new Path(dataFileDir, "kv1.txt");
 
     // See data/conf/hive-site.xml
     String paramStr = System.getProperty("test.service.standalone.server");
-    if (paramStr != null && paramStr.equals("true"))
+    if (paramStr != null && paramStr.equals("true")) {
       standAloneServer = true;
+    }
   }
 
+  @Override
   protected void setUp() throws Exception {
     super.setUp();
     if (standAloneServer) {
@@ -51,16 +52,15 @@
       TProtocol protocol = new TBinaryProtocol(transport);
       client = new HiveClient(protocol);
       transport.open();
-      }
-      catch (Throwable e) {
+      } catch (Throwable e) {
         e.printStackTrace();
       }
-    }
-    else {
+    } else {
       client = new HiveServer.HiveServerHandler();
     }
   }
 
+  @Override
   protected void tearDown() throws Exception {
     super.tearDown();
     if (standAloneServer) {
@@ -76,26 +76,26 @@
 
     try {
       client.execute("create table " + tableName + " (num int)");
-      client.execute("load data local inpath '" + dataFilePath.toString() + "' into table " + tableName);
+      client.execute("load data local inpath '" + dataFilePath.toString()
+          + "' into table " + tableName);
       client.execute("select count(1) as cnt from " + tableName);
       String row = client.fetchOne();
       assertEquals(row, "500");
-      
+
       Schema hiveSchema = client.getSchema();
       List listFields = hiveSchema.getFieldSchemas();
       assertEquals(listFields.size(), 1);
       assertEquals(listFields.get(0).getName(), "cnt");
       assertEquals(listFields.get(0).getType(), "bigint");
-      
+
       Schema thriftSchema = client.getThriftSchema();
       List listThriftFields = thriftSchema.getFieldSchemas();
       assertEquals(listThriftFields.size(), 1);
       assertEquals(listThriftFields.get(0).getName(), "cnt");
       assertEquals(listThriftFields.get(0).getType(), "i64");
-      
+
       client.execute("drop table " + tableName);
-    }
-    catch (Throwable t) {
+    } catch (Throwable t) {
       t.printStackTrace();
     }
   }
@@ -107,7 +107,8 @@
     }
 
     client.execute("create table " + tableName + " (num int)");
-    client.execute("load data local inpath '" + dataFilePath.toString() + "' into table " + tableName);
+    client.execute("load data local inpath '" + dataFilePath.toString()
+        + "' into table " + tableName);
     client.execute("select count(1) from " + tableName);
     String row = client.fetchOne();
     assertEquals(row, "500");
@@ -122,38 +123,39 @@
     }
 
     client.execute("create table " + tableName + " (num int)");
-    client.execute("load data local inpath '" + dataFilePath.toString() + "' into table " + tableName);
-    
+    client.execute("load data local inpath '" + dataFilePath.toString()
+        + "' into table " + tableName);
+
-    // Command not part of HiveQL - verify no results 
+    // Command not part of HiveQL - verify no results
     client.execute("SET hive.mapred.mode = nonstrict");
-    
+
     Schema schema = client.getSchema();
     assertEquals(schema.getFieldSchemasSize(), 0);
     assertEquals(schema.getPropertiesSize(), 0);
-    
+
     Schema thriftschema = client.getThriftSchema();
     assertEquals(thriftschema.getFieldSchemasSize(), 0);
    assertEquals(thriftschema.getPropertiesSize(), 0);
-    
+
     assertEquals(client.fetchOne(), "");
     assertEquals(client.fetchN(10).size(), 0);
     assertEquals(client.fetchAll().size(), 0);
-    
+
     // Execute Hive query and fetch
     client.execute("select * from " + tableName + " limit 10");
-    String row = client.fetchOne();
-    
+    client.fetchOne();
+
     // Re-execute command not part of HiveQL - verify still no results
     client.execute("SET hive.mapred.mode = nonstrict");
-    
+
     schema = client.getSchema();
     assertEquals(schema.getFieldSchemasSize(), 0);
     assertEquals(schema.getPropertiesSize(), 0);
-    
+
     thriftschema = client.getThriftSchema();
     assertEquals(thriftschema.getFieldSchemasSize(), 0);
     assertEquals(thriftschema.getPropertiesSize(), 0);
-    
+
     assertEquals(client.fetchOne(), "");
     assertEquals(client.fetchN(10).size(), 0);
     assertEquals(client.fetchAll().size(), 0);
@@ -161,7 +163,7 @@
     // Cleanup
     client.execute("drop table " + tableName);
   }
-  
+
   /**
    * Test metastore call
    */
@@ -188,10 +190,10 @@
     assertTrue(clusterStatus.getReduceTasks() >= 0);
     assertTrue(clusterStatus.getMaxMapTasks() >= 0);
     assertTrue(clusterStatus.getMaxReduceTasks() >= 0);
-    assertTrue(clusterStatus.getState() == JobTrackerState.INITIALIZING ||
-        clusterStatus.getState() == JobTrackerState.RUNNING);
+    assertTrue(clusterStatus.getState() == JobTrackerState.INITIALIZING
+        || clusterStatus.getState() == JobTrackerState.RUNNING);
   }
-  
+
   /**
    *
    */
@@ -202,31 +204,31 @@
     } catch (Exception ex) {
     }
     client.execute("create table " + tableName + " (key int, value string)");
-    client.execute("load data local inpath '" + dataFilePath.toString() + "' into table " + tableName);
+    client.execute("load data local inpath '" + dataFilePath.toString()
+        + "' into table " + tableName);
 
     try {
-    // fetchAll test
-    client.execute("select key, value from " + tableName);
-    assertEquals(client.fetchAll().size(), 500);
-    assertEquals(client.fetchAll().size(), 0);
+      // fetchAll test
+      client.execute("select key, value from " + tableName);
+      assertEquals(client.fetchAll().size(), 500);
+      assertEquals(client.fetchAll().size(), 0);
 
-    // fetchOne test
-    client.execute("select key, value from " + tableName);
-    for (int i = 0; i < 500; i++) {
-      String str = client.fetchOne();
-      if (str.equals("")) {
-        assertTrue(false);
+      // fetchOne test
+      client.execute("select key, value from " + tableName);
+      for (int i = 0; i < 500; i++) {
+        String str = client.fetchOne();
+        if (str.equals("")) {
+          assertTrue(false);
+        }
       }
-    }
-    assertEquals(client.fetchOne(), "");
+      assertEquals(client.fetchOne(), "");
 
-    // fetchN test
-    client.execute("select key, value from " + tableName);
-    assertEquals(client.fetchN(499).size(), 499);
-    assertEquals(client.fetchN(499).size(), 1);
-    assertEquals(client.fetchN(499).size(), 0);
-    }
-    catch (Throwable e) {
+      // fetchN test
+      client.execute("select key, value from " + tableName);
+      assertEquals(client.fetchN(499).size(), 499);
+      assertEquals(client.fetchN(499).size(), 1);
+      assertEquals(client.fetchN(499).size(), 0);
+    } catch (Throwable e) {
      e.printStackTrace();
     }
   }
@@ -238,21 +240,28 @@
     }
 
     client.execute("create table " + tableName + " (key int, value string)");
-    client.execute("load data local inpath '" + dataFilePath.toString() + "' into table " + tableName);
-    //client.execute("select key, count(1) from " + tableName + " where key > 10 group by key");
+    client.execute("load data local inpath '" + dataFilePath.toString()
+        + "' into table " + tableName);
+    // client.execute("select key, count(1) from " + tableName +
+    // " where key > 10 group by key");
     String sql = "select key, value from " + tableName + " where key > 10";
     client.execute(sql);
 
     // Instantiate DynamicSerDe
     DynamicSerDe ds = new DynamicSerDe();
     Properties dsp = new Properties();
-    dsp.setProperty(Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName());
-    dsp.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "result");
+    dsp.setProperty(Constants.SERIALIZATION_FORMAT,
+        org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class
+            .getName());
+    dsp.setProperty(
+        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME,
+        "result");
     String serDDL = new String("struct result { ");
     List schema = client.getThriftSchema().getFieldSchemas();
     for (int pos = 0; pos < schema.size(); pos++) {
-      if (pos != 0)
-        serDDL = serDDL.concat(",");
+      if (pos != 0) {
+        serDDL = serDDL.concat(",");
+      }
       serDDL = serDDL.concat(schema.get(pos).getType());
       serDDL = serDDL.concat(" ");
       serDDL = serDDL.concat(schema.get(pos).getName());
@@ -268,10 +277,10 @@
 
     Object o = ds.deserialize(new BytesWritable(row.getBytes()));
     assertEquals(o.getClass().toString(), "class java.util.ArrayList");
-    List lst = (List)o;
+    List lst = (List) o;
     assertEquals(lst.get(0), 238);
 
-    // TODO: serde doesn't like underscore -- struct result { string _c0} 
+    // TODO: serde doesn't like underscore -- struct result { string _c0}
     sql = "select count(1) as c from " + tableName;
     client.execute(sql);
     row = client.fetchOne();
@@ -279,8 +288,9 @@
     serDDL = new String("struct result { ");
     schema = client.getThriftSchema().getFieldSchemas();
     for (int pos = 0; pos < schema.size(); pos++) {
-      if (pos != 0)
-        serDDL = serDDL.concat(",");
+      if (pos != 0) {
+        serDDL = serDDL.concat(",");
+      }
       serDDL = serDDL.concat(schema.get(pos).getType());
       serDDL = serDDL.concat(" ");
       serDDL = serDDL.concat(schema.get(pos).getName());
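
Note for reviewers: below is a minimal, self-contained sketch of the client flow these tests exercise, assuming a standalone HiveServer is already listening on the test's default localhost:10000. Every API call is taken from the patched test itself; only the class name HiveClientSketch and the printed output are ours.

    import org.apache.hadoop.hive.service.HiveClient;
    import org.apache.hadoop.hive.service.HiveInterface;
    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.protocol.TProtocol;
    import org.apache.thrift.transport.TSocket;
    import org.apache.thrift.transport.TTransport;

    public class HiveClientSketch {
      public static void main(String[] args) throws Exception {
        // Same wiring as setUp() in the standalone-server branch of the test.
        TTransport transport = new TSocket("localhost", 10000);
        TProtocol protocol = new TBinaryProtocol(transport);
        HiveInterface client = new HiveClient(protocol);
        transport.open();

        // Fetch semantics the tests assert: fetchOne() returns "" once the
        // result set is exhausted, fetchN(n) returns up to n rows, and
        // fetchAll() drains whatever remains.
        client.execute("select count(1) as cnt from testhivedrivertable");
        System.out.println(client.fetchOne()); // "500" once kv1.txt is loaded

        transport.close(); // mirrors tearDown()
      }
    }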