diff --git cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index d9b7031..b6f9350 100644
--- cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -32,12 +32,12 @@ import java.util.Set;
 
 import jline.ArgumentCompletor;
-import jline.ArgumentCompletor.AbstractArgumentDelimiter;
-import jline.ArgumentCompletor.ArgumentDelimiter;
 import jline.Completor;
 import jline.ConsoleReader;
 import jline.History;
 import jline.SimpleCompletor;
+import jline.ArgumentCompletor.AbstractArgumentDelimiter;
+import jline.ArgumentCompletor.ArgumentDelimiter;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -55,7 +55,7 @@ import org.apache.hadoop.hive.ql.exec.HadoopJobExecHelper;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
@@ -266,13 +266,11 @@ int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) {
 
       printHeader(qp, out);
 
-      int counter = 0;
       try {
         while (qp.getResults(res)) {
           for (String r : res) {
             out.println(r);
          }
-          counter += res.size();
           res.clear();
           if (out.checkError()) {
             break;
@@ -291,9 +289,10 @@ int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) {
       }
 
       long end = System.currentTimeMillis();
-      double timeTaken = (end - start) / 1000.0;
-      console.printInfo("Time taken: " + timeTaken + " seconds" +
-          (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
+      if (end > start) {
+        double timeTaken = (end - start) / 1000.0;
+        console.printInfo("Time taken: " + timeTaken + " seconds", null);
+      }
     } else {
       String firstToken = tokenizeCmd(cmd.trim())[0];
@@ -536,7 +535,7 @@ public void processSelectDatabase(CliSessionState ss) throws IOException {
     }
 
     // We add Hive keywords, including lower-cased versions
-    for (String s : HiveParser.getKeywords()) {
+    for (String s : ParseDriver.getKeywords()) {
       sc.addCandidateString(s);
       sc.addCandidateString(s.toLowerCase());
     }
@@ -611,11 +610,11 @@ public int complete(String buffer, int offset, List completions) {
   }
 
   public static void main(String[] args) throws Exception {
-    int ret = run(args);
+    int ret = new CliDriver().run(args);
     System.exit(ret);
   }
 
-  public static int run(String[] args) throws Exception {
+  public int run(String[] args) throws Exception {
 
     OptionsProcessor oproc = new OptionsProcessor();
     if (!oproc.process_stage1(args)) {
@@ -669,30 +668,6 @@ public static int run(String[] args) throws Exception {
 
     SessionState.start(ss);
 
-    // execute cli driver work
-    int ret = 0;
-    try {
-      ret = executeDriver(ss, conf, oproc);
-    } catch (Exception e) {
-      ss.close();
-      throw e;
-    }
-
-    ss.close();
-    return ret;
-  }
-
-  /**
-   * Execute the cli work
-   * @param ss CliSessionState of the CLI driver
-   * @param conf HiveConf for the driver sionssion
-   * @param oproc Opetion processor of the CLI invocation
-   * @return status of the CLI comman execution
-   * @throws Exception
-   */
-  private static int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor oproc)
-      throws Exception {
-
     // connect to Hive Server
     if (ss.getHost() != null) {
       ss.connect();
@@ -728,21 +703,19 @@ private static int executeDriver(CliSessionState ss, HiveConf conf, OptionsProce
     cli.processInitFiles(ss);
 
     if (ss.execString != null) {
-      int cmdProcessStatus = cli.processLine(ss.execString);
-      return cmdProcessStatus;
+      return cli.processLine(ss.execString);
     }
 
     try {
       if (ss.fileName != null) {
-        int fileProcessStatus = cli.processFile(ss.fileName);
-        return fileProcessStatus;
+        return cli.processFile(ss.fileName);
       }
     } catch (FileNotFoundException e) {
      System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
      return 3;
    }
 
-    ConsoleReader reader = new ConsoleReader();
+    ConsoleReader reader = getConsoleReader();
     reader.setBellEnabled(false);
     // reader.setDebug(new PrintWriter(new FileWriter("writer.debug", true)));
     for (Completor completor : getCommandCompletor()) {
@@ -790,9 +763,15 @@ private static int executeDriver(CliSessionState ss, HiveConf conf, OptionsProce
         continue;
       }
     }
+
+    ss.close();
+    return ret;
   }
 
+  protected ConsoleReader getConsoleReader() throws IOException {
+    return new ConsoleReader();
+  }
+
   /**
    * Retrieve the current database name string to display, based on the
    * configuration value.
diff --git cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java
index 0af27b2..f1806a0 100644
--- cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java
+++ cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java
@@ -22,6 +22,7 @@
 import java.io.FileDescriptor;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.io.PrintStream;
 import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
@@ -53,6 +54,8 @@
   // In verbose mode, print an update per RECORD_PRINT_INTERVAL records
   private static final int RECORD_PRINT_INTERVAL = (1024*1024);
 
+  protected static boolean test = false;
+
   public RCFileCat() {
     super();
     decoder = Charset.forName("UTF-8").newDecoder().
@@ -81,6 +84,7 @@ public int run(String[] args) throws Exception {
     //get options from arguments
     if (args.length < 1 || args.length > 3) {
       printUsage(null);
+      return -1;
     }
     Path fileName = null;
     for (int i = 0; i < args.length; i++) {
@@ -102,6 +106,7 @@ public int run(String[] args) throws Exception {
         fileName = new Path(arg);
       } else {
         printUsage(null);
+        return -1;
       }
     }
 
@@ -253,14 +258,19 @@ public static void main(String[] args) {
       e.printStackTrace();
       System.err.println("\n\n\n");
       printUsage(e.getMessage());
+      System.exit(1);
     }
   }
 
   private static void setupBufferedOutput() {
-    FileOutputStream fdout =
-      new FileOutputStream(FileDescriptor.out);
+    OutputStream pdataOut;
+    if (test) {
+      pdataOut = System.out;
+    } else {
+      pdataOut = new FileOutputStream(FileDescriptor.out);
+    }
     BufferedOutputStream bos =
-      new BufferedOutputStream(fdout, STDOUT_BUFFER_SIZE);
+      new BufferedOutputStream(pdataOut, STDOUT_BUFFER_SIZE);
     PrintStream ps = new PrintStream(bos, false);
     System.setOut(ps);
@@ -270,7 +280,6 @@ private static void printUsage(String errorMsg) {
     if(errorMsg != null) {
       System.err.println(errorMsg);
     }
-    System.exit(1);
   }
 
 }
diff --git cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
index 22a0891..3e1f491 100644
--- cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
+++ cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
@@ -17,28 +17,50 @@
  */
 package org.apache.hadoop.hive.cli;
 
-import junit.framework.TestCase;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Schema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.List;
-import static org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyInt;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.lang.reflect.Field;
+import java.security.Permission;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import jline.ArgumentCompletor;
+import jline.Completor;
+import jline.ConsoleReader;
+import junit.framework.TestCase;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.service.HiveClient;
+import org.apache.hadoop.hive.service.HiveServerException;
+import org.apache.thrift.TException;
+
+
 // Cannot call class TestCliDriver since that's the name of the generated
 // code for the script-based testing
 public class TestCliDriverMethods extends TestCase {
@@ -58,7 +80,8 @@ public void testThatCliDriverPrintsHeaderForCommandsWithSchema() throws CommandN
   }
 
   // If the command has no schema, make sure nothing is printed
-  public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema() throws CommandNeedRetryException {
+  public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema()
+      throws CommandNeedRetryException {
     Schema mockSchema = mock(Schema.class);
     when(mockSchema.getFieldSchemas()).thenReturn(null);
 
@@ -69,16 +92,21 @@ public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema() throws Comm
 
   /**
    * Do the actual testing against a mocked CliDriver based on what type of schema
-   * @param mockSchema Schema to throw against test
+   *
+   * @param mockSchema
+   *          Schema to throw against test
    * @return Output that would have been sent to the user
-   * @throws CommandNeedRetryException won't actually be thrown
+   * @throws CommandNeedRetryException
+   *           won't actually be thrown
    */
   private PrintStream headerPrintingTestDriver(Schema mockSchema) throws CommandNeedRetryException {
     CliDriver cliDriver = new CliDriver();
 
     // We want the driver to try to print the header...
+
     Configuration conf = mock(Configuration.class);
-    when(conf.getBoolean(eq(ConfVars.HIVE_CLI_PRINT_HEADER.varname), anyBoolean())).thenReturn(true);
+    when(conf.getBoolean(eq(ConfVars.HIVE_CLI_PRINT_HEADER.varname), anyBoolean()))
+        .thenReturn(true);
     cliDriver.setConf(conf);
 
     Driver proc = mock(Driver.class);
@@ -99,4 +127,429 @@ private PrintStream headerPrintingTestDriver(Schema mockSchema) throws CommandNe
 
     return mockOut;
   }
+
+  public void testGetCommandCompletor() {
+    Completor[] completors = CliDriver.getCommandCompletor();
+    assertEquals(2, completors.length);
+    assertTrue(completors[0] instanceof ArgumentCompletor);
+    assertTrue(completors[1] instanceof Completor);
+
+    // the completor adds a space after the last delimiter
+    List<String> testList = new ArrayList<String>(Arrays.asList(new String[] {")"}));
+    completors[1].complete("fdsdfsdf", 0, testList);
+    assertEquals(") ", testList.get(0));
+    testList = new ArrayList<String>();
+    completors[1].complete("len", 0, testList);
+    assertTrue(testList.get(0).endsWith("length("));
+
+    testList = new ArrayList<String>();
+    completors[0].complete("set f", 0, testList);
+    assertEquals("set", testList.get(0));
+
+  }
+
+  public void testRun() throws Exception {
+    // clean history
+    String historyDirectory = System.getProperty("user.home");
+    if ((new File(historyDirectory)).exists()) {
+      File historyFile = new File(historyDirectory + File.separator + ".hivehistory");
+      historyFile.delete();
+    }
+    HiveConf configuration = new HiveConf();
+    CliSessionState ss = new CliSessionState(configuration);
+    CliSessionState.start(ss);
+    String[] args = {};
+    PrintStream oldOut = System.out;
+    ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
+    System.setOut(new PrintStream(dataOut));
+
+    PrintStream oldErr = System.err;
+    ByteArrayOutputStream dataErr = new ByteArrayOutputStream();
+    System.setErr(new PrintStream(dataErr));
+
+    try {
+      new FakeCliDriver().run(args);
+      assertTrue(dataOut.toString().contains("test message"));
+      assertTrue(dataErr.toString().contains("Hive history file="));
+      assertTrue(dataErr.toString().contains("File: fakeFile is not a file."));
+      dataOut.reset();
+      dataErr.reset();
+
+    } finally {
+      System.setOut(oldOut);
+      System.setErr(oldErr);
+    }
+
+  }
+
+  /**
+   * Test commands exit and quit
+   */
+  public void testQuit() throws Exception {
+
+    CliSessionState ss = new CliSessionState(new HiveConf());
+    ss.err = System.err;
+    ss.out = System.out;
+
+    NoExitSecurityManager newSecurityManager = new NoExitSecurityManager();
+    try {
+      CliSessionState.start(ss);
+      CliDriver cliDriver = new CliDriver();
+      cliDriver.processCmd("quit");
+      fail("should exit");
+    } catch (ExitException e) {
+      assertEquals(0, e.getStatus());
+
+    } catch (Exception e) {
+      newSecurityManager.resetSecurityManager();
+      throw e;
+    }
+
+    try {
+      CliSessionState.start(ss);
+      CliDriver cliDriver = new CliDriver();
+      cliDriver.processCmd("exit");
+      fail("should exit");
+    } catch (ExitException e) {
+      assertEquals(0, e.getStatus());
+
+    } finally {
+      newSecurityManager.resetSecurityManager();
+    }
+
+  }
+
+  /**
+   * test remote execCommand
+   */
+  public void testRemoteCall() throws Exception {
+    MyCliSessionState ss = new MyCliSessionState(new HiveConf(),
+        org.apache.hadoop.hive.cli.TestCliDriverMethods.MyCliSessionState.ClientResult.RETURN_OK);
+    ss.err = System.err;
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    ss.out = new PrintStream(data);
+    MyCliSessionState.start(ss);
+
+    CliDriver cliDriver = new CliDriver();
+    cliDriver.processCmd("remote command");
+    assertTrue(data.toString().contains("test result"));
+
+  }
+
+  /**
+   * test remote Exception
+   */
+  public void testServerException() throws Exception {
+    MyCliSessionState ss = new MyCliSessionState(
+        new HiveConf(),
+        org.apache.hadoop.hive.cli.TestCliDriverMethods.MyCliSessionState.ClientResult.RETURN_SERVER_EXCEPTION);
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    ss.err = new PrintStream(data);
+    ss.out = System.out;
+    MyCliSessionState.start(ss);
+
+    CliDriver cliDriver = new CliDriver();
+    cliDriver.processCmd("remote command");
+    assertTrue(data.toString().contains("[Hive Error]: test HiveServerException"));
+    data.reset();
+
+  }
+
+  /**
+   * test remote Exception
+   */
+  public void testServerTException() throws Exception {
+    MyCliSessionState ss = new MyCliSessionState(
+        new HiveConf(),
+        org.apache.hadoop.hive.cli.TestCliDriverMethods.MyCliSessionState.ClientResult.RETURN_T_EXCEPTION);
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    ss.err = new PrintStream(data);
+    ss.out = System.out;
+    MyCliSessionState.start(ss);
+
+    CliDriver cliDriver = new CliDriver();
+    cliDriver.processCmd("remote command");
+    assertTrue(data.toString().contains("[Thrift Error]: test TException"));
+    assertTrue(data.toString().contains(
+        "[Thrift Error]: Hive server is not cleaned due to thrift exception: test TException"));
+
+  }
+
+  /**
+   * test processSelectDatabase
+   */
+  public void testProcessSelectDatabase() throws Exception {
+    CliSessionState sessionState = new CliSessionState(new HiveConf());
+    CliSessionState.start(sessionState);
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    sessionState.err = new PrintStream(data);
+    sessionState.database = "database";
+    CliDriver driver = new CliDriver();
+    NoExitSecurityManager securityManager = new NoExitSecurityManager();
+    try {
+      driver.processSelectDatabase(sessionState);
+      fail("should exit");
+    } catch (ExitException e) {
+      e.printStackTrace();
+      assertEquals(40000, e.getStatus());
+    } finally {
+      securityManager.resetSecurityManager();
+    }
+    assertTrue(data.toString().contains(
+        "FAILED: ParseException line 1:4 cannot recognize input near 'database'"));
+  }
+
+  public void testProcessInitFiles() throws Exception {
+    String oldHiveHome = System.getenv("HIVE_HOME");
+    String oldHiveConfDir = System.getenv("HIVE_CONF_DIR");
+
+    File homeFile = File.createTempFile("test", "hive");
+    String tmpDir = homeFile.getParentFile().getAbsoluteFile() + File.separator
+        + "TestCliDriverMethods";
+    homeFile.delete();
+    FileUtils.deleteDirectory(new File(tmpDir));
+    homeFile = new File(tmpDir + File.separator + "bin" + File.separator + CliDriver.HIVERCFILE);
+    homeFile.getParentFile().mkdirs();
+    homeFile.createNewFile();
+    FileUtils.write(homeFile, "-- init hive file for test ");
+    setEnv("HIVE_HOME", homeFile.getParentFile().getParentFile().getAbsolutePath());
+    setEnv("HIVE_CONF_DIR", homeFile.getParentFile().getAbsolutePath());
+    CliSessionState sessionState = new CliSessionState(new HiveConf());
+
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    NoExitSecurityManager securityManager = new NoExitSecurityManager();
+
+    sessionState.err = new PrintStream(data);
+    sessionState.out = System.out;
+    try {
+      CliSessionState.start(sessionState);
+      CliDriver cliDriver = new CliDriver();
+      cliDriver.processInitFiles(sessionState);
+      assertTrue(data.toString().contains(
+          "Putting the global hiverc in $HIVE_HOME/bin/.hiverc is deprecated. " +
" + + "Please use $HIVE_CONF_DIR/.hiverc instead.")); + FileUtils.write(homeFile, "bla bla bla"); + // if init file contains incorrect row + try { + cliDriver.processInitFiles(sessionState); + fail("should be exit"); + } catch (ExitException e) { + assertEquals(40000, e.getStatus()); + } + setEnv("HIVE_HOME", null); + try { + cliDriver.processInitFiles(sessionState); + fail("should be exit"); + } catch (ExitException e) { + assertEquals(40000, e.getStatus()); + } + + } finally { + // restore data + setEnv("HIVE_HOME", oldHiveHome); + setEnv("HIVE_CONF_DIR", oldHiveConfDir); + FileUtils.deleteDirectory(new File(tmpDir)); + } + + File f = File.createTempFile("hive", "test"); + FileUtils.write(f, "bla bla bla"); + try { + sessionState.initFiles = Arrays.asList(new String[] {f.getAbsolutePath()}); + CliDriver cliDriver = new CliDriver(); + cliDriver.processInitFiles(sessionState); + fail("should be exit"); + } catch (ExitException e) { + assertEquals(40000, e.getStatus()); + assertTrue(data.toString().contains("cannot recognize input near 'bla' 'bla' 'bla'")); + + } finally { + securityManager.resetSecurityManager(); + } + } + + + private static void setEnv(String key, String value) throws Exception { + Class[] classes = Collections.class.getDeclaredClasses(); + Map env = (Map) System.getenv(); + for (Class cl : classes) { + if ("java.util.Collections$UnmodifiableMap".equals(cl.getName())) { + Field field = cl.getDeclaredField("m"); + field.setAccessible(true); + Object obj = field.get(env); + Map map = (Map) obj; + if (value == null) { + map.remove(key); + } else { + map.put(key, value); + } + } + } + } + + + private static class FakeCliDriver extends CliDriver { + + @Override + protected ConsoleReader getConsoleReader() throws IOException { + ConsoleReader reslt = new FakeConsoleReader(); + return reslt; + } + + } + + private static class FakeConsoleReader extends ConsoleReader { + private int counter = 0; + File temp = null; + + public FakeConsoleReader() throws IOException { + super(); + + } + + @Override + public String readLine(String prompt) throws IOException { + FileWriter writer; + switch (counter++) { + case 0: + return "!echo test message;"; + case 1: + temp = File.createTempFile("hive", "test"); + temp.deleteOnExit(); + return "source " + temp.getAbsolutePath() + ";"; + case 2: + temp = File.createTempFile("hive", "test"); + temp.deleteOnExit(); + writer = new FileWriter(temp); + writer.write("bla bla bla"); + writer.close(); + return "list file file://" + temp.getAbsolutePath() + ";"; + case 3: + return "!echo "; + case 4: + return "test message;"; + case 5: + return "source fakeFile;"; + case 6: + temp = File.createTempFile("hive", "test"); + temp.deleteOnExit(); + writer = new FileWriter(temp); + writer.write("source fakeFile;"); + writer.close(); + return "list file file://" + temp.getAbsolutePath() + ";"; + + + // drop table over10k; + default: + return null; + } + } + } + + private static class NoExitSecurityManager extends SecurityManager { + + public SecurityManager parentSecurityManager; + + public NoExitSecurityManager() { + super(); + parentSecurityManager = System.getSecurityManager(); + System.setSecurityManager(this); + } + + @Override + public void checkPermission(Permission perm, Object context) { + if (parentSecurityManager != null) { + parentSecurityManager.checkPermission(perm, context); + } + } + + @Override + public void checkPermission(Permission perm) { + if (parentSecurityManager != null) { + parentSecurityManager.checkPermission(perm); + } + } + + 
+    @Override
+    public void checkExit(int status) {
+      throw new ExitException(status);
+    }
+
+    public void resetSecurityManager() {
+      System.setSecurityManager(parentSecurityManager);
+    }
+  }
+
+  private static class ExitException extends RuntimeException {
+    int status;
+
+    public ExitException(int status) {
+      this.status = status;
+    }
+
+    public int getStatus() {
+      return status;
+    }
+  }
+
+  private static class MyCliSessionState extends CliSessionState {
+
+    public enum ClientResult {
+      RETURN_OK, RETURN_SERVER_EXCEPTION, RETURN_T_EXCEPTION
+    };
+
+    private final ClientResult result;
+
+    public MyCliSessionState(HiveConf conf, ClientResult result) {
+      super(conf);
+      this.result = result;
+    }
+
+    @Override
+    public boolean isRemoteMode() {
+      return true;
+    }
+
+    @Override
+    public HiveClient getClient() {
+
+      HiveClient client = mock(HiveClient.class);
+      if (ClientResult.RETURN_OK.equals(this.result)) {
+        List<String> fetchResult = new ArrayList<String>(1);
+        fetchResult.add("test result");
+        try {
+          when(client.fetchN(anyInt())).thenReturn(fetchResult);
+        } catch (Exception e) {
+          // cannot happen while stubbing a mock
+        }
+      } else if (ClientResult.RETURN_SERVER_EXCEPTION.equals(this.result)) {
+        HiveServerException exception = new HiveServerException("test HiveServerException", 10,
+            "sql state");
+        try {
+          when(client.fetchN(anyInt())).thenThrow(exception);
+        } catch (TException e) {
+          // cannot happen while stubbing a mock
+        }
+      } else if (ClientResult.RETURN_T_EXCEPTION.equals(this.result)) {
+        TException exception = new TException("test TException");
+        try {
+          doThrow(exception).when(client).clean();
+          when(client.fetchN(anyInt())).thenThrow(exception);
+        } catch (TException e) {
+          e.printStackTrace();
+        }
+      }
+      return client;
+    }
+
+  }
+
 }
diff --git cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
new file mode 100644
index 0000000..a95e206
--- /dev/null
+++ cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.cli;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.net.Socket;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test CliSessionState
+ */
+public class TestCliSessionState {
+
+  private static TCPServer server;
+  private static String command = null;
+
+  @BeforeClass
+  public static void start() throws Exception {
+    // start fake server
+    server = new TCPServer();
+    Thread thread = new Thread(server);
+    thread.start();
+    // wait for the server to start
+    while (server.getPort() == 0) {
+      Thread.sleep(20);
+    }
+  }
+
+  @AfterClass
+  public static void stop() throws IOException {
+    server.stop();
+  }
+
+  /**
+   * test CliSessionState in remote mode
+   */
+  @Test
+  public void testConnect() throws Exception {
+    CliSessionState sessionState = new CliSessionState(new HiveConf());
+    sessionState.port = server.getPort();
+    sessionState.setHost(InetAddress.getLocalHost().getHostName());
+    // check connect
+    sessionState.connect();
+    assertTrue(sessionState.isRemoteMode());
+    assertEquals(server.getPort(), sessionState.getPort());
+    assertEquals(InetAddress.getLocalHost().getHostName(), sessionState.getHost());
+    assertNotNull(sessionState.getClient());
+    sessionState.close();
+    // close should send the "clean" command
+    assertEquals("clean", command);
+
+  }
+
+  /**
+   * test default db name
+   */
+  @Test
+  public void testGetDbName() throws Exception {
+    HiveConf configuration = new HiveConf();
+    CliSessionState sessionState = new CliSessionState(configuration);
+    assertEquals(MetaStoreUtils.DEFAULT_DATABASE_NAME, sessionState.getCurrentDbName());
+
+  }
+
+  /**
+   * fake hive server
+   */
+  private static class TCPServer implements Runnable {
+    private int port = 0;
+    private boolean stop = false;
+    private ServerSocket welcomeSocket;
+
+    public void run() {
+      try {
+        welcomeSocket = new ServerSocket(0);
+        port = welcomeSocket.getLocalPort();
+        while (!stop) {
+          byte[] buffer = new byte[512];
+          Socket connectionSocket = welcomeSocket.accept();
+          InputStream input = connectionSocket.getInputStream();
+          OutputStream output = connectionSocket.getOutputStream();
+          int read = input.read(buffer);
+          // extract the command, stripping the Thrift framing bytes
+          command = new String(buffer, 8, read - 13);
+          // echo the request back to the client
+          output.write(buffer, 0, read);
+        }
+      } catch (IOException e) {
+        // socket closed by stop(); fall through and end the thread
+      }
+
+    }
+
+    public int getPort() {
+      return port;
+    }
+
+    public void stop() throws IOException {
+      stop = true;
+      welcomeSocket.close();
+    }
+  }
+}
diff --git cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
new file mode 100644
index 0000000..577a8dd
--- /dev/null
+++ cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.cli;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.junit.Test;
+
+/**
+ * test class OptionsProcessor
+ */
+public class TestOptionsProcessor {
+
+  /**
+   * test parsing of command-line parameters for Hive
+   */
+  @Test
+  public void testOptionsProcessor() {
+    OptionsProcessor processor = new OptionsProcessor();
+    System.clearProperty("hiveconf");
+    System.clearProperty("define");
+    System.clearProperty("hivevar");
+    assertNull(System.getProperty("_A"));
+    String[] args = {"-hiveconf", "_A=B", "-define", "C=D", "-hivevar", "X=Y",
+        "-S", "true", "-database", "testDb", "-e", "execString", "-v", "true",
+        "-h", "yahoo.host", "-p", "3000"};
+
+    // stage 1
+    assertTrue(processor.process_stage1(args));
+    assertEquals("B", System.getProperty("_A"));
+    assertEquals("D", processor.getHiveVariables().get("C"));
+    assertEquals("Y", processor.getHiveVariables().get("X"));
+
+    CliSessionState sessionState = new CliSessionState(new HiveConf());
+    // stage 2
+    processor.process_stage2(sessionState);
+    assertEquals("testDb", sessionState.database);
+    assertEquals("execString", sessionState.execString);
+    assertEquals("yahoo.host", sessionState.host);
+    assertEquals(3000, sessionState.port);
+    assertEquals(0, sessionState.initFiles.size());
+    assertTrue(sessionState.getIsVerbose());
+    sessionState.setConf(null);
+    assertTrue(sessionState.getIsSilent());
+
+  }
+
+  /**
+   * Test set fileName
+   */
+  @Test
+  public void testFiles() {
+    OptionsProcessor processor = new OptionsProcessor();
+
+    String[] args = {"-i", "f1", "-i", "f2", "-f", "fileName"};
+    assertTrue(processor.process_stage1(args));
+
+    CliSessionState sessionState = new CliSessionState(new HiveConf());
+    processor.process_stage2(sessionState);
+    assertEquals("fileName", sessionState.fileName);
+    assertEquals(2, sessionState.initFiles.size());
+    assertEquals("f1", sessionState.initFiles.get(0));
+    assertEquals("f2", sessionState.initFiles.get(1));
+
+  }
+}
diff --git cli/src/test/org/apache/hadoop/hive/cli/TestRCFileCat.java cli/src/test/org/apache/hadoop/hive/cli/TestRCFileCat.java
new file mode 100644
index 0000000..7ee02e8
--- /dev/null
+++ cli/src/test/org/apache/hadoop/hive/cli/TestRCFileCat.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.cli;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hive.ql.io.RCFile;
+import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
+import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
+import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.DefaultCodec;
+import org.junit.Test;
+
+/**
+ * test RCFileCat
+ */
+public class TestRCFileCat {
+
+  /**
+   * test parsing of an RCFile
+   */
+  @Test
+  public void testRCFileCat() throws Exception {
+    File template = File.createTempFile("hive", "tmpTest");
+    Configuration configuration = new Configuration();
+
+    byte[][] record_1 = {Bytes.toBytes("123"), Bytes.toBytes("456"),
+        Bytes.toBytes("789"), Bytes.toBytes("1000"), Bytes.toBytes("5.3"),
+        Bytes.toBytes("hive and hadoop"), new byte[0], Bytes.toBytes("NULL")};
+    byte[][] record_2 = {Bytes.toBytes("100"), Bytes.toBytes("200"),
+        Bytes.toBytes("123"), Bytes.toBytes("1000"), Bytes.toBytes("5.3"),
+        Bytes.toBytes("hive and hadoop"), new byte[0], Bytes.toBytes("NULL")};
+    byte[][] record_3 = {Bytes.toBytes("200"), Bytes.toBytes("400"),
+        Bytes.toBytes("678"), Bytes.toBytes("1000"), Bytes.toBytes("4.8"),
+        Bytes.toBytes("hive and hadoop"), new byte[0], Bytes.toBytes("TEST")};
+
+    RCFileOutputFormat.setColumnNumber(configuration, 8);
+
+    Path file = new Path(template.getAbsolutePath());
+
+    FileSystem fs = FileSystem.getLocal(configuration);
+    RCFile.Writer writer = new RCFile.Writer(fs, configuration, file, null,
+        RCFile.createMetadata(new Text("apple"), new Text("block"), new Text("cat"),
+            new Text("dog")), new DefaultCodec());
+    write(writer, record_1);
+    write(writer, record_2);
+    write(writer, record_3);
+    writer.close();
+
+    RCFileCat fileCat = new RCFileCat();
+    RCFileCat.test = true;
+    fileCat.setConf(new Configuration());
+
+    // set fake input and output streams
+    PrintStream oldOutPrintStream = System.out;
+    PrintStream oldErrPrintStream = System.err;
+    ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
+    ByteArrayOutputStream dataErr = new ByteArrayOutputStream();
+    System.setOut(new PrintStream(dataOut));
+    System.setErr(new PrintStream(dataErr));
+
+    try {
+      String[] params = {"--verbose", "file://" + template.getAbsolutePath()};
+
+      assertEquals(0, fileCat.run(params));
+      assertTrue(dataOut.toString().contains("123\t456\t789\t1000\t5.3\thive and hadoop\t\tNULL"));
+      assertTrue(dataOut.toString().contains("100\t200\t123\t1000\t5.3\thive and hadoop\t\tNULL"));
+      assertTrue(dataOut.toString().contains("200\t400\t678\t1000\t4.8\thive and hadoop\t\tTEST"));
+      dataOut.reset();
+      params = new String[] {"--start=-10", "--file-sizes", "file://" + template.getAbsolutePath()};
+      assertEquals(0, fileCat.run(params));
+      assertTrue(dataOut.toString().contains(
+          "File size (uncompressed): 105. File size (compressed): 134. Number of rows: 3."));
+      dataOut.reset();
+
+      params = new String[] {"--start=0", "--column-sizes", "file://" + template.getAbsolutePath()};
+      assertEquals(0, fileCat.run(params));
+      assertTrue(dataOut.toString().contains("0\t9\t17"));
+      assertTrue(dataOut.toString().contains("1\t9\t17"));
+      assertTrue(dataOut.toString().contains("2\t9\t17"));
+      assertTrue(dataOut.toString().contains("3\t12\t14"));
+      assertTrue(dataOut.toString().contains("4\t9\t17"));
+      assertTrue(dataOut.toString().contains("5\t45\t26"));
+
+      dataOut.reset();
+      params = new String[] {"--start=0", "--column-sizes-pretty",
+          "file://" + template.getAbsolutePath()};
+      assertEquals(0, fileCat.run(params));
+      assertTrue(dataOut.toString().contains("Column 0: Uncompressed size: 9 Compressed size: 17"));
+      assertTrue(dataOut.toString().contains("Column 1: Uncompressed size: 9 Compressed size: 17"));
+      assertTrue(dataOut.toString().contains("Column 2: Uncompressed size: 9 Compressed size: 17"));
+      assertTrue(dataOut.toString().contains("Column 3: Uncompressed size: 12 Compressed size: 14"));
+      assertTrue(dataOut.toString().contains("Column 4: Uncompressed size: 9 Compressed size: 17"));
+      assertTrue(dataOut.toString().contains("Column 5: Uncompressed size: 45 Compressed size: 26"));
+
+      params = new String[] {};
+      assertEquals(-1, fileCat.run(params));
+      assertTrue(dataErr.toString().contains("RCFileCat [--start=start_offet] [--length=len] [--verbose] "
+          + "[--column-sizes | --column-sizes-pretty] [--file-sizes] fileName"));
+
+      dataErr.reset();
+      params = new String[] {"--fakeParameter", "file://" + template.getAbsolutePath()};
+      assertEquals(-1, fileCat.run(params));
+      assertTrue(dataErr.toString().contains("RCFileCat [--start=start_offet] [--length=len] [--verbose] "
+          + "[--column-sizes | --column-sizes-pretty] [--file-sizes] fileName"));
+
+    } finally {
+      // restore input and output streams
+      System.setOut(oldOutPrintStream);
+      System.setErr(oldErrPrintStream);
+    }
+
+  }
+
+  private void write(RCFile.Writer writer, byte[][] record) throws IOException {
+    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record.length);
+    for (int i = 0; i < record.length; i++) {
+      BytesRefWritable cu = new BytesRefWritable(record[i], 0, record[i].length);
+      bytes.set(i, cu);
+    }
+    writer.append(bytes);
+
+  }
+}
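
Reviewer note (not part of the patch): several of the new tests (testQuit, testProcessSelectDatabase, testProcessInitFiles) depend on installing a SecurityManager whose checkExit throws, so System.exit() inside CliDriver becomes a catchable exception instead of killing the test JVM. The pattern, distilled into a standalone illustrative sketch; the patch's NoExitSecurityManager additionally delegates all other permission checks to the previously installed manager:

    // Illustrative sketch only: trap System.exit in a test.
    public class ExitTrapExample {
      public static void main(String[] args) {
        SecurityManager original = System.getSecurityManager();
        System.setSecurityManager(new SecurityManager() {
          @Override
          public void checkPermission(java.security.Permission perm) {
            // permit everything else; the patch delegates to the parent manager instead
          }

          @Override
          public void checkExit(int status) {
            // turn the JVM shutdown request into a catchable exception
            throw new SecurityException("exit(" + status + ") trapped");
          }
        });
        try {
          System.exit(40000); // would normally terminate the JVM
        } catch (SecurityException e) {
          System.out.println("caught: " + e.getMessage()); // a real test asserts on the status
        } finally {
          System.setSecurityManager(original); // always restore, as the tests do
        }
      }
    }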