diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
index 1894ba09f3..325831e2eb 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.reexec.ReExecDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.After;
 import org.junit.Before;
@@ -48,8 +49,8 @@ public void tearDown() throws Exception {
 
   @Test
   public void testUdfWithLocalResource() throws Exception {
-    int rc = driver.compile("CREATE FUNCTION " + funcName + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' "
-        + " using file '" + "file:///tmp/udf1.jar'");
+    int rc = ((ReExecDriver)driver).compile("CREATE FUNCTION " + funcName + " AS "
+        + "'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' using file '" + "file:///tmp/udf1.jar'", true);
     assertEquals(0, rc);
     WriteEntity outputEntities[] = driver.getPlan().getOutputs().toArray(new WriteEntity[] {});
     assertEquals(outputEntities.length, 3);
@@ -66,8 +67,8 @@ public void testUdfWithLocalResource() throws Exception {
 
   @Test
   public void testUdfWithDfsResource() throws Exception {
-    int rc = driver.compile("CREATE FUNCTION default." + funcName + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' "
-        + " using file '" + "hdfs:///tmp/udf1.jar'");
+    int rc = ((ReExecDriver)driver).compile("CREATE FUNCTION default." + funcName + " AS "
+        + "'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' using file '" + "hdfs:///tmp/udf1.jar'", true);
     assertEquals(0, rc);
     WriteEntity outputEntities[] = driver.getPlan().getOutputs().toArray(new WriteEntity[] {});
     assertEquals(outputEntities.length, 3);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
index 191d5f5a8a..137bedd972 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
@@ -27,7 +27,6 @@
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistic;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticsNames;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.junit.Assert;
@@ -35,7 +34,6 @@
 
 import java.io.File;
 import java.net.MalformedURLException;
-import java.net.URL;
 import java.nio.file.Paths;
 import java.util.List;
 import java.util.Map;
@@ -62,7 +60,7 @@ public void testSparkStatistics() throws MalformedURLException {
         null, null);
 
     Assert.assertEquals(0, driver.run("create table test (col int)").getResponseCode());
-    Assert.assertEquals(0, driver.compile("select * from test order by col"));
+    Assert.assertEquals(0, driver.compile("select * from test order by col", true));
 
     List<SparkTask> sparkTasks = Utilities.getSparkTasks(driver.getPlan().getRootTasks());
     Assert.assertEquals(1, sparkTasks.size());
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index 5c803cacd8..a897dc6ab8 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -135,7 +135,7 @@ public void testInputSomeColumnsUsed() throws Exception {
     reset(mockedAuthorizer);
     int status = driver.compile("select i from " + tableName
-        + " where k = 'X' and city = 'Scottsdale-AZ' ");
+        + " where k = 'X' and city = 'Scottsdale-AZ' ", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -151,7 +151,7 @@ public void testInputSomeColumnsUsedView() throws Exception {
     reset(mockedAuthorizer);
     int status = driver.compile("select i from " + viewName
-        + " where k = 'X' and city = 'Scottsdale-AZ' ");
+        + " where k = 'X' and city = 'Scottsdale-AZ' ", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -168,7 +168,7 @@ public void testInputSomeColumnsUsedJoin() throws Exception {
     reset(mockedAuthorizer);
     int status = driver.compile("select " + viewName + ".i, " + tableName + ".city from "
        + viewName + " join " + tableName + " on " + viewName + ".city = " + tableName
-        + ".city where " + tableName + ".k = 'X'");
+        + ".city where " + tableName + ".k = 'X'", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -194,7 +194,7 @@ public void testInputSomeColumnsUsedJoin() throws Exception {
   public void testInputAllColumnsUsed() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("select * from " + tableName + " order by i");
+    int status = driver.compile("select * from " + tableName + " order by i", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -220,7 +220,7 @@ public void testCreateViewWithDb() throws Exception {
   private void checkCreateViewOrTableWithDb(String newTable, String cmd)
       throws HiveAuthzPluginException, HiveAccessControlException {
     reset(mockedAuthorizer);
-    int status = driver.compile(cmd);
+    int status = driver.compile(cmd, true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
@@ -248,7 +248,7 @@ private void assertEqualsIgnoreCase(String msg, String expected, String actual)
   public void testInputNoColumnsUsed() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("describe " + tableName);
+    int status = driver.compile("describe " + tableName, true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -263,7 +263,7 @@ public void testPermFunction() throws Exception {
     reset(mockedAuthorizer);
     final String funcName = "testauthfunc1";
     int status = driver.compile("create function " + dbName + "." + funcName
-        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
+        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
@@ -292,7 +292,7 @@ public void testPermFunction() throws Exception {
 
     // Verify privilege objects
     reset(mockedAuthorizer);
-    status = driver.compile("select " + dbName + "." + funcName + "() , i from " + tableName);
+    status = driver.compile("select " + dbName + "." + funcName + "() , i from " + tableName, true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -322,7 +322,7 @@ public void testPermFunction() throws Exception {
     // try using 2nd permanent function and verify its only 2nd one that shows up
     // for auth
     reset(mockedAuthorizer);
-    status = driver.compile("select " + dbName + "." + funcName2 + "(i) from " + tableName);
+    status = driver.compile("select " + dbName + "." + funcName2 + "(i) from " + tableName, true);
     assertEquals(0, status);
 
     inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -345,7 +345,7 @@ public void testPermFunction() throws Exception {
     // try using both permanent functions
     reset(mockedAuthorizer);
     status = driver.compile(
-        "select " + dbName + "." + funcName2 + "(i), " + dbName + "." + funcName + "(), j from " + tableName);
+        "select " + dbName + "." + funcName2 + "(i), " + dbName + "." + funcName + "(), j from " + tableName, true);
     assertEquals(0, status);
 
     inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -376,7 +376,7 @@ public void testTempFunction() throws Exception {
     reset(mockedAuthorizer);
     final String funcName = "testAuthFunc2";
     int status = driver.compile("create temporary function " + funcName
-        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
+        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
@@ -409,12 +409,12 @@ public void testTempTable() throws Exception {
       assertEquals("output count", 1, outputs.size());
       assertEquals("output type", HivePrivilegeObjectType.DATABASE, outputs.get(0).getType());
 
-      status = driver.compile("select * from " + tableName);
+      status = driver.compile("select * from " + tableName, true);
       assertEquals(0, status);
     }
     { // select from the temp table
       reset(mockedAuthorizer);
-      int status = driver.compile("insert into " + tableName + " values(1)");
+      int status = driver.compile("insert into " + tableName + " values(1)", true);
       assertEquals(0, status);
 
       // temp tables should be skipped from authorization
@@ -428,7 +428,7 @@ public void testTempTable() throws Exception {
     }
     { // select from the temp table
       reset(mockedAuthorizer);
-      int status = driver.compile("select * from " + tableName);
+      int status = driver.compile("select * from " + tableName, true);
       assertEquals(0, status);
 
       // temp tables should be skipped from authorization
@@ -450,7 +450,7 @@ public void testTempTableImplicit() throws Exception {
     assertEquals(0, status);
 
     reset(mockedAuthorizer);
-    status = driver.compile("insert into " + tableName + " values (1)");
+    status = driver.compile("insert into " + tableName + " values (1)", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -460,7 +460,7 @@ public void testTempTableImplicit() throws Exception {
     assertEquals("input count", 0, inputs.size());
 
     reset(mockedAuthorizer);
-    status = driver.compile("select * from " + tableName);
+    status = driver.compile("select * from " + tableName, true);
     assertEquals(0, status);
 
     inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -480,7 +480,7 @@ private String getDefaultTmp() {
   @Test
   public void testUpdateSomeColumnsUsed() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3");
+    int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3", true);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -499,7 +499,7 @@ public void testUpdateSomeColumnsUsed() throws Exception {
   @Test
   public void testUpdateSomeColumnsUsedExprInSet() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("update " + acidTableName + " set i = 5, j = k where j = 3");
+    int status = driver.compile("update " + acidTableName + " set i = 5, j = k where j = 3", true);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -520,7 +520,7 @@ public void testUpdateSomeColumnsUsedExprInSet() throws Exception {
   @Test
   public void testDelete() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("delete from " + acidTableName + " where j = 3");
+    int status = driver.compile("delete from " + acidTableName + " where j = 3", true);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -534,7 +534,7 @@ public void testDelete() throws Exception {
   @Test
   public void testShowTables() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("show tables");
+    int status = driver.compile("show tables", true);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -547,7 +547,7 @@ public void testShowTables() throws Exception {
   @Test
   public void testDescDatabase() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("describe database " + dbName);
+    int status = driver.compile("describe database " + dbName, true);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -574,7 +574,7 @@ private void resetAuthorizer() throws HiveAuthzPluginException, HiveAccessContro
   public void testReplDump() throws Exception {
     resetAuthorizer();
-    int status = driver.compile("repl dump " + dbName);
+    int status = driver.compile("repl dump " + dbName, true);
     assertEquals(0, status);
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
     HivePrivilegeObject dbObj = inputs.get(0);
@@ -582,7 +582,7 @@ public void testReplDump() throws Exception {
     assertEquals("db name", dbName.toLowerCase(), dbObj.getDbname());
 
     resetAuthorizer();
-    status = driver.compile("repl dump " + dbName + ".'" + inDbTableName + "'");
+    status = driver.compile("repl dump " + dbName + ".'" + inDbTableName + "'", true);
     assertEquals(0, status);
     inputs = getHivePrivilegeObjectInputs().getLeft();
     dbObj = inputs.get(0);
@@ -627,7 +627,8 @@ private void checkSingleViewInput(List<HivePrivilegeObject> inputs) {
         inputsCapturer.capture(), outputsCapturer.capture(),
         any(HiveAuthzContext.class));
 
-    return new ImmutablePair(inputsCapturer.getValue(), outputsCapturer.getValue());
+    return new ImmutablePair<List<HivePrivilegeObject>, List<HivePrivilegeObject>>(
+        inputsCapturer.getValue(), outputsCapturer.getValue());
   }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 2eb65918c9..043c97669a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -55,7 +55,6 @@
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
-import org.apache.hadoop.hive.metastore.ColumnType;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -123,9 +122,6 @@
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.wm.WmContext;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.mapred.ClusterStatus;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hive.common.util.ShutdownHookManager;
 import org.apache.hive.common.util.TxnIdUtils;
@@ -143,12 +139,12 @@
   static final private String CLASS_NAME = Driver.class.getName();
   private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   static final private LogHelper console = new LogHelper(LOG);
-  static final int SHUTDOWN_HOOK_PRIORITY = 0;
+  private static final int SHUTDOWN_HOOK_PRIORITY = 0;
 
   private final QueryInfo queryInfo;
   private Runnable shutdownRunner = null;
   private int maxRows = 100;
-  ByteStream.Output bos = new ByteStream.Output();
+  private ByteStream.Output bos = new ByteStream.Output();
 
   private final HiveConf conf;
   private DataInput resStream;
@@ -161,7 +157,7 @@
   private Throwable downstreamError;
 
   private FetchTask fetchTask;
-  List<HiveLock> hiveLocks = new ArrayList<HiveLock>();
+  private List<HiveLock> hiveLocks = new ArrayList<HiveLock>();
 
   // A limit on the number of threads that can be launched
   private int maxthreads;
@@ -271,24 +267,6 @@ private boolean checkConcurrency() {
     return true;
   }
 
-  /**
-   * Return the status information about the Map-Reduce cluster
-   */
-  public ClusterStatus getClusterStatus() throws Exception {
-    ClusterStatus cs;
-    try {
-      JobConf job = new JobConf(conf);
-      JobClient jc = new JobClient(job);
-      cs = jc.getClusterStatus();
-    } catch (Exception e) {
-      e.printStackTrace();
-      throw e;
-    }
-    LOG.info("Returning cluster status: " + cs.toString());
-    return cs;
-  }
-
-
   @Override
   public Schema getSchema() {
     return schema;
@@ -358,37 +336,6 @@ private static Schema getSchema(BaseSemanticAnalyzer sem, HiveConf conf) {
     return schema;
   }
 
-  /**
-   * Get a Schema with fields represented with Thrift DDL types
-   */
-  public Schema getThriftSchema() throws Exception {
-    Schema schema;
-    try {
-      schema = getSchema();
-      if (schema != null) {
-        List<FieldSchema> lst = schema.getFieldSchemas();
-        // Go over the schema and convert type to thrift type
-        if (lst != null) {
-          for (FieldSchema f : lst) {
-            f.setType(ColumnType.typeToThriftType(f.getType()));
-          }
-        }
-      }
-    } catch (Exception e) {
-      e.printStackTrace();
-      throw e;
-    }
-    LOG.info("Returning Thrift schema: " + schema);
-    return schema;
-  }
-
-  /**
-   * Return the maximum number of rows returned by getResults
-   */
-  public int getMaxRows() {
-    return maxRows;
-  }
-
   /**
    * Set the maximum number of rows returned by getResults
    */
@@ -397,17 +344,11 @@ public void setMaxRows(int maxRows) {
     this.maxRows = maxRows;
   }
 
+  @VisibleForTesting
   public Driver(HiveConf conf) {
     this(new QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build(), null);
   }
 
-  // Pass lineageState when a driver instantiates another Driver to run
-  // or compile another query
-  // NOTE: only used from index related classes
-  public Driver(HiveConf conf, LineageState lineageState) {
-    this(getNewQueryState(conf, lineageState), null);
-  }
-
   // Pass lineageState when a driver instantiates another Driver to run
   // or compile another query
   public Driver(HiveConf conf, Context ctx, LineageState lineageState) {
@@ -452,18 +393,6 @@ private static QueryState getNewQueryState(HiveConf conf, LineageState lineageSt
         .build();
   }
 
-  /**
-   * Compile a new query. Any currently-planned query associated with this Driver is discarded.
-   * Do not reset id for inner queries(index, etc). Task ids are used for task identity check.
-   *
-   * @param command
-   *          The SQL query to compile.
-   */
-  @Override
-  public int compile(String command) {
-    return compile(command, true);
-  }
-
   /**
    * Compile a new query, but potentially reset taskID counter. Not resetting task counter
    * is useful for generating re-entrant QL queries.
@@ -2585,11 +2514,6 @@ public void destroy() {
     ShutdownHookManager.removeShutdownHook(shutdownRunner);
   }
 
-
-  public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan() throws IOException {
-    return plan.getQueryPlan();
-  }
-
   public String getErrorMsg() {
     return errorMessage;
   }
@@ -2642,7 +2566,7 @@ public boolean hasResultSet() {
     }
   }
 
-  public void setCompactionWriteIds(ValidWriteIdList val, long compactorTxnId) {
+  void setCompactionWriteIds(ValidWriteIdList val, long compactorTxnId) {
     this.compactionWriteIds = val;
     this.compactorTxnId = compactorTxnId;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/IDriver.java ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
index e44e6a3913..b61822417d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
@@ -36,8 +36,6 @@
 @InterfaceStability.Unstable
 public interface IDriver extends CommandProcessor {
 
-  int compile(String string);
-
   CommandProcessorResponse compileAndRespond(String statement);
 
   QueryPlan getPlan();
diff --git ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
index ab5c66b151..e8bf9dcd15 100644
--- ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.reexec;
 
 import java.io.IOException;
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -87,7 +86,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, List> r
   private static final Logger LOG = LoggerFactory.getLogger(ReExecDriver.class);
 
   private boolean explainReOptimization;
-  protected Driver coreDriver;
+  private Driver coreDriver;
   private QueryState queryState;
   private String currentQuery;
   private int executionIndex;
@@ -99,7 +98,7 @@ public HiveConf getConf() {
     return queryState.getConf();
   }
 
-  public boolean firstExecution() {
+  private boolean firstExecution() {
     return executionIndex == 0;
   }
 
@@ -115,9 +114,8 @@ public ReExecDriver(QueryState queryState, String userName, QueryInfo queryInfo,
     }
   }
 
-  @Override
-  public int compile(String string) {
-    return coreDriver.compile(string);
+  public int compile(String command, boolean resetTaskIds) {
+    return coreDriver.compile(command, resetTaskIds);
   }
 
@@ -223,7 +221,7 @@ public CommandProcessorResponse run(String command) {
     return run();
   }
 
-  protected void prepareToReExecute() {
+  private void prepareToReExecute() {
     for (IReExecutionPlugin p : plugins) {
       p.prepareToReExecute();
     }
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java
index f42cffd8b2..0b61a1a155 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java
@@ -25,7 +25,7 @@
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.reexec.ReExecDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -71,7 +71,7 @@ public void testSetJobGroupAndDescription() throws Exception {
       Assert.assertEquals(0, driver.run("create table test (col int)").getResponseCode());
 
       String query = "select * from test order by col";
-      driver.compile(query);
+      ((ReExecDriver)driver).compile(query, true);
 
       List<SparkTask> sparkTasks = Utilities.getSparkTasks(driver.getPlan().getRootTasks());
       Assert.assertEquals(1, sparkTasks.size());
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java
index ef02a292b6..5badabfd5e 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveKey;
+import org.apache.hadoop.hive.ql.reexec.ReExecDriver;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -77,7 +78,7 @@ public void testSetRDDCallSite() throws Exception {
       driver = DriverFactory.newDriver(conf);
       Assert.assertEquals(0, driver.run("create table test (col int)").getResponseCode());
 
-      driver.compile("select * from test order by col");
+      ((ReExecDriver)driver).compile("select * from test order by col", true);
 
       List<SparkTask> sparkTasks = Utilities.getSparkTasks(driver.getPlan().getRootTasks());
       Assert.assertEquals(1, sparkTasks.size());
diff --git ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
index c1081f2514..b38b128061 100644
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
@@ -77,7 +77,7 @@ public void testQueryRedactor() throws Exception {
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
             "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
     Driver driver = createDriver(conf);
-    int ret = driver.compile("select 'XXX' from t1");
+    int ret = driver.compile("select 'XXX' from t1", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals("select 'AAA' from t1", conf.getQueryString());
   }
diff --git ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
index 005d420e5a..1a1a7b14a6 100644
--- ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
+++ ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
@@ -59,7 +59,6 @@
 /**
  * Unittest for SymlinkTextInputFormat.
  */
-@SuppressWarnings("deprecation")
 public class TestSymlinkTextInputFormat {
   private static final Logger log =
       LoggerFactory.getLogger(TestSymlinkTextInputFormat.class);
@@ -113,22 +112,17 @@ public void tearDown() throws IOException {
   public void testCombine() throws Exception {
     JobConf newJob = new JobConf(job);
     FileSystem fs = dataDir1.getFileSystem(newJob);
-    int symbolLinkedFileSize = 0;
 
     Path dir1_file1 = new Path(dataDir1, "combinefile1_1");
     writeTextFile(dir1_file1,
                   "dir1_file1_line1\n" +
                   "dir1_file1_line2\n");
 
-    symbolLinkedFileSize += fs.getFileStatus(dir1_file1).getLen();
-
     Path dir2_file1 = new Path(dataDir2, "combinefile2_1");
     writeTextFile(dir2_file1,
                   "dir2_file1_line1\n" +
                   "dir2_file1_line2\n");
 
-    symbolLinkedFileSize += fs.getFileStatus(dir2_file1).getLen();
-
     // A symlink file, contains first file from first dir and second file from
     // second dir.
     writeSymlinkFile(
@@ -172,7 +166,7 @@ public void testCombine() throws Exception {
       }
 
       String cmd = "select key*1 from " + tblName;
-      ecode = drv.compile(cmd);
+      ecode = drv.compile(cmd, true);
       if (ecode != 0) {
         throw new Exception("Select compile: " + cmd
             + " failed with exit code= " + ecode);
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
index d2b93278c6..594688bad3 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
@@ -59,7 +59,7 @@ public static void Teardown() throws Exception {
   public void testQueryTable1() throws ParseException {
     String query = "select * from t1";
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertEquals("Checking command success", 0, rc);
     QueryPlan plan = driver.getPlan();
     // check access columns from ColumnAccessInfo
@@ -83,7 +83,7 @@ public void testJoinTable1AndTable2() throws ParseException {
     String query = "select * from t1 join t2 on (t1.id1 = t2.id1)";
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertEquals("Checking command success", 0, rc);
     QueryPlan plan = driver.getPlan();
     // check access columns from ColumnAccessInfo
@@ -120,7 +120,7 @@ public void testJoinTable1AndTable2() throws ParseException {
   public void testJoinView1AndTable2() throws ParseException {
     String query = "select * from v1 join t2 on (v1.id1 = t2.id1)";
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertEquals("Checking command success", 0, rc);
     QueryPlan plan = driver.getPlan();
     // check access columns from ColumnAccessInfo
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
index 0d87662568..50967637e7 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
@@ -57,7 +57,7 @@ public void testDecimalType3() throws ParseException {
     String query = "create table `dec` (d decimal(66,7))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal precision out of allowed range [1,38]"));
@@ -68,7 +68,7 @@ public void testDecimalType4() throws ParseException {
     String query = "create table `dec` (d decimal(0,7))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal precision out of allowed range [1,38]"));
@@ -79,7 +79,7 @@ public void testDecimalType5() throws ParseException {
     String query = "create table `dec` (d decimal(7,33))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal scale must be less than or equal to precision"));
@@ -90,7 +90,7 @@ public void testDecimalType6() throws ParseException {
     String query = "create table `dec` (d decimal(7,-1))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("extraneous input '-' expecting Number"));
@@ -101,7 +101,7 @@ public void testDecimalType7() throws ParseException {
     String query = "create table `dec` (d decimal(7,33,4))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("missing ) at ',' near ',' in column name or constraint"));
@@ -112,7 +112,7 @@ public void testDecimalType8() throws ParseException {
     String query = "create table `dec` (d decimal(7a))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("mismatched input '7a' expecting Number near '('"));
@@ -123,7 +123,7 @@ public void testDecimalType9() throws ParseException {
     String query = "create table `dec` (d decimal(20,23))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal scale must be less than or equal to precision"));
@@ -142,7 +142,7 @@ private Driver createDriver() {
   private String getColumnType(String query) {
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
 
     if (rc != 0) {
       return null;
diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
index 88edc12385..51f610d9eb 100644
--- ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
+++ ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
@@ -76,7 +76,7 @@ public void setup() {
   @Test
   public void testSelectEntityDirect() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from t1");
+    int ret = driver.compile("select * from t1", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals(1, CheckInputReadEntityDirect.readEntities.size());
     assertTrue("isDirect", CheckInputReadEntityDirect.readEntities.iterator().next().isDirect());
@@ -90,7 +90,7 @@ public void testSelectEntityDirect() throws ParseException {
   @Test
   public void testSelectEntityInDirect() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from v1");
+    int ret = driver.compile("select * from v1", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
@@ -113,7 +113,7 @@ public void testSelectEntityInDirect() throws ParseException {
   @Test
   public void testSelectEntityViewDirectJoin() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from v1 join t1 on (v1.i = t1.i)");
+    int ret = driver.compile("select * from v1 join t1 on (v1.i = t1.i)", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
@@ -136,7 +136,7 @@ public void testSelectEntityViewDirectJoin() throws ParseException {
   @Test
   public void testSelectEntityViewDirectUnion() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1");
+    int ret = driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
@@ -158,7 +158,7 @@ public void testSelectEntityViewDirectUnion() throws ParseException {
   @Test
   public void testSelectEntityInDirectJoinAlias() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)");
+    int ret = driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
index 6d86ca200d..97ef3c4de3 100644
--- ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
+++ ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
@@ -93,7 +93,7 @@ public void testUnionView() throws Exception {
         .getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view1 );
+    driver.compile("select * from " + view1, true);
     // view entity
     assertEquals("default@" + view1, CheckInputReadEntity.readEntities[0].getName());
 
@@ -128,7 +128,7 @@ public void testViewInSubQuery() throws Exception {
     ret = driver.run("create view " + view1 + " as select * from " + tab1).getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view1 );
+    driver.compile("select * from " + view1, true);
     // view entity
     assertEquals("default@" + view1, CheckInputReadEntity.readEntities[0].getName());
 
@@ -204,7 +204,7 @@ private void testViewInSubQueryWithWhereClause() {
     assertEquals("Checking command success", 0, ret);
 
     //select from view2
-    driver.compile("select * from " + view2);
+    driver.compile("select * from " + view2, true);
 
     //verify that only view2 is direct input in above query
     ReadEntity[] readEntities = CheckInputReadEntity.readEntities;
@@ -243,7 +243,7 @@ public void testSubQueryInSubView() throws Exception {
     ret = driver.run("create view " + view2 + " as select * from (select * from " + view1 + ") x")
         .getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view2);
+    driver.compile("select * from " + view2, true);
     // view entity
     assertEquals("default@" + view2, CheckInputReadEntity.readEntities[0].getName());
 
@@ -277,7 +277,7 @@ public void testUnionAllInSubView() throws Exception {
     ret = driver.run("create view " + view2 + " as select * from (select * from " + view1
        + " union all select * from " + view1 + ") x").getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view2);
+    driver.compile("select * from " + view2, true);
     // view entity
     assertEquals("default@" + view2, CheckInputReadEntity.readEntities[0].getName());
diff --git ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
index 9b50a0d92a..0f45c81497 100644
--- ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
+++ ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
@@ -99,7 +99,7 @@ public static void afterTests() throws Exception {
   @Test
   public void testOwnerNames() throws Exception {
     reset(mockedAuthorizer);
-    driver.compile("create table default.t1 (name string)");
+    driver.compile("create table default.t1 (name string)", true);
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
 
     boolean containsDBOwnerName = false;
@@ -129,7 +129,7 @@ public void testOwnerNames() throws Exception {
   @Test
   public void testOwnerType() throws Exception {
     reset(mockedAuthorizer);
-    driver.compile("create table default.t1 (name string)");
+    driver.compile("create table default.t1 (name string)", true);
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
 
     boolean containsOwnerType = false;
@@ -164,7 +164,8 @@ public void testOwnerType() throws Exception {
         .checkPrivileges(any(HiveOperationType.class), inputsCapturer.capture(), outputsCapturer.capture(),
            any(HiveAuthzContext.class));
 
-    return new ImmutablePair(inputsCapturer.getValue(), outputsCapturer.getValue());
+    return new ImmutablePair<List<HivePrivilegeObject>, List<HivePrivilegeObject>>(
+        inputsCapturer.getValue(), outputsCapturer.getValue());
   }
 }