Index: data/conf/hive-site.xml
===================================================================
--- data/conf/hive-site.xml (revision 965383)
+++ data/conf/hive-site.xml (working copy)
@@ -129,7 +129,7 @@
<name>hive.exec.pre.hooks</name>
- <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter</value>
+ <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter, org.apache.hadoop.hive.ql.hooks.EnforceReadOnlyTables</value>
<description>Pre Execute Hook for Tests</description>
Index: ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out
===================================================================
--- ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out (revision 965383)
+++ ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out (working copy)
@@ -8,7 +8,6 @@
DROP TABLE T1
POSTHOOK: type: DROPTABLE
-POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T1(name STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE T1(name STRING) STORED AS TEXTFILE
Index: ql/src/test/results/clientnegative/invalid_t_alter2.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_alter2.q.out (revision 965383)
+++ ql/src/test/results/clientnegative/invalid_t_alter2.q.out (working copy)
@@ -1,4 +1,6 @@
PREHOOK: query: CREATE TABLE alter_test (d STRING)
PREHOOK: type: CREATETABLE
-FAILED: Error in metadata: AlreadyExistsException(message:Table alter_test already exists)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+POSTHOOK: query: CREATE TABLE alter_test (d STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@alter_test
+FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead.
Index: ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java (revision 0)
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java (revision 0)
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import java.util.Set;
+
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.hive.ql.QTestUtil;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+/**
+ * Implementation of a pre-execute hook that prevents modification
+ * of the read-only tables used by the test framework.
+ */
+public class EnforceReadOnlyTables implements PreExecute {
+
+ @Override
+ public void run(SessionState sess, Set<ReadEntity> inputs,
+ Set<WriteEntity> outputs, UserGroupInformation ugi)
+ throws Exception {
+
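+ // inspect every entity the query writes to; abort before execution if
+ // any write targets one of the shared source tables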
+ for (WriteEntity w: outputs) {
+ if ((w.getTyp() == WriteEntity.Type.TABLE) ||
+ (w.getTyp() == WriteEntity.Type.PARTITION)) {
+ Table t = w.getTable();
+ if (QTestUtil.srcTables.contains(t.getTableName())) {
+ throw new RuntimeException("Cannot overwrite read-only table: " + t.getTableName());
+ }
+ }
+ }
+ }
+}
Index: ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (revision 965383)
+++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (working copy)
@@ -28,6 +28,7 @@
import java.io.Serializable;
import java.net.URI;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
@@ -84,11 +85,15 @@
private final String logDir;
private final TreeMap<String, String> qMap;
private final Set<String> qSkipSet;
- private final LinkedList<String> srcTables;
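+ // tables populated once at startup; tests must treat these as read-only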
+ public static final HashSet<String> srcTables = new HashSet<String>
+ (Arrays.asList(new String[] {
+ "src", "src1", "srcbucket", "srcbucket2", "src_json", "src_thrift",
+ "src_sequencefile", "srcpart"
+ }));
private ParseDriver pd;
private Hive db;
- protected final HiveConf conf;
+ protected HiveConf conf;
private Driver drv;
private SemanticAnalyzer sem;
private FileSystem fs;
@@ -184,6 +189,18 @@
return null;
}
+ public void initConf() {
+ if (miniMr) {
+ String fsName = conf.get("fs.default.name");
+ assert fsName != null;
+ // hive.metastore.warehouse.dir needs to be set relative to the jobtracker
+ conf.set("hive.metastore.warehouse.dir", fsName
+ .concat("/build/ql/test/data/warehouse/"));
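+ // point MapReduce jobs at the mini cluster's job tracker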
+ conf.set("mapred.job.tracker", "localhost:" + mr.getJobTrackerPort());
+ }
+ }
+
public QTestUtil(String outDir, String logDir, boolean miniMr, String hadoopVer) throws Exception {
this.outDir = outDir;
this.logDir = logDir;
@@ -197,17 +214,10 @@
dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
FileSystem fs = dfs.getFileSystem();
mr = new MiniMRCluster(4, fs.getUri().toString(), 1);
- // hive.metastore.warehouse.dir needs to be set relative to the jobtracker
- String fsName = conf.get("fs.default.name");
- assert fsName != null;
- conf.set("hive.metastore.warehouse.dir", fsName
- .concat("/build/ql/test/data/warehouse/"));
- conf.set("mapred.job.tracker", "localhost:" + mr.getJobTrackerPort());
- }
-
- // System.out.println(conf.toString());
+ }
+
+ initConf();
testFiles = conf.get("test.data.files").replace('\\', '/')
.replace("c:", "");
@@ -217,8 +227,6 @@
overWrite = true;
}
- srcTables = new LinkedList<String>();
-
init();
}
@@ -290,6 +298,22 @@
dis.close();
}
+ /**
+ * Clear out any side effects of running tests
+ */
+ public void clearTestSideEffects() throws Exception {
+ // delete any tables other than the source tables
+ for (String s : db.getAllTables()) {
+ if (!srcTables.contains(s)) {
+ db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, s);
+ }
+ }
+ // allocate and initialize a new conf since a test can
+ // modify conf by using 'set' commands
+ conf = new HiveConf(Driver.class);
+ initConf();
+ }
+
public void cleanUp() throws Exception {
String warehousePath = ((new URI(testWarehouse)).getPath());
// Drop any tables that remain due to unsuccessful runs
@@ -329,8 +353,7 @@
}
public void createSources() throws Exception {
- // Next create the three tables src, dest1 and dest2 each with two columns
- // key and value
+ // Create a bunch of tables with columns key and value
LinkedList<String> cols = new LinkedList<String>();
cols.add("key");
cols.add("value");
@@ -340,7 +363,6 @@
part_cols.add("hr");
db.createTable("srcpart", cols, part_cols, TextInputFormat.class,
IgnoreKeyTextOutputFormat.class);
- srcTables.add("srcpart");
Path fpath;
Path newfpath;
@@ -367,7 +389,6 @@
runCreateTableCmd("CREATE TABLE srcbucket(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE");
// db.createTable("srcbucket", cols, null, TextInputFormat.class,
// IgnoreKeyTextOutputFormat.class, 2, bucketCols);
- srcTables.add("srcbucket");
for (String fname : new String[] {"srcbucket0.txt", "srcbucket1.txt"}) {
fpath = new Path(testFiles, fname);
newfpath = new Path(tmppath, fname);
@@ -380,7 +401,6 @@
+ "CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE");
// db.createTable("srcbucket", cols, null, TextInputFormat.class,
// IgnoreKeyTextOutputFormat.class, 2, bucketCols);
- srcTables.add("srcbucket2");
for (String fname : new String[] {"srcbucket20.txt", "srcbucket21.txt",
"srcbucket22.txt", "srcbucket23.txt"}) {
fpath = new Path(testFiles, fname);
@@ -393,11 +413,9 @@
for (String tname : new String[] {"src", "src1"}) {
db.createTable(tname, cols, null, TextInputFormat.class,
IgnoreKeyTextOutputFormat.class);
- srcTables.add(tname);
}
db.createTable("src_sequencefile", cols, null,
SequenceFileInputFormat.class, SequenceFileOutputFormat.class);
- srcTables.add("src_sequencefile");
Table srcThrift = new Table("src_thrift");
srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName());
@@ -408,13 +426,11 @@
srcThrift.setSerdeParam(Constants.SERIALIZATION_FORMAT,
TBinaryProtocol.class.getName());
db.createTable(srcThrift);
- srcTables.add("src_thrift");
LinkedList<String> json_cols = new LinkedList<String>();
json_cols.add("json");
db.createTable("src_json", json_cols, null, TextInputFormat.class,
IgnoreKeyTextOutputFormat.class);
- srcTables.add("src_json");
// load the input data into the src table
fpath = new Path(testFiles, "kv1.txt");
@@ -512,7 +528,7 @@
createSources();
}
- CliSessionState ss = new CliSessionState(conf);
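+ // give the session its own conf so that per-test 'set' commands
+ // cannot leak into the shared QTestUtil conf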
+ CliSessionState ss = new CliSessionState(new HiveConf(Driver.class));
assert ss != null;
ss.in = System.in;
Index: ql/src/test/templates/TestCliDriver.vm
===================================================================
--- ql/src/test/templates/TestCliDriver.vm (revision 965383)
+++ ql/src/test/templates/TestCliDriver.vm (working copy)
@@ -22,27 +22,38 @@
public class $className extends TestCase {
- private QTestUtil qt;
+ private static QTestUtil qt;
- public $className(String name) {
- super(name);
- qt = null;
- }
-
- @Override
- protected void setUp() {
+ static {
try {
boolean miniMR = false;
String hadoopVer;
-
if ("$clusterMode".equals("miniMR"))
miniMR = true;
hadoopVer = "$hadoopVersion";
-
qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()", miniMR, hadoopVer);
+ // do a one time initialization
+ qt.cleanUp();
+ qt.createSources();
+
+ } catch (Exception e) {
+ System.out.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.out.flush();
+ fail("Unexpected exception in static initialization");
}
- catch (Exception e) {
+ }
+
+ public $className(String name) {
+ super(name);
+ }
+
+ @Override
+ protected void setUp() {
+ try {
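+ // drop tables created by the previous test and reset the conf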
+ qt.clearTestSideEffects();
+ } catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
e.printStackTrace();
System.out.flush();
@@ -50,10 +61,18 @@
}
}
+ /**
+ * Dummy last test. This is only meant to shut down qt.
+ */
+ public void testCliDriver_shutdown() {
+ System.out.println("Cleaning up " + "$className");
+ }
+
@Override
protected void tearDown() {
try {
- qt.shutdown();
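+ // defer shutdown to the dummy last test so that all tests in the
+ // suite reuse the single static QTestUtil instance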
+ if (getName().equals("testCliDriver_shutdown")) {
+ qt.shutdown();
+ }
}
catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
@@ -71,6 +90,7 @@
#set ($tname = $fname.substring(0, $eidx))
suite.addTest(new $className("testCliDriver_$tname"));
#end
+ suite.addTest(new $className("testCliDriver_shutdown"));
return suite;
}
@@ -91,7 +111,7 @@
return;
}
- qt.cliInit("$fname");
+ qt.cliInit("$fname", false);
int ecode = qt.executeClient("$fname");
if (ecode != 0) {
fail("Client Execution failed with error code = " + ecode + debugHint);
Index: ql/src/test/templates/TestNegativeCliDriver.vm
===================================================================
--- ql/src/test/templates/TestNegativeCliDriver.vm (revision 965383)
+++ ql/src/test/templates/TestNegativeCliDriver.vm (working copy)
@@ -15,18 +15,30 @@
public class $className extends TestCase {
- private QTestUtil qt;
+ private static QTestUtil qt;
+ static {
+ try {
+ qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()");
+ // do a one time initialization
+ qt.cleanUp();
+ qt.createSources();
+ } catch (Exception e) {
+ System.out.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.out.flush();
+ fail("Unexpected exception in static initialization");
+ }
+ }
+
public $className(String name) {
super(name);
- qt = null;
}
@Override
protected void setUp() {
try {
- qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()");
-
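+ // remove side effects of the previous test instead of rebuilding QTestUtil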
+ qt.clearTestSideEffects();
}
catch (Throwable e) {
e.printStackTrace();
@@ -64,7 +76,7 @@
return;
}
- qt.cliInit("$fname");
+ qt.cliInit("$fname", false);
int ecode = qt.executeClient("$fname");
if (ecode == 0) {
fail("Client Execution failed with error code = " + ecode