diff --git a/itests/util/pom.xml b/itests/util/pom.xml
index 2cddef0..bf373f8 100644
--- a/itests/util/pom.xml
+++ b/itests/util/pom.xml
@@ -170,6 +170,18 @@
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-server</artifactId>
      <version>${hbase.hadoop2.version}</version>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.hadoop2.version}</version>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.hadoop2.version}</version>
      <groupId>org.apache.hbase</groupId>
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 39d5d9e..2727cdc 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_COMMENT;
import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
import java.io.BufferedInputStream;
@@ -65,6 +66,11 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.cli.CliSessionState;
@@ -75,7 +81,10 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.hbase.HBaseReadWrite;
+import org.apache.hadoop.hive.metastore.hbase.TephraHBaseConnection;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -162,6 +171,8 @@
public interface SuiteAddTestFunctor {
public void addTestToSuite(TestSuite suite, Object setup, String tName);
}
+ private HBaseTestingUtility utility;
+ private boolean snapshotTaken = false;
static {
for (String srcTable : System.getProperty("test.src.tables", "").trim().split(",")) {
@@ -281,6 +292,12 @@ public void initConf() throws Exception {
conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
"org.apache.hadoop.hive.metastore.VerifyingObjectStore");
+ conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
+ conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
+ "org.apache.hadoop.hive.metastore.hbase.HBaseStore");
+ conf.setBoolVar(HiveConf.ConfVars.METASTORE_FASTPATH, true);
+ conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+
if (mr != null) {
assert dfs != null;
@@ -342,6 +359,51 @@ private String getKeyProviderURI() {
return "jceks://file" + new Path(keyDir, "test.jks").toUri();
}
+ private void rebuildHBase() throws Exception {
+ HBaseAdmin admin = utility.getHBaseAdmin();
+ if (!snapshotTaken) {
+ for (String tableName : HBaseReadWrite.tableNames) {
+ List<byte[]> families = HBaseReadWrite.columnFamilies.get(tableName);
+ HTableDescriptor desc = new HTableDescriptor(
+ TableName.valueOf(tableName));
+ for (byte[] family : families) {
+ HColumnDescriptor columnDesc = new HColumnDescriptor(family);
+ desc.addFamily(columnDesc);
+ }
+ try {
+ admin.disableTable(tableName);
+ admin.deleteTable(tableName);
+ } catch (IOException e) {
+ System.out.println(e.getMessage());
+ }
+ admin.createTable(desc);
+ }
+ } else {
+ for (String tableName : HBaseReadWrite.tableNames) {
+ admin.disableTable(tableName);
+ admin.restoreSnapshot("snapshot_" + tableName);
+ admin.enableTable(tableName);
+ }
+ try {
+ db.createDatabase(new org.apache.hadoop.hive.metastore.api.Database(
+ DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT, new Warehouse(conf)
+ .getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null));
+ } catch (Exception e) {
+ // Ignore if default database already exist
+ }
+ SessionState.get().setCurrentDatabase(DEFAULT_DATABASE_NAME);
+ }
+ admin.close();
+ }
+
+ private void startMiniHBaseCluster() throws Exception {
+ utility = new HBaseTestingUtility();
+ utility.startMiniCluster();
+ conf = new HiveConf(utility.getConfiguration(), Driver.class);
+ rebuildHBase();
+ HBaseReadWrite.getInstance(conf);
+ }
+
public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
String confDir, String hadoopVer, String initScript, String cleanupScript)
throws Exception {
@@ -351,6 +413,7 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml"));
System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
}
+ startMiniHBaseCluster();
conf = new HiveConf(Driver.class);
this.hadoopVer = getHadoopMainVersion(hadoopVer);
qMap = new TreeMap<String, String>();
@@ -444,6 +507,7 @@ public void shutdown() throws Exception {
sparkSession = null;
}
}
+ utility.shutdownMiniCluster();
if (mr != null) {
mr.shutdown();
mr = null;
@@ -731,6 +795,8 @@ public void clearTestSideEffects() throws Exception {
return;
}
+ rebuildHBase();
+
clearTablesCreatedDuringTests();
clearKeysCreatedInTests();
@@ -820,6 +886,12 @@ public void createSources() throws Exception {
cliDriver.processLine(initCommands);
conf.setBoolean("hive.test.init.phase", false);
+
+ HBaseAdmin admin = utility.getHBaseAdmin();
+ for (String tableName : HBaseReadWrite.tableNames) {
+ admin.snapshot("snapshot_" + tableName, tableName);
+ }
+ snapshotTaken = true;
}
public void init() throws Exception {
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index b54afb9..9353a63 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -73,7 +73,7 @@
/**
* Class to manage storing object in and reading them from HBase.
*/
-class HBaseReadWrite {
+public class HBaseReadWrite {
@VisibleForTesting final static String DB_TABLE = "HBMS_DBS";
@VisibleForTesting final static String FUNC_TABLE = "HBMS_FUNCS";
@@ -89,9 +89,9 @@
/**
* List of tables in HBase
*/
- final static String[] tableNames = { DB_TABLE, FUNC_TABLE, GLOBAL_PRIVS_TABLE, PART_TABLE,
+ final static public String[] tableNames = { DB_TABLE, FUNC_TABLE, GLOBAL_PRIVS_TABLE, PART_TABLE,
USER_TO_ROLE_TABLE, ROLE_TABLE, SD_TABLE, TABLE_TABLE };
- final static Map<String, List<byte[]>> columnFamilies =
+ final static public Map<String, List<byte[]>> columnFamilies =
new HashMap<String, List<byte[]>> (tableNames.length);
static {
@@ -159,7 +159,7 @@ protected HBaseReadWrite initialValue() {
* @param configuration Configuration object
* @return thread's instance of HBaseReadWrite
*/
- static HBaseReadWrite getInstance(Configuration configuration) {
+ public static HBaseReadWrite getInstance(Configuration configuration) {
staticConf = configuration;
return self.get();
}