diff --git a/itests/util/pom.xml b/itests/util/pom.xml
index 00af301..e8d7845 100644
--- a/itests/util/pom.xml
+++ b/itests/util/pom.xml
@@ -170,6 +170,18 @@
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
       <version>${hbase.hadoop2.version}</version>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.hadoop2.version}</version>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.hadoop2.version}</version>
       <groupId>org.apache.hbase</groupId>
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index f52350d..a53f8cc 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -64,6 +64,11 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.cli.CliSessionState;
@@ -75,6 +80,8 @@
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.hbase.HBaseReadWrite;
+import org.apache.hadoop.hive.metastore.hbase.TephraHBaseConnection;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -94,6 +101,8 @@
import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.processors.HiveCommand;
+import org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;
@@ -104,6 +113,8 @@
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;
+import co.cask.tephra.hbase98.coprocessor.TransactionProcessor;
+
import com.google.common.collect.ImmutableList;
/**
@@ -158,6 +169,8 @@
private final String initScript;
private final String cleanupScript;
+ private HBaseTestingUtility utility;
+
static {
for (String srcTable : System.getProperty("test.src.tables", "").trim().split(",")) {
srcTable = srcTable.trim();
@@ -276,6 +289,12 @@ public void initConf() throws Exception {
conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
"org.apache.hadoop.hive.metastore.VerifyingObjectStore");
+ conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
+ conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
+ "org.apache.hadoop.hive.metastore.hbase.HBaseStore");
+ conf.setBoolVar(HiveConf.ConfVars.METASTORE_FASTPATH, true);
+ conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+
if (mr != null) {
assert dfs != null;
@@ -337,6 +356,27 @@ private String getKeyProviderURI() {
return "jceks://file" + new Path(keyDir, "test.jks").toUri();
}
+ private void startMiniHBaseCluster() throws Exception {
+ boolean testingTephra = false;
+ utility = new HBaseTestingUtility();
+ utility.startMiniCluster();
+ conf = new HiveConf(utility.getConfiguration(), Driver.class);
+ HBaseAdmin admin = utility.getHBaseAdmin();
+ for (String tableName : HBaseReadWrite.tableNames) {
+ List<byte[]> families = HBaseReadWrite.columnFamilies.get(tableName);
+ HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+ for (byte[] family : families) {
+ HColumnDescriptor columnDesc = new HColumnDescriptor(family);
+ if (testingTephra) columnDesc.setMaxVersions(Integer.MAX_VALUE);
+ desc.addFamily(columnDesc);
+ }
+ if (testingTephra) desc.addCoprocessor(TransactionProcessor.class.getName());
+ admin.createTable(desc);
+ }
+ admin.close();
+ HBaseReadWrite.getInstance(conf);
+ }
+
public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
String confDir, String hadoopVer, String initScript, String cleanupScript)
throws Exception {
@@ -346,6 +386,7 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml"));
System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
}
+ startMiniHBaseCluster();
conf = new HiveConf(Driver.class);
this.hadoopVer = getHadoopMainVersion(hadoopVer);
qMap = new TreeMap<String, String>();
@@ -435,6 +476,7 @@ public void shutdown() throws Exception {
sparkSession = null;
}
}
+ utility.shutdownMiniCluster();
if (mr != null) {
mr.shutdown();
mr = null;
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index b0dc707..6d773e4 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -74,7 +74,7 @@
/**
* Class to manage storing object in and reading them from HBase.
*/
-class HBaseReadWrite {
+public class HBaseReadWrite {
@VisibleForTesting final static String DB_TABLE = "HBMS_DBS";
@VisibleForTesting final static String FUNC_TABLE = "HBMS_FUNCS";
@@ -90,9 +90,9 @@
/**
* List of tables in HBase
*/
- final static String[] tableNames = { DB_TABLE, FUNC_TABLE, GLOBAL_PRIVS_TABLE, PART_TABLE,
+ final static public String[] tableNames = { DB_TABLE, FUNC_TABLE, GLOBAL_PRIVS_TABLE, PART_TABLE,
USER_TO_ROLE_TABLE, ROLE_TABLE, SD_TABLE, TABLE_TABLE };
- final static Map<String, List<byte[]>> columnFamilies =
+ final static public Map<String, List<byte[]>> columnFamilies =
 new HashMap<String, List<byte[]>> (tableNames.length);
static {
@@ -160,7 +160,7 @@ protected HBaseReadWrite initialValue() {
* @param configuration Configuration object
* @return thread's instance of HBaseReadWrite
*/
- static HBaseReadWrite getInstance(Configuration configuration) {
+ public static HBaseReadWrite getInstance(Configuration configuration) {
staticConf = configuration;
return self.get();
}