Index: storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java
===================================================================
--- storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java (revision 1304584)
+++ storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java (working copy)
@@ -121,6 +121,14 @@
     }
 
     @Override
+    public void createTable(String table, List<String> columnFamilies) throws IOException {
+    }
+
+    @Override
+    public void dropTable(String table) throws IOException {
+    }
+
+    @Override
     public Transaction beginWriteTransaction(String table, List<String> families) throws IOException {
         return recordCall(null, table, families);
Index: storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
===================================================================
--- storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java (revision 1304584)
+++ storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java (working copy)
@@ -36,6 +36,8 @@
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hcatalog.cli.HCatDriver;
 import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.hbase.snapshot.RevisionManager;
+import org.apache.zookeeper.KeeperException.NoNodeException;
 import org.junit.Test;
 
 public class TestHBaseHCatStorageHandler extends SkeletonHBaseTest {
@@ -85,14 +87,97 @@
 
         assertTrue(doesTableExist);
 
+        RevisionManager rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(getHbaseConf());
+        rm.open();
+        //Should be able to successfully query revision manager
+        rm.getAbortedWriteTransactions("test_table", "cf1");
+
         hcatDriver.run("drop table test_table");
         doesTableExist = hAdmin.tableExists("test_table");
+        assertTrue(doesTableExist == false);
+        try {
+            rm.getAbortedWriteTransactions("test_table", "cf1");
+        } catch (Exception e) {
+            assertTrue(e.getCause() instanceof NoNodeException);
+        }
+        rm.close();
+
+    }
+
+    @Test
+    public void testTableCreateDropDifferentCase() throws Exception {
+        Initialize();
+
+        hcatDriver.run("drop table test_Table");
+        CommandProcessorResponse response = hcatDriver
+                .run("create table test_Table(key int, value string) STORED BY " +
+                     "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+                     "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val')");
+
+        assertEquals(0, response.getResponseCode());
+
+        //HBase table gets created with lower case unless specified as a table property.
+        HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
+        boolean doesTableExist = hAdmin.tableExists("test_table");
+
+        assertTrue(doesTableExist);
+
+        RevisionManager rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(getHbaseConf());
+        rm.open();
+        //Should be able to successfully query revision manager
+        rm.getAbortedWriteTransactions("test_table", "cf1");
+
+        hcatDriver.run("drop table test_table");
+        doesTableExist = hAdmin.tableExists("test_table");
         assertTrue(doesTableExist == false);
 
+        try {
+            rm.getAbortedWriteTransactions("test_table", "cf1");
+        } catch (Exception e) {
+            assertTrue(e.getCause() instanceof NoNodeException);
+        }
+        rm.close();
+
     }
 
     @Test
+    public void testTableCreateDropCaseSensitive() throws Exception {
+        Initialize();
+
+        hcatDriver.run("drop table test_Table");
+        CommandProcessorResponse response = hcatDriver
+                .run("create table test_Table(key int, value string) STORED BY " +
+                     "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+                     "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val'," +
+                     " 'hbase.table.name'='CaseSensitiveTable')");
+
+        assertEquals(0, response.getResponseCode());
+
+        HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
+        boolean doesTableExist = hAdmin.tableExists("CaseSensitiveTable");
+
+        assertTrue(doesTableExist);
+
+        RevisionManager rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(getHbaseConf());
+        rm.open();
+        //Should be able to successfully query revision manager
+        rm.getAbortedWriteTransactions("CaseSensitiveTable", "cf1");
+
+        hcatDriver.run("drop table test_table");
+        doesTableExist = hAdmin.tableExists("CaseSensitiveTable");
+        assertTrue(doesTableExist == false);
+
+        try {
+            rm.getAbortedWriteTransactions("CaseSensitiveTable", "cf1");
+        } catch (Exception e) {
+            assertTrue(e.getCause() instanceof NoNodeException);
+        }
+        rm.close();
+
+    }
+
+    @Test
     public void testTableDropNonExistent() throws Exception {
         Initialize();
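The three TestHBaseHCatStorageHandler cases above all exercise the same naming rule: without an explicit 'hbase.table.name' property the HBase table name is derived from the Hive database and table name and lower-cased, while an explicit property value is used verbatim, preserving case. The snippet below restates that rule for clarity; resolveHBaseTableName is a hypothetical helper written for illustration only, mirroring the getFullyQualifiedHBaseTableName(HCatTableInfo) method added to HBaseHCatStorageHandler further down in this patch.

    // Illustration only -- not part of the patch.
    public class TableNameRuleSketch {

        static String resolveHBaseTableName(String dbName, String hiveTableName,
                String hbaseTableNameProperty) {
            if (hbaseTableNameProperty != null) {
                // An explicit 'hbase.table.name' value is honoured as-is, so case is preserved.
                return hbaseTableNameProperty;
            }
            String qualified = (dbName == null || dbName.equals("default"))
                    ? hiveTableName : dbName + "." + hiveTableName;
            // Without the property, the derived HBase table name is lower-cased.
            return qualified.toLowerCase();
        }

        public static void main(String[] args) {
            // "test_Table" in the default database -> HBase table "test_table"
            System.out.println(resolveHBaseTableName("default", "test_Table", null));
            // Explicit property -> HBase table "CaseSensitiveTable"
            System.out.println(resolveHBaseTableName("default", "test_Table", "CaseSensitiveTable"));
        }
    }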
Index: storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
===================================================================
--- storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java (revision 1304584)
+++ storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java (working copy)
@@ -187,13 +187,14 @@
         String testName = "directHCatOutputFormatTest";
         Path methodTestDir = new Path(getTestDir(),testName);
 
-        String databaseName = testName.toLowerCase();
+        String databaseName = testName;
         String dbDir = new Path(methodTestDir,"DB_"+testName).toString();
-        String tableName = newTableName(testName).toLowerCase();
+        String tableName = newTableName(testName);
         String familyName = "my_family";
         byte[] familyNameBytes = Bytes.toBytes(familyName);
+        //Table name will be lower case unless specified by hbase.table.name property
+        String hbaseTableName = (databaseName + "." + tableName).toLowerCase();
 
-        //include hbase config in conf file
         Configuration conf = new Configuration(allConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF, HCatUtil.serialize(allConf.getAllProperties()));
@@ -233,7 +234,7 @@
 
         RevisionManager rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(conf);
         try {
-            TableSnapshot snapshot = rm.createSnapshot(databaseName+"."+tableName);
+            TableSnapshot snapshot = rm.createSnapshot(hbaseTableName);
             for(String el: snapshot.getColumnFamilies()) {
                 assertEquals(1,snapshot.getRevision(el));
             }
@@ -242,7 +243,7 @@
         }
 
         //verify
-        HTable table = new HTable(conf, databaseName+"."+tableName);
+        HTable table = new HTable(conf, hbaseTableName);
         Scan scan = new Scan();
         scan.addFamily(familyNameBytes);
         ResultScanner scanner = table.getScanner(scan);
@@ -264,11 +265,13 @@
     public void directModeAbortTest() throws Exception {
         String testName = "directModeAbortTest";
         Path methodTestDir = new Path(getTestDir(), testName);
-        String databaseName = testName.toLowerCase();
+        String databaseName = testName;
         String dbDir = new Path(methodTestDir, "DB_" + testName).toString();
-        String tableName = newTableName(testName).toLowerCase();
+        String tableName = newTableName(testName);
         String familyName = "my_family";
         byte[] familyNameBytes = Bytes.toBytes(familyName);
+        //Table name as specified by hbase.table.name property
+        String hbaseTableName = tableName;
 
         // include hbase config in conf file
         Configuration conf = new Configuration(allConf);
@@ -281,7 +284,7 @@
                 "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
                 "TBLPROPERTIES (" +
                 "'hbase.columns.mapping'=':key," + familyName + ":english," + familyName +
-                ":spanish')";
+                ":spanish','hbase.table.name'='"+ hbaseTableName +"')";
 
         assertEquals(0, hcatDriver.run(dbquery).getResponseCode());
         assertEquals(0, hcatDriver.run(tableQuery).getResponseCode());
@@ -305,17 +308,16 @@
                 tableName, null);
         Job job = configureJob(testName, conf, workingDir,
                 MapWriteAbortTransaction.class, outputJobInfo, inputPath);
-        job.waitForCompletion(true);
         assertFalse(job.waitForCompletion(true));
 
         // verify that revision manager has it as aborted transaction
         RevisionManager rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(conf);
         try {
-            TableSnapshot snapshot = rm.createSnapshot(databaseName + "." + tableName);
+            TableSnapshot snapshot = rm.createSnapshot(hbaseTableName);
             for (String family : snapshot.getColumnFamilies()) {
                 assertEquals(1, snapshot.getRevision(family));
                 List<FamilyRevision> abortedWriteTransactions = rm.getAbortedWriteTransactions(
-                        databaseName + "." + tableName, family);
+                        hbaseTableName, family);
                 assertEquals(1, abortedWriteTransactions.size());
                 assertEquals(1, abortedWriteTransactions.get(0).getRevision());
             }
@@ -324,7 +326,7 @@
         }
 
         // verify that hbase has the records of the successful maps.
-        HTable table = new HTable(conf, databaseName + "." + tableName);
+        HTable table = new HTable(conf, hbaseTableName);
         Scan scan = new Scan();
         scan.addFamily(familyNameBytes);
         ResultScanner scanner = table.getScanner(scan);
@@ -380,12 +382,8 @@
         job.setOutputFormatClass(HCatOutputFormat.class);
         HCatOutputFormat.setOutput(job, outputJobInfo);
         String txnString = job.getConfiguration().get(HBaseConstants.PROPERTY_WRITE_TXN_KEY);
-        //Test passing in same jobConf or same OutputJobInfo multiple times and verify 1 transaction is created
-        //Same jobConf
-        HCatOutputFormat.setOutput(job, outputJobInfo);
-        assertEquals(txnString, job.getConfiguration().get(HBaseConstants.PROPERTY_WRITE_TXN_KEY));
+        //Test passing in same OutputJobInfo multiple times and verify 1 transaction is created
         String jobString = job.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
-        //Same OutputJobInfo
         outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(jobString);
         Job job2 = new Job(conf);
         HCatOutputFormat.setOutput(job2, outputJobInfo);
Index: storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
===================================================================
--- storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java (revision 1304584)
+++ storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java (working copy)
@@ -552,7 +552,6 @@
 
         Job job = configureJob(testName, conf, workingDir,
                 MapWriteAbortTransaction.class, outputJobInfo, inputPath);
-        job.waitForCompletion(true);
         assertFalse(job.waitForCompletion(true));
 
         // verify that revision manager has it as aborted transaction
Index: storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
===================================================================
--- storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java (revision 1304584)
+++ storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java (working copy)
@@ -172,8 +172,10 @@
     @Test
     public void TestHBaseTableReadMR() throws Exception {
         Initialize();
-        String tableName = newTableName("mytable");
-        String databaseName = newTableName("mydatabase");
+        String tableName = newTableName("MyTable");
+        String databaseName = newTableName("MyDatabase");
+        //Table name will be lower case unless specified by hbase.table.name property
+        String hbaseTableName = (databaseName + "." + tableName).toLowerCase();
         String db_dir = getTestDir() + "/hbasedb";
 
         String dbquery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '"
@@ -189,7 +191,6 @@
         assertEquals(0, responseTwo.getResponseCode());
 
         HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
-        String hbaseTableName = databaseName + "." + tableName;
         boolean doesTableExist = hAdmin.tableExists(hbaseTableName);
         assertTrue(doesTableExist);
 
@@ -243,21 +244,24 @@
     public void TestHBaseTableProjectionReadMR() throws Exception {
         Initialize();
-        String tableName = newTableName("mytable");
+        String tableName = newTableName("MyTable");
+        //Table name as specified by hbase.table.name property
+        String hbaseTableName = "MyDB_" + tableName;
 
         String tableQuery = "CREATE TABLE " + tableName
-                + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
-                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-                + "TBLPROPERTIES ('hbase.columns.mapping'=':key," +
-                "testFamily:testQualifier1,testFamily:testQualifier2')" ;
+                + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
+                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+                "TBLPROPERTIES ('hbase.columns.mapping'=" +
+                "':key,testFamily:testQualifier1,testFamily:testQualifier2'," +
+                "'hbase.table.name'='" + hbaseTableName+ "')" ;
 
         CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
         assertEquals(0, responseTwo.getResponseCode());
 
         HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
-        boolean doesTableExist = hAdmin.tableExists(tableName);
+        boolean doesTableExist = hAdmin.tableExists(hbaseTableName);
         assertTrue(doesTableExist);
 
-        populateHBaseTable(tableName, 5);
+        populateHBaseTable(hbaseTableName, 5);
 
         Configuration conf = new Configuration(hcatConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
@@ -293,7 +297,7 @@
         CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
         assertEquals(0, responseThree.getResponseCode());
 
-        boolean isHbaseTableThere = hAdmin.tableExists(tableName);
+        boolean isHbaseTableThere = hAdmin.tableExists(hbaseTableName);
         assertFalse(isHbaseTableThere);
     }
 
Index: storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java
===================================================================
--- storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java (revision 1304584)
+++ storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java (working copy)
@@ -51,6 +51,19 @@
     public void close() throws IOException;
 
     /**
+     * Setup revision management for a newly created hbase table.
+     * @param table the hbase table name
+     * @param columnFamilies the column families in the table
+     */
+    public void createTable(String table, List<String> columnFamilies) throws IOException;
+
+    /**
+     * Remove table data from revision manager for a dropped table.
+     * @param table the hbase table name
+     */
+    public void dropTable(String table) throws IOException;
+
+    /**
      * Start the write transaction.
      *
      * @param table
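The two methods added to the RevisionManager interface give implementations a table lifecycle hook. The sketch below shows the intended call pattern, modelled on how HBaseHCatStorageHandler uses them later in this patch (createTable when the HBase table is created, dropTable when it is dropped). It is illustrative only, and assumes rm was obtained in an opened state, for example via HBaseRevisionManagerUtil.getOpenedRevisionManager(conf) as the patch does.

    // Illustrative sketch, not part of the patch; error handling elided and names are placeholders.
    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.hcatalog.hbase.snapshot.RevisionManager;

    public class RevisionManagerLifecycleSketch {

        static void trackTableLifecycle(RevisionManager rm, String hbaseTableName) throws IOException {
            List<String> columnFamilies = Arrays.asList("cf1");
            try {
                // Register revision bookkeeping when the HBase table is created.
                rm.createTable(hbaseTableName, columnFamilies);
                // ... writes would go through begin/commit/abortWriteTransaction ...
                // Remove the table's revision metadata when the table is dropped.
                rm.dropTable(hbaseTableName);
            } finally {
                rm.close();
            }
        }
    }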
Index: storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpointClient.java
===================================================================
--- storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpointClient.java (revision 1304584)
+++ storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpointClient.java (working copy)
@@ -53,6 +53,16 @@
     }
 
     @Override
+    public void createTable(String table, List<String> columnFamilies) throws IOException {
+        rmProxy.createTable(table, columnFamilies);
+    }
+
+    @Override
+    public void dropTable(String table) throws IOException {
+        rmProxy.dropTable(table);
+    }
+
+    @Override
     public Transaction beginWriteTransaction(String table, List<String> families) throws IOException {
         return rmProxy.beginWriteTransaction(table, families);
     }
Index: storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpoint.java
===================================================================
--- storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpoint.java (revision 1304584)
+++ storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpoint.java (working copy)
@@ -83,6 +83,16 @@
     }
 
     @Override
+    public void createTable(String table, List<String> columnFamilies) throws IOException {
+        rmImpl.createTable(table, columnFamilies);
+    }
+
+    @Override
+    public void dropTable(String table) throws IOException {
+        rmImpl.dropTable(table);
+    }
+
+    @Override
     public Transaction beginWriteTransaction(String table, List<String> families)
             throws IOException {
         return rmImpl.beginWriteTransaction(table, families);
Index: storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/ZKBasedRevisionManager.java
===================================================================
--- storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/ZKBasedRevisionManager.java (revision 1304584)
+++ storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/ZKBasedRevisionManager.java (working copy)
@@ -88,7 +88,17 @@
         }
     }
 
+    @Override
+    public void createTable(String table, List<String> columnFamilies) throws IOException {
+        zkUtil.createRootZNodes();
+        zkUtil.setUpZnodesForTable(table, columnFamilies);
+    }
+    @Override
+    public void dropTable(String table) throws IOException {
+        zkUtil.deleteZNodes(table);
+    }
+
     /* @param table
     /* @param families
     /* @param keepAlive
@@ -413,29 +423,6 @@
         return lockPath;
     }
 
-    /**
-     * Sets up the table, column family znodes in zookeeper.
-     *
-     * @param tableName the hbase table name
-     * @param columnFamilies the column families in hbase
-     * @throws IOException Signals that an I/O exception has occurred.
-     */
-    public void setUpZNodes(String tableName, List<String> columnFamilies) throws IOException{
-        zkUtil.createRootZNodes();
-        zkUtil.setUpZnodesForTable(tableName, columnFamilies);
-    }
-
-    /**
-     * Delete the table znodes from zookeeper.
-     *
-     * @param tableName the table name
-     * @throws IOException Signals that an I/O exception has occurred.
-     */
-    public void deleteZNodes(String tableName) throws IOException {
-        zkUtil.deleteZNodes(tableName);
-    }
-
-
     /*
      * This class is a listener class for the locks used in revision management.
      * TBD: Use the following class to signal that that the lock is actually
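With the ZooKeeper-based implementation above, dropTable removes the table's znodes, so subsequent revision queries for that table fail with ZooKeeper's NoNodeException as the root cause; this is the behaviour the updated TestHBaseHCatStorageHandler tests assert after dropping a table. A condensed sketch of that expectation (table and family names are placeholders):

    // Sketch only: condenses the assertion pattern used in the tests earlier in this patch.
    import org.apache.hcatalog.hbase.snapshot.RevisionManager;
    import org.apache.zookeeper.KeeperException.NoNodeException;

    public class DropTableExpectationSketch {

        static boolean revisionDataGone(RevisionManager rm, String hbaseTableName) {
            try {
                // Succeeds while the table's znodes still exist ...
                rm.getAbortedWriteTransactions(hbaseTableName, "cf1");
                return false;
            } catch (Exception e) {
                // ... and fails with NoNodeException as the cause once dropTable() has run.
                return e.getCause() instanceof NoNodeException;
            }
        }
    }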
Index: storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
===================================================================
--- storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java (revision 1304584)
+++ storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java (working copy)
@@ -60,7 +60,6 @@
 import org.apache.hcatalog.hbase.HBaseDirectOutputFormat.HBaseDirectOutputCommitter;
 import org.apache.hcatalog.hbase.snapshot.RevisionManager;
 import org.apache.hcatalog.hbase.snapshot.Transaction;
-import org.apache.hcatalog.hbase.snapshot.ZKBasedRevisionManager;
 import org.apache.hcatalog.mapreduce.HCatOutputFormat;
 import org.apache.hcatalog.mapreduce.HCatTableInfo;
 import org.apache.hcatalog.mapreduce.InputJobInfo;
@@ -83,6 +82,7 @@
     private final static String PROPERTY_INT_OUTPUT_LOCATION = "hcat.hbase.mapreduce.intermediateOutputLocation";
 
     private Configuration hbaseConf;
+    private Configuration jobConf;
     private HBaseAdmin admin;
 
     @Override
@@ -94,17 +94,18 @@
         try {
             InputJobInfo inputJobInfo = (InputJobInfo) HCatUtil.deserialize(jobString);
             HCatTableInfo tableInfo = inputJobInfo.getTableInfo();
-            String qualifiedTableName = HBaseHCatStorageHandler.getFullyQualifiedName(tableInfo);
+            String qualifiedTableName = HBaseHCatStorageHandler.getFullyQualifiedHBaseTableName(tableInfo);
             jobProperties.put(TableInputFormat.INPUT_TABLE, qualifiedTableName);
 
-            Configuration jobConf = getConf();
+            Configuration jobConf = getJobConf();
             addHbaseResources(jobConf, jobProperties);
-            Configuration copyOfConf = new Configuration(jobConf);
+            JobConf copyOfConf = new JobConf(jobConf);
             HBaseConfiguration.addHbaseResources(copyOfConf);
             //Getting hbase delegation token in getInputSplits does not work with PIG. So need to
             //do it here
-            if (jobConf instanceof JobConf) {
-                HBaseUtil.addHBaseDelegationToken((JobConf)jobConf);
+            if (jobConf instanceof JobConf) { //Should be the case
+                HBaseUtil.addHBaseDelegationToken(copyOfConf);
+                ((JobConf)jobConf).getCredentials().addAll(copyOfConf.getCredentials());
             }
 
             String outputSchema = jobConf.get(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA);
@@ -140,11 +141,11 @@
         try {
             OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(jobString);
             HCatTableInfo tableInfo = outputJobInfo.getTableInfo();
-            String qualifiedTableName = HBaseHCatStorageHandler.getFullyQualifiedName(tableInfo);
+            String qualifiedTableName = HBaseHCatStorageHandler.getFullyQualifiedHBaseTableName(tableInfo);
             jobProperties.put(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY, qualifiedTableName);
             jobProperties.put(TableOutputFormat.OUTPUT_TABLE, qualifiedTableName);
 
-            Configuration jobConf = getConf();
+            Configuration jobConf = getJobConf();
             addHbaseResources(jobConf, jobProperties);
 
             Configuration copyOfConf = new Configuration(jobConf);
@@ -152,24 +153,14 @@
 
             String txnString = outputJobInfo.getProperties().getProperty(
                     HBaseConstants.PROPERTY_WRITE_TXN_KEY);
-            String jobTxnString = jobConf.get(HBaseConstants.PROPERTY_WRITE_TXN_KEY);
-            //Pig makes 3 calls to HCatOutputFormat.setOutput(HCatStorer) with different JobConf
-            //which leads to creating 2 transactions.
-            //So apart from fixing HCatStorer to pass same OutputJobInfo, making the call idempotent for other
-            //cases which might call multiple times but with same JobConf.
             Transaction txn = null;
-            if (txnString == null && jobTxnString == null) {
+            if (txnString == null) {
                 txn = HBaseRevisionManagerUtil.beginWriteTransaction(qualifiedTableName,
                         tableInfo, copyOfConf);
                 String serializedTxn = HCatUtil.serialize(txn);
                 outputJobInfo.getProperties().setProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY,
                         serializedTxn);
-                jobProperties.put(HBaseConstants.PROPERTY_WRITE_TXN_KEY, serializedTxn);
             } else {
-                txnString = (txnString == null) ? jobTxnString : txnString;
                 txn = (Transaction) HCatUtil.deserialize(txnString);
-                outputJobInfo.getProperties().setProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY,
-                        txnString);
-                jobProperties.put(HBaseConstants.PROPERTY_WRITE_TXN_KEY, txnString);
             }
             if (isBulkMode(outputJobInfo)) {
                 String tableLocation = tableInfo.getTableLocation();
@@ -259,7 +250,7 @@
         }
 
         try {
-            String tableName = getHBaseTableName(tbl);
+            String tableName = getFullyQualifiedHBaseTableName(tbl);
             String hbaseColumnsMapping = tbl.getParameters().get(
                     HBaseSerDe.HBASE_COLUMNS_MAPPING);
@@ -331,13 +322,9 @@
             // ensure the table is online
             new HTable(hbaseConf, tableDesc.getName());
 
-            //Set up znodes in revision manager.
+            //Set up table in revision manager.
             RevisionManager rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(hbaseConf);
-            if (rm instanceof ZKBasedRevisionManager) {
-                ZKBasedRevisionManager zkRM = (ZKBasedRevisionManager) rm;
-                zkRM.setUpZNodes(tableName, new ArrayList<String>(
-                        uniqueColumnFamilies));
-            }
+            rm.createTable(tableName, new ArrayList<String>(uniqueColumnFamilies));
 
         } catch (MasterNotRunningException mnre) {
             throw new MetaException(StringUtils.stringifyException(mnre));
@@ -409,7 +396,7 @@
         }
     }
 
-    private String getHBaseTableName(Table tbl) {
+    private String getFullyQualifiedHBaseTableName(Table tbl) {
         String tableName = tbl.getParameters().get(HBaseSerDe.HBASE_TABLE_NAME);
         if (tableName == null) {
             tableName = tbl.getSd().getSerdeInfo().getParameters()
@@ -421,10 +408,28 @@
             } else {
                 tableName = tbl.getDbName() + "." + tbl.getTableName();
             }
+            tableName = tableName.toLowerCase();
         }
         return tableName;
     }
 
+    static String getFullyQualifiedHBaseTableName(HCatTableInfo tableInfo){
+        String qualifiedName = tableInfo.getStorerInfo().getProperties()
+                .getProperty(HBaseSerDe.HBASE_TABLE_NAME);
+        if (qualifiedName == null) {
+            String databaseName = tableInfo.getDatabaseName();
+            String tableName = tableInfo.getTableName();
+            if ((databaseName == null)
+                    || (databaseName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME))) {
+                qualifiedName = tableName;
+            } else {
+                qualifiedName = databaseName + "." + tableName;
+            }
+            qualifiedName = qualifiedName.toLowerCase();
+        }
+        return qualifiedName;
+    }
+
     @Override
     public Class<? extends InputFormat> getInputFormatClass() {
         return HBaseInputFormat.class;
@@ -449,6 +454,10 @@
         return HBaseSerDe.class;
     }
 
+    public Configuration getJobConf() {
+        return jobConf;
+    }
+
     @Override
     public Configuration getConf() {
 
@@ -460,15 +469,23 @@
 
     @Override
     public void setConf(Configuration conf) {
-        //Not cloning as we want to set tmpjars on it. Putting in jobProperties does not
-        //get propagated to JobConf in case of InputFormat as they are maintained per partition.
-        //Also we need to add hbase delegation token to the Credentials.
-        hbaseConf = conf;
+        //setConf is called both during DDL operations and mapred read/write jobs.
+        //Creating a copy of conf for DDL and adding hbase-default and hbase-site.xml to it.
+        //For jobs, maintaining a reference instead of cloning as we need to
+        // 1) add hbase delegation token to the Credentials.
+        // 2) set tmpjars on it. Putting in jobProperties does not get propagated to JobConf
+        // in case of InputFormat as they are maintained per partition.
+        //Not adding hbase-default.xml and hbase-site.xml to jobConf as it will override any
+        //hbase properties set in the JobConf by the user. In configureInputJobProperties and
+        //configureOutputJobProperties, we take care of adding the default properties
+        //that are not already present. TODO: Change to a copy for jobs after HCAT-308 is fixed.
+        jobConf = conf;
+        hbaseConf = HBaseConfiguration.create(conf);
     }
 
     private void checkDeleteTable(Table table) throws MetaException {
         boolean isExternal = MetaStoreUtils.isExternalTable(table);
-        String tableName = getHBaseTableName(table);
+        String tableName = getFullyQualifiedHBaseTableName(table);
         RevisionManager rm = null;
         try {
             if (!isExternal && getHBaseAdmin().tableExists(tableName)) {
@@ -478,12 +495,9 @@
                 }
 
                 getHBaseAdmin().deleteTable(tableName);
-                //Set up znodes in revision manager.
+                //Drop table in revision manager.
                 rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(hbaseConf);
-                if (rm instanceof ZKBasedRevisionManager) {
-                    ZKBasedRevisionManager zkRM = (ZKBasedRevisionManager) rm;
-                    zkRM.deleteZNodes(tableName);
-                }
+                rm.dropTable(tableName);
             }
         } catch (IOException ie) {
             throw new MetaException(StringUtils.stringifyException(ie));
@@ -492,20 +506,6 @@
         }
     }
 
-    static String getFullyQualifiedName(HCatTableInfo tableInfo){
-        String qualifiedName;
-        String databaseName = tableInfo.getDatabaseName();
-        String tableName = tableInfo.getTableName();
-
-        if ((databaseName == null) || (databaseName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME))) {
-            qualifiedName = tableName;
-        } else {
-            qualifiedName = databaseName + "." + tableName;
-        }
-
-        return qualifiedName;
-    }
-
     /**
      * Helper method for users to add the required depedency jars to distributed cache.
      * @param conf