Index: src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java	(working copy)
@@ -166,7 +166,7 @@
         new Path(conf.get(HConstants.HBASE_DIR)));
     filesystem.mkdirs(rootdir);
     HRegionInfo hri = new HRegionInfo(desc.getName(), startKey, endKey);
-    return HRegion.createHRegion(hri, rootdir, conf, desc);
+    return HRegion.createHRegion(hri, rootdir, conf);
   }
 
   protected HRegion openClosedRegion(final HRegion closedRegion)
@@ -658,10 +658,8 @@
   }
 
   protected void createRootAndMetaRegions() throws IOException {
-    root = HRegion.createHRegion(HRegionInfo.ROOT_REGIONINFO, testDir,
-        conf, HTableDescriptor.ROOT_TABLEDESC);
-    meta = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO, testDir,
-        conf, HTableDescriptor.META_TABLEDESC);
+    root = HRegion.createHRegion(HRegionInfo.ROOT_REGIONINFO, testDir, conf);
+    meta = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO, testDir, conf);
     HRegion.addRegionToMETA(root, meta);
   }
Index: src/test/java/org/apache/hadoop/hbase/TestScanMultipleVersions.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/TestScanMultipleVersions.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/TestScanMultipleVersions.java	(working copy)
@@ -68,8 +68,7 @@
     // Create the regions
     for (int i = 0; i < REGIONS.length; i++) {
       REGIONS[i] =
-        HRegion.createHRegion(this.INFOS[i], this.testDir, this.conf,
-            this.desc);
+        HRegion.createHRegion(this.INFOS[i], this.testDir, this.conf);
       // Insert data
       for (int j = 0; j < TIMESTAMPS.length; j++) {
         Put put = new Put(ROWS[i], TIMESTAMPS[j], null);
Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java	(working copy)
@@ -253,7 +253,7 @@
     HColumnDescriptor hcd = new HColumnDescriptor(CF);
     htd.addFamily(hcd);
     HRegionInfo hri = new HRegionInfo(htd.getName(), STARTROW, ENDROW);
-    HRegion.createHRegion(hri, testdir, TEST_UTIL.getConfiguration(), htd);
+    HRegion.createHRegion(hri, testdir, TEST_UTIL.getConfiguration());
     return HRegion.openHRegion(testdir, hri, wal, TEST_UTIL.getConfiguration());
   }
 }
\ No newline at end of file
Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java	(working copy)
@@ -71,7 +71,7 @@
     HRegionInfo info = new HRegionInfo(Bytes.toBytes(table), null, null, false);
     HRegion region = HRegion.createHRegion(info, HBaseTestingUtility.getTestDir(), TEST_UTIL
-        .getConfiguration(), htd);
+        .getConfiguration());
 
     List<String> rows = generateRandomWords(10, "row");
     List<String> allColumns = generateRandomWords(10, "column");
@@ -176,7 +176,7 @@
     HRegionInfo info = new HRegionInfo(Bytes.toBytes(table), null, null, false);
     HRegion region = HRegion.createHRegion(info, HBaseTestingUtility.getTestDir(), TEST_UTIL
-        .getConfiguration(), htd);
+        .getConfiguration());
 
     List<String> rows = generateRandomWords(10, "row");
     List<String> allColumns = generateRandomWords(100, "column");
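
Every test hunk in this patch makes the same mechanical change: the HTableDescriptor argument is dropped from HRegion.createHRegion. A minimal sketch of the new call pattern, assuming the patched three-argument overload is in place (the table and family names here are placeholders, not from the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.regionserver.HRegion;

    public class CreateHRegionSketch {
      // Sketch of the patched call pattern; "t" and "f" are placeholder names.
      static HRegion create(Path rootDir, Configuration conf) throws java.io.IOException {
        HTableDescriptor htd = new HTableDescriptor("t");
        htd.addFamily(new HColumnDescriptor("f"));
        HRegionInfo hri = new HRegionInfo(htd.getName(), null, null, false);
        // The descriptor is no longer passed in, and createHRegion no longer
        // persists the tableinfo file; callers that need it on disk write it
        // separately (see the FSUtils.createTableDescriptor calls below).
        return HRegion.createHRegion(hri, rootDir, conf);
      }
    }
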
Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java	(working copy)
@@ -69,7 +69,7 @@
         throw new IOException("Failed delete of " + path);
       }
     }
-    HRegion region = HRegion.createHRegion(hri, path, conf, htd);
+    HRegion region = HRegion.createHRegion(hri, path, conf);
 
     Increment odd = new Increment(rows[0]);
     Increment even = new Increment(rows[0]);
Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSelection.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSelection.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSelection.java	(working copy)
@@ -85,7 +85,7 @@
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
 
     HLog hlog = new HLog(fs, logdir, oldLogDir, conf);
-    HRegion.createHRegion(info, basedir, conf, htd);
+    HRegion.createHRegion(info, basedir, conf);
     Path tableDir = new Path(basedir, Bytes.toString(htd.getName()));
     HRegion region = new HRegion(tableDir, hlog, fs, conf, info, null);
Index: src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestOpenRegionHandler.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestOpenRegionHandler.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestOpenRegionHandler.java	(working copy)
@@ -237,7 +237,7 @@
         HConstants.EMPTY_END_ROW);
     HRegion region = HRegion.createHRegion(hri, HBaseTestingUtility.getTestDir(), HTU
-        .getConfiguration(), htd);
+        .getConfiguration());
     OpenRegionHandler handler = new OpenRegionHandler(server, rss, hri) {
       HRegion openRegion() {
         // Open region first, then remove znode as though it'd been hijacked.
Index: src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java	(working copy)
@@ -135,7 +135,7 @@
     HTableDescriptor htd = createBasic3FamilyHTD(tableNameStr);
     HRegion region2 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
+        hbaseRootDir, this.conf);
 
     final byte [] tableName = Bytes.toBytes(tableNameStr);
     final byte [] rowName = tableName;
 
@@ -195,7 +195,7 @@
     deleteDir(basedir);
     HTableDescriptor htd = createBasic3FamilyHTD(tableNameStr);
     HRegion region2 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
+        hbaseRootDir, this.conf);
     HLog wal = createWAL(this.conf);
     HRegion region = HRegion.openHRegion(hri, wal, this.conf);
     Path f = new Path(basedir, "hfile");
@@ -251,7 +251,7 @@
     final int countPerFamily = 10;
     final HTableDescriptor htd = createBasic3FamilyHTD(tableNameStr);
     HRegion region3 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
+        hbaseRootDir, this.conf);
     // Write countPerFamily edits into the three families.  Do a flush on one
    // of the families during the load of edits so its seqid is not same as
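
The TestWALReplay hunks above share one setup pattern: the region directory is laid down once (now without the descriptor), and the region is then opened over the WAL the test later replays. A hedged sketch of that pattern; createWAL in the hunks stands in for the test's own helper:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.regionserver.wal.HLog;

    public class WALReplaySetupSketch {
      // Sketch of the create-then-open pattern used by these tests.
      static HRegion createThenOpen(HRegionInfo hri, Path hbaseRootDir,
          Configuration conf, HLog wal) throws java.io.IOException {
        // Lay down the region directory structure on the filesystem...
        HRegion.createHRegion(hri, hbaseRootDir, conf);
        // ...then open the region over the WAL the test will replay against.
        return HRegion.openHRegion(hri, wal, conf);
      }
    }
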
@@ -356,7 +356,7 @@
     fs.mkdirs(new Path(basedir, hri.getEncodedName()));
     final HTableDescriptor htd = createBasic3FamilyHTD(tableNameStr);
     HRegion region2 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
+        hbaseRootDir, this.conf);
     final HLog wal = createWAL(this.conf);
     final byte[] tableName = Bytes.toBytes(tableNameStr);
Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java	(working copy)
@@ -75,7 +75,7 @@
     HTableDescriptor.META_TABLEDESC.setMemStoreFlushSize(64 * 1024 * 1024);
 
     HRegion mr = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO,
-        rootdir, this.conf, HTableDescriptor.META_TABLEDESC);
+        rootdir, this.conf);
     // Write rows for three tables 'A', 'B', and 'C'.
     for (char c = 'A'; c < 'D'; c++) {
       HTableDescriptor htd = new HTableDescriptor("" + c);
Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java	(working copy)
@@ -2796,7 +2796,7 @@
     htd.addFamily(hcd);
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
     Path path = new Path(DIR + "testBloomFilterSize");
-    region = HRegion.createHRegion(info, path, conf, htd);
+    region = HRegion.createHRegion(info, path, conf);
 
     int num_unique_rows = 10;
     int duplicate_multiplier =2;
@@ -2854,7 +2854,7 @@
     htd.addFamily(hcd);
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
     Path path = new Path(DIR + "testAllColumnsWithBloomFilter");
-    region = HRegion.createHRegion(info, path, conf, htd);
+    region = HRegion.createHRegion(info, path, conf);
 
     // For row:0, col:0: insert versions 1 through 5.
     byte row[] = Bytes.toBytes("row:" + 0);
@@ -2899,7 +2899,7 @@
     htd.addFamily(hcd);
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
     Path path = new Path(DIR + "TestDeleteRowWithBloomFilter");
-    region = HRegion.createHRegion(info, path, conf, htd);
+    region = HRegion.createHRegion(info, path, conf);
 
     // Insert some data
     byte row[] = Bytes.toBytes("row1");
@@ -3040,7 +3040,7 @@
         throw new IOException("Failed delete of " + path);
       }
     }
-    region = HRegion.createHRegion(info, path, conf, htd);
+    region = HRegion.createHRegion(info, path, conf);
   }
 
   /**
Index: src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java	(working copy)
@@ -35,7 +35,7 @@
     htd.addFamily(new HColumnDescriptor(family));
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
     HRegion region = HRegion.createHRegion(info, HBaseTestingUtility.
-        getTestDir(), TEST_UTIL.getConfiguration(), htd);
+        getTestDir(), TEST_UTIL.getConfiguration());
 
     List<String> rows = generateRandomWords(100, "row");
     List<String> columns = generateRandomWords(10000, "column");
@@ -91,7 +91,7 @@
     htd.addFamily(new HColumnDescriptor(family));
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
     HRegion region = HRegion.createHRegion(info, HBaseTestingUtility.
-        getTestDir(), TEST_UTIL.getConfiguration(), htd);
+        getTestDir(), TEST_UTIL.getConfiguration());
 
     List<String> rows = generateRandomWords(100, "row");
     List<String> columns = generateRandomWords(10000, "column");
Index: src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java	(working copy)
@@ -24,9 +24,9 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import junit.framework.Assert;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseTestCase;
@@ -93,7 +93,7 @@
     htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
     htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
-    this.region = HRegion.createHRegion(info, this.testDir, this.conf, htd);
+    this.region = HRegion.createHRegion(info, this.testDir, this.conf);
 
     // Insert first half
     for(byte [] ROW : ROWS_ONE) {
Index: src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java	(working copy)
@@ -77,7 +77,7 @@
     htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
     this.region = HRegion.createHRegion(info, testUtil.getTestDir(),
-        testUtil.getConfiguration(), htd);
+        testUtil.getConfiguration());
     addData();
   }
Index: src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java	(working copy)
@@ -63,7 +63,7 @@
   public void setupBasicMocks() {
     try {
       HRegion.createHRegion(FAKE_HRI, HBaseTestingUtility.getTestDir(),
-          HBaseConfiguration.create(), FAKE_TABLE);
+          HBaseConfiguration.create());
     } catch(IOException ioe) {
     }
 
Index: src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java	(working copy)
@@ -263,7 +263,7 @@
     Path rootdir = filesystem.makeQualified(
         new Path(conf.get(HConstants.HBASE_DIR)));
     HRegionInfo hriEnabled = new HRegionInfo(htdEnabled.getName(), null, null);
-    HRegion.createHRegion(hriEnabled, rootdir, conf, htdEnabled);
+    HRegion.createHRegion(hriEnabled, rootdir, conf);
 
     List<HRegionInfo> enabledRegions = TEST_UTIL.createMultiRegionsInMeta(
         TEST_UTIL.getConfiguration(), htdEnabled, SPLIT_KEYS);
@@ -272,7 +272,7 @@
     HTableDescriptor htdDisabled = new HTableDescriptor(disabledTable);
     htdDisabled.addFamily(new HColumnDescriptor(FAMILY));
     HRegionInfo hriDisabled = new HRegionInfo(htdDisabled.getName(), null, null);
-    HRegion.createHRegion(hriDisabled, rootdir, conf, htdDisabled);
+    HRegion.createHRegion(hriDisabled, rootdir, conf);
 
     List<HRegionInfo> disabledRegions = TEST_UTIL.createMultiRegionsInMeta(
         TEST_UTIL.getConfiguration(), htdDisabled, SPLIT_KEYS);
@@ -577,7 +577,7 @@
 
     HRegionInfo hriEnabled = new HRegionInfo(htdEnabled.getName(), null, null);
-    HRegion.createHRegion(hriEnabled, rootdir, conf, htdEnabled);
+    HRegion.createHRegion(hriEnabled, rootdir, conf);
 
     List<HRegionInfo> enabledRegions = TEST_UTIL.createMultiRegionsInMeta(
         TEST_UTIL.getConfiguration(), htdEnabled, SPLIT_KEYS);
@@ -586,7 +586,7 @@
     HTableDescriptor htdDisabled = new HTableDescriptor(disabledTable);
     htdDisabled.addFamily(new HColumnDescriptor(FAMILY));
     HRegionInfo hriDisabled = new HRegionInfo(htdDisabled.getName(), null, null);
-    HRegion.createHRegion(hriDisabled, rootdir, conf, htdDisabled);
+    HRegion.createHRegion(hriDisabled, rootdir, conf);
     List<HRegionInfo> disabledRegions = TEST_UTIL.createMultiRegionsInMeta(
         TEST_UTIL.getConfiguration(), htdDisabled, SPLIT_KEYS);
Index: src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java	(working copy)
@@ -93,7 +93,7 @@
     }
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
     Path path = new Path(DIR + callingMethod);
-    HRegion r = HRegion.createHRegion(info, path, conf, htd);
+    HRegion r = HRegion.createHRegion(info, path, conf);
     // this following piece is a hack. currently a coprocessorHost
     // is secretly loaded at OpenRegionHandler. we don't really
     // start a region server here, so just manually create cphost
Index: src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java	(working copy)
@@ -214,7 +214,7 @@
     }
     HRegionInfo info = new HRegionInfo(tableName, null, null, false);
     Path path = new Path(DIR + callingMethod);
-    HRegion r = HRegion.createHRegion(info, path, conf, htd);
+    HRegion r = HRegion.createHRegion(info, path, conf);
     // this following piece is a hack.
     RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf);
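
Both coprocessor tests create the region directly, so no region server ever attaches a coprocessor host; the tests wire one up by hand, as the comment in the hunk says. A sketch of that wiring, with the caveat that setCoprocessorHost and the load() signature are assumptions based on the surrounding test code rather than lines shown in this patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Coprocessor;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;

    public class CoprocessorWiringSketch {
      // Hand-wire a coprocessor host onto a directly created region.
      static void wire(HRegion r, Class<?> implClass, Configuration conf)
          throws java.io.IOException {
        RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf);
        r.setCoprocessorHost(host);   // assumed setter; no RS opened this region
        host.load(implClass, Coprocessor.PRIORITY_USER, conf); // assumed signature
      }
    }
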
Index: src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java	(working copy)
@@ -141,8 +141,7 @@
     HRegionInfo hri = createBasic3FamilyHRegionInfo(Bytes.toString(TEST_TABLE));
     final HTableDescriptor htd = createBasic3FamilyHTD(Bytes.toString(TEST_TABLE));
 
-    HRegion region2 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
+    HRegion region2 = HRegion.createHRegion(hri, hbaseRootDir, this.conf);
 
     Path basedir = new Path(this.hbaseRootDir, Bytes.toString(TEST_TABLE));
     deleteDir(basedir);
@@ -241,8 +240,7 @@
 
     final Configuration newConf = HBaseConfiguration.create(this.conf);
 
-    HRegion region2 = HRegion.createHRegion(hri,
-        hbaseRootDir, newConf,htd);
+    HRegion region2 = HRegion.createHRegion(hri, hbaseRootDir, newConf);
 
     //HLog wal = new HLog(this.fs, this.dir, this.oldLogDir, this.conf);
Index: src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java	(working copy)
@@ -137,7 +137,7 @@
       byte [] startKey, byte [] endKey, int firstRow, int nrows, Path rootdir)
   throws IOException {
     HRegionInfo hri = new HRegionInfo(desc.getName(), startKey, endKey);
-    HRegion region = HRegion.createHRegion(hri, rootdir, UTIL.getConfiguration(), desc);
+    HRegion region = HRegion.createHRegion(hri, rootdir, UTIL.getConfiguration());
    LOG.info("Created region " + region.getRegionNameAsString());
     for(int i = firstRow; i < firstRow + nrows; i++) {
       Put put = new Put(Bytes.toBytes("row_" + String.format("%1$05d", i)));
@@ -157,10 +157,10 @@
   throws IOException {
     HRegion root = HRegion.createHRegion(HRegionInfo.ROOT_REGIONINFO, rootdir,
-        UTIL.getConfiguration(), HTableDescriptor.ROOT_TABLEDESC);
+        UTIL.getConfiguration());
     HRegion meta = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO, rootdir,
-        UTIL.getConfiguration(), HTableDescriptor.META_TABLEDESC);
+        UTIL.getConfiguration());
     HRegion.addRegionToMETA(root, meta);
     for (HRegion r: regions) {
       HRegion.addRegionToMETA(meta, r);
Index: src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java	(revision 1135385)
+++ src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java	(working copy)
@@ -139,8 +139,7 @@
      */
     for (int i = 0; i < sourceRegions.length; i++) {
       regions[i] =
-        HRegion.createHRegion(this.sourceRegions[i], this.testDir, this.conf,
-            this.desc);
+        HRegion.createHRegion(this.sourceRegions[i], this.testDir, this.conf);
       /*
        * Insert data
        */
Index: src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java	(revision 1135384)
+++ src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java	(working copy)
@@ -699,7 +699,7 @@
         } catch (TableExistsException e) {
           failures.incrementAndGet();
         } catch (IOException e) {
-          throw new RuntimeException("Failed threaded create" + getName(), e);
+          throw new RuntimeException("Failed threaded create " + getName(), e);
        }
      }
    };
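
The core change follows in HRegion.java: createHRegion loses its HTableDescriptor parameter and, with it, the side effect of writing the tableinfo file. The caller-side consequence, sketched here with the names used in the MasterFileSystem hunk further down (bootstrap of the ROOT region):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.util.FSUtils;

    public class BootstrapOrderSketch {
      // Sketch of the new division of labor during bootstrap.
      static HRegion bootstrapRoot(Path rootDir, Configuration conf)
          throws java.io.IOException {
        // Step 1: persist the descriptor (formerly done inside createHRegion).
        FSUtils.createTableDescriptor(HTableDescriptor.ROOT_TABLEDESC, conf);
        // Step 2: create the region from info, root dir and conf alone.
        return HRegion.createHRegion(HRegionInfo.ROOT_REGIONINFO, rootDir, conf);
      }
    }
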
Index: src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java	(revision 1135385)
+++ src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java	(working copy)
@@ -199,8 +199,6 @@
   final Path regiondir;
   KeyValue.KVComparator comparator;
 
-  private Pair lastCompactInfo = null;
-
   /*
    * Data structure of write state flags used coordinating flushes,
    * compactions and closes.
@@ -2794,25 +2792,20 @@
    * @param info Info for region to create.
    * @param rootDir Root directory for HBase instance
    * @param conf
-   * @param hTableDescriptor
    * @return new HRegion
    *
    * @throws IOException
    */
   public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
-      final Configuration conf,
-      final HTableDescriptor hTableDescriptor)
-  throws IOException {
-    LOG.info("creating HRegion " + info.getTableNameAsString()
-        + " HTD == " + hTableDescriptor + " RootDir = " + rootDir +
-        " Table name == " + info.getTableNameAsString());
-
+      final Configuration conf)
+  throws IOException {
+    LOG.info("creating HRegion " + info.getTableNameAsString()
+        + " RootDir = " + rootDir);
     Path tableDir = HTableDescriptor.getTableDir(rootDir, info.getTableName());
     Path regionDir = HRegion.getRegionDir(tableDir, info.getEncodedName());
     FileSystem fs = FileSystem.get(conf);
     fs.mkdirs(regionDir);
-    FSUtils.createTableDescriptor(fs, hTableDescriptor, tableDir);
     HRegion region = HRegion.newHRegion(tableDir,
       new HLog(fs, new Path(regionDir, HConstants.HREGION_LOGDIR_NAME),
           new Path(regionDir, HConstants.HREGION_OLDLOGDIR_NAME), conf),
Index: src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java	(revision 1135385)
+++ src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java	(working copy)
@@ -33,19 +33,16 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.InvalidFamilyOperationException;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.master.metrics.MasterMetrics;
 import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogSplitter;
 import org.apache.hadoop.hbase.regionserver.wal.OrphanHLogAfterSplitException;
@@ -339,10 +336,10 @@
     setInfoFamilyCachingForRoot(false);
     HRegionInfo metaHRI = new HRegionInfo(HRegionInfo.FIRST_META_REGIONINFO);
     setInfoFamilyCachingForMeta(false);
-    HRegion root = HRegion.createHRegion(rootHRI, rd, c,
-        HTableDescriptor.ROOT_TABLEDESC);
-    HRegion meta = HRegion.createHRegion(metaHRI, rd, c,
-        HTableDescriptor.META_TABLEDESC);
+    FSUtils.createTableDescriptor(HTableDescriptor.ROOT_TABLEDESC, c);
+    HRegion root = HRegion.createHRegion(rootHRI, rd, c);
+    FSUtils.createTableDescriptor(HTableDescriptor.META_TABLEDESC, c);
+    HRegion meta = HRegion.createHRegion(metaHRI, rd, c);
     setInfoFamilyCachingForRoot(true);
     setInfoFamilyCachingForMeta(true);
     // Add first region from the META table to the ROOT region.
@@ -384,7 +381,7 @@
   }
 
   public void deleteTable(byte[] tableName) throws IOException {
-    fs.delete(new Path(rootdir, Bytes.toString(tableName)), true);
+    fs.delete(getTablePath(tableName), true);
   }
 
   public void updateRegionInfo(HRegionInfo region) {
@@ -399,23 +396,12 @@
     }
   }
 
-  /**
-   * Get table info path for a table.
-   * @param tableName
-   * @return Table info path
-   */
-  private Path getTableInfoPath(byte[] tableName) {
-    Path tablePath = new Path(this.rootdir, Bytes.toString(tableName));
-    Path tableInfoPath = new Path(tablePath, HConstants.TABLEINFO_NAME);
-    return tableInfoPath;
-  }
-
   /**
    * Get table info path for a table.
    * @param tableName
    * @return Table info path
    */
-  private Path getTablePath(byte[] tableName) {
+  Path getTablePath(byte[] tableName) {
     return new Path(this.rootdir, Bytes.toString(tableName));
   }
 
  /**
@@ -435,8 +421,10 @@
   /**
    * Create new HTableDescriptor in HDFS.
    * @param htableDescriptor
+   * @throws IOException
    */
-  public void createTableDescriptor(HTableDescriptor htableDescriptor) {
+  public void createTableDescriptor(HTableDescriptor htableDescriptor)
+  throws IOException {
     FSUtils.createTableDescriptor(htableDescriptor, conf);
   }
Index: src/main/java/org/apache/hadoop/hbase/master/HMaster.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/master/HMaster.java	(revision 1135385)
+++ src/main/java/org/apache/hadoop/hbase/master/HMaster.java	(working copy)
@@ -39,7 +39,6 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HServerLoad;
-
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
@@ -83,7 +82,6 @@
 import org.apache.hadoop.hbase.util.Sleeper;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.util.VersionInfo;
-import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.zookeeper.ClusterId;
 import org.apache.hadoop.hbase.zookeeper.ClusterStatusTracker;
 import org.apache.hadoop.hbase.zookeeper.RegionServerTracker;
@@ -902,15 +900,18 @@
       throw new MasterNotRunningException();
     }
     String tableName = hTableDescriptor.getNameAsString();
-    if(MetaReader.tableExists(catalogTracker, tableName)) {
-      throw new TableExistsException(tableName);
+    if (MetaReader.tableExists(catalogTracker, tableName)) {
+      throw new TableExistsException(tableName + " exists in .META. table");
     }
-
+    if (this.fileSystemManager.getFileSystem().
+        exists(this.fileSystemManager.getTablePath(hTableDescriptor.getName()))) {
+      throw new TableExistsException(tableName + " exists in the fs");
+    }
     if (cpHost != null) {
       cpHost.preCreateTable(hTableDescriptor, splitKeys);
     }
     HRegionInfo [] newRegions = getHRegionInfos(hTableDescriptor, splitKeys);
-    storeTableDescriptor(hTableDescriptor);
+    this.fileSystemManager.createTableDescriptor(hTableDescriptor);
     int timeout = conf.getInt("hbase.client.catalog.timeout", 10000);
     // Need META availability to create a table
     try {
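
HMaster.createTable now refuses a table that exists either in .META. or on the filesystem before any region is written. One motivation is the concurrent-create race exercised by the TestAdmin hunk earlier: all but one racing thread should fail fast with TableExistsException. A hedged client-side sketch of that race (HBaseAdmin usage as in the test; the table name and thread count are illustrative):

    import java.io.IOException;
    import java.util.concurrent.atomic.AtomicInteger;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableExistsException;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    public class ConcurrentCreateSketch {
      // N threads race to create the same table; exactly one should win and
      // the rest should see TableExistsException from the META or fs check.
      static int race(final Configuration conf, int threads) throws Exception {
        final HTableDescriptor htd = new HTableDescriptor("t"); // placeholder
        htd.addFamily(new HColumnDescriptor("f"));
        final AtomicInteger failures = new AtomicInteger(0);
        Thread[] ts = new Thread[threads];
        for (int i = 0; i < threads; i++) {
          ts[i] = new Thread() {
            public void run() {
              try {
                new HBaseAdmin(conf).createTable(htd);
              } catch (TableExistsException e) {
                failures.incrementAndGet(); // expected for all but one thread
              } catch (IOException e) {
                throw new RuntimeException(e);
              }
            }
          };
          ts[i].start();
        }
        for (Thread t : ts) t.join();
        return failures.get();
      }
    }
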
@@ -945,11 +946,6 @@
     return hRegionInfos;
   }
 
-  private void storeTableDescriptor(HTableDescriptor hTableDescriptor) {
-    FSUtils.createTableDescriptor(hTableDescriptor, conf);
-    //fileSystemManager.createTableDescriptor(hTableDescriptor);
-  }
-
   private synchronized void createTable(final HTableDescriptor hTableDescriptor,
       final HRegionInfo [] newRegions,
       final boolean sync)
@@ -969,7 +965,7 @@
 
       // 2. Create HRegion
       HRegion region = HRegion.createHRegion(newRegion,
-          fileSystemManager.getRootDir(), conf, hTableDescriptor);
+          this.fileSystemManager.getRootDir(), conf);
 
       // 3. Insert into META
       MetaEditor.addRegionToMeta(catalogTracker, region.getRegionInfo());
Index: src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/util/FSUtils.java	(revision 1135385)
+++ src/main/java/org/apache/hadoop/hbase/util/FSUtils.java	(working copy)
@@ -32,6 +32,7 @@
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
+import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -972,29 +973,32 @@
   /**
    * Create new HTableDescriptor in HDFS.
    * @param htableDescriptor
+   * @throws IOException
    */
   public static void createTableDescriptor(HTableDescriptor htableDescriptor,
-      Configuration conf) {
-    try {
-      Path tableDir = getTablePath(htableDescriptor.getName(), conf);
-      FileSystem fs = getCurrentFileSystem(conf);
-      createTableDescriptor(fs, htableDescriptor, tableDir);
-    } catch(IOException ioe) {
-      LOG.info("IOException while trying to create tableInfo in HDFS", ioe);
-    }
+      Configuration conf)
+  throws IOException {
+    Path tableDir = getTablePath(htableDescriptor.getName(), conf);
+    FileSystem fs = getCurrentFileSystem(conf);
+    createTableDescriptor(fs, htableDescriptor, tableDir);
   }
 
+  /**
+   * @param fs
+   * @param htableDescriptor
+   * @param tableDir
+   */
   public static void createTableDescriptor(FileSystem fs,
-      HTableDescriptor htableDescriptor,
-      Path tableDir) {
+      final HTableDescriptor htableDescriptor, final Path tableDir) {
     try {
       Path tableInfoPath = new Path(tableDir, HConstants.TABLEINFO_NAME);
-      LOG.info("Current tableInfoPath = " + tableInfoPath
-          + " tableDir = " + tableDir) ;
+      LOG.info("Current tableInfoPath = " + tableInfoPath
+          + " tableDir = " + tableDir);
       if (fs.exists(tableInfoPath) &&
           fs.getFileStatus(tableInfoPath).getLen() > 0) {
-        LOG.info("TableInfo already exists.. Skipping creation");
-        return;
+        throw new TableExistsException("TableInfo file " +
+            tableInfoPath.toString() + " exists");
       }
       writeTableDescriptor(fs, htableDescriptor, tableDir);
     } catch(IOException ioe) {
@@ -1003,8 +1007,8 @@
   }
 
   private static void writeTableDescriptor(FileSystem fs,
-      HTableDescriptor hTableDescriptor,
-      Path tableDir) throws IOException {
+      HTableDescriptor hTableDescriptor, Path tableDir)
+  throws IOException {
     // Create in tmpdir and then move into place in case we crash after
     // create but before close. If we don't successfully close the file,
     // subsequent region reopens will fail the below because create is
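
With the FSUtils change, asking to create a tableinfo file that already exists (and is non-empty) is meant to be an error rather than a silent skip. A hedged usage sketch, assuming the TableExistsException actually propagates out of the two-argument overload now that it declares IOException:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableExistsException;
    import org.apache.hadoop.hbase.util.FSUtils;

    public class CreateDescriptorSketch {
      // Returns true if the descriptor was written, false if a non-empty
      // tableinfo was already on disk. TableExistsException extends
      // IOException, so it can be caught separately here.
      static boolean createIfAbsent(HTableDescriptor htd, Configuration conf)
          throws IOException {
        try {
          FSUtils.createTableDescriptor(htd, conf);
          return true;
        } catch (TableExistsException tee) {
          return false; // tableinfo already present; caller decides what next
        }
      }
    }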