diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
index 3a65fc5..745896a 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
@@ -152,7 +152,7 @@ public class CoprocessorClassLoader extends ClassLoaderBase {
     synchronized (parentDirLockSet) {
       if (!parentDirLockSet.contains(parentDirStr)) {
         Path parentDir = new Path(parentDirStr);
-        FileSystem fs = parentDir.getFileSystem(conf);
+        FileSystem fs = FileSystem.getLocal(conf);
         fs.delete(parentDir, true); // it's ok if the dir doesn't exist now
         parentDirLockSet.add(parentDirStr);
         if (!fs.mkdirs(parentDir) && !fs.getFileStatus(parentDir).isDir()) {
diff --git hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index d95b2bd..d5e5f9c 100644
--- hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -27,6 +27,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 
 /**
@@ -39,6 +40,24 @@ import org.apache.hadoop.fs.Path;
 public class HBaseCommonTestingUtility {
   protected static final Log LOG = LogFactory.getLog(HBaseCommonTestingUtility.class);
 
+  protected Configuration conf;
+
+  public HBaseCommonTestingUtility() {
+    this(HBaseConfiguration.create());
+  }
+
+  public HBaseCommonTestingUtility(Configuration conf) {
+    this.conf = conf;
+  }
+
+  /**
+   * Returns this class's instance of {@link Configuration}.
+   * @return Instance of Configuration.
+   */
+  public Configuration getConfiguration() {
+    return this.conf;
+  }
+
   /**
    * System property key to get base test directory value
    */
@@ -95,9 +114,19 @@ public class HBaseCommonTestingUtility {
     // Set this property so if mapreduce jobs run, they will use this as their home dir.
     System.setProperty("test.build.dir", this.dataTestDir.toString());
     if (deleteOnExit()) this.dataTestDir.deleteOnExit();
+
+    createSubDir("hbase.local.dir", testPath, "hbase-local-dir");
+
     return testPath;
   }
 
+  protected void createSubDir(String propertyName, Path parent, String subDirName){
+    Path newPath= new Path(parent, subDirName);
+    File newDir = new File(newPath.toString()).getAbsoluteFile();
+    if (deleteOnExit()) newDir.deleteOnExit();
+    conf.set(propertyName, newDir.getAbsolutePath());
+  }
+
   /**
    * @return True if we should delete testing dirs on exit.
   */
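The two hunks above move the coprocessor staging directory onto the local filesystem and teach HBaseCommonTestingUtility to own a Configuration and to point "hbase.local.dir" at a per-test subdirectory. Below is a minimal sketch of how a test is expected to pick up that wiring; the class, methods and property name come from the hunks above, while the test body itself is illustrative only, not part of the patch.

    // Illustrative sketch only (not part of the patch).
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseCommonTestingUtility;

    public class LocalDirWiringSketch {
      public static void main(String[] args) {
        HBaseCommonTestingUtility util = new HBaseCommonTestingUtility();
        Configuration conf = util.getConfiguration(); // same instance the utility mutates
        util.getDataTestDir();                        // sets up the data test dir and the hbase-local-dir subdir
        // Code under test that reads "hbase.local.dir" now sees a per-test directory.
        System.out.println(conf.get("hbase.local.dir"));
      }
    }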
diff --git hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
index 91dd87f..942c7e3 100644
--- hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
+++ hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
@@ -30,7 +30,6 @@ import java.io.FileOutputStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.io.IOUtils;
 import org.junit.Test;
@@ -41,9 +40,12 @@ import org.junit.experimental.categories.Category;
  */
 @Category(SmallTests.class)
 public class TestCoprocessorClassLoader {
-  private static final Configuration conf = HBaseConfiguration.create();
   private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
+  private static final Configuration conf = TEST_UTIL.getConfiguration();
+  static {
+    TEST_UTIL.getDataTestDir(); // prepare data test dir and hbase local dir
+  }
 
   @Test
   public void testCleanupOldJars() throws Exception {
diff --git hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
index dfe439b..11a545b 100644
--- hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
+++ hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
@@ -27,7 +27,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -39,9 +38,8 @@ import org.junit.experimental.categories.Category;
 public class TestDynamicClassLoader {
   private static final Log LOG = LogFactory.getLog(TestDynamicClassLoader.class);
 
-  private static final Configuration conf = HBaseConfiguration.create();
-
   private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
+  private static final Configuration conf = TEST_UTIL.getConfiguration();
 
   static {
     conf.set("hbase.dynamic.jars.dir", TEST_UTIL.getDataTestDir().toString());
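Both test classes above now derive their Configuration from the shared testing utility instead of calling HBaseConfiguration.create(), so directory properties set by the utility are visible to the code under test. The same pattern, restated as a standalone sketch (the test class name here is hypothetical; the field and static-initializer shape mirrors the hunks above):

    // Hypothetical test class illustrating the shared-Configuration pattern above.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseCommonTestingUtility;

    public class SomeClassLoaderTest {
      private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
      // Reuse the utility's Configuration rather than creating a detached one.
      private static final Configuration conf = TEST_UTIL.getConfiguration();

      static {
        // Properties the test needs point into the utility's data test dir.
        conf.set("hbase.dynamic.jars.dir", TEST_UTIL.getDataTestDir().toString());
      }
    }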
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index e6b031e..78ae3f6 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -71,7 +71,6 @@ import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-import org.apache.hadoop.hbase.regionserver.compactions.Compactor;
 import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
 import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
 import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
@@ -819,7 +818,7 @@ public class HStore implements Store {
         .build();
     return w;
   }
- 
+
   private HFileContext createFileContext(Compression.Algorithm compression,
       boolean includeMVCCReadpoint, boolean includesTag) {
     if (compression == null) {
@@ -1015,6 +1014,7 @@ public class HStore implements Store {
     for (Path newFile : newFiles) {
       // Create storefile around what we wrote with a reader on it.
       StoreFile sf = createStoreFileAndReader(newFile);
+      sf.closeReader(true);
       sfs.add(sf);
     }
     return sfs;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 737bb8c..99a8f02 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -125,7 +125,6 @@ import org.apache.zookeeper.ZooKeeper.States;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class HBaseTestingUtility extends HBaseCommonTestingUtility {
-  protected Configuration conf;
   private MiniZooKeeperCluster zkCluster = null;
 
   /**
@@ -228,7 +227,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
   }
 
   public HBaseTestingUtility(Configuration conf) {
-    this.conf = conf;
+    super(conf);
 
     // a hbase checksum verification failure will cause unit tests to fail
     ChecksumUtil.generateExceptionForChecksumFailureForTest(true);
@@ -271,8 +270,9 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * Configuration c = new Configuration(INSTANCE.getConfiguration());
    * @return Instance of Configuration.
    */
+  @Override
   public Configuration getConfiguration() {
-    return this.conf;
+    return super.getConfiguration();
   }
 
   public void setHBaseCluster(HBaseCluster hbaseCluster) {
@@ -317,19 +317,9 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
       "mapred.local.dir",
       testPath, "mapred-local-dir");
 
-    createSubDir(
-      "hbase.local.dir",
-      testPath, "hbase-local-dir");
     return testPath;
   }
 
-  private void createSubDir(String propertyName, Path parent, String subDirName){
-    Path newPath= new Path(parent, subDirName);
-    File newDir = new File(newPath.toString()).getAbsoluteFile();
-    if (deleteOnExit()) newDir.deleteOnExit();
-    conf.set(propertyName, newDir.getAbsolutePath());
-  }
-
   private void createSubDirAndSystemProperty(
     String propertyName, Path parent, String subDirName){
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
index 49d2324..b5cfa9a 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
@@ -33,11 +33,11 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.Stoppable;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
@@ -343,7 +343,7 @@ public class TestHFileArchiving {
     final long TEST_TIME = 20 * 1000;
 
     Configuration conf = UTIL.getMiniHBaseCluster().getMaster().getConfiguration();
-    Path rootDir = UTIL.getDataTestDir("testCleaningRace");
+    Path rootDir = UTIL.getDataTestDirOnTestFS("testCleaningRace");
     FileSystem fs = UTIL.getTestFileSystem();
     Path archiveDir = new Path(rootDir, HConstants.HFILE_ARCHIVE_DIRECTORY);
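Several of the remaining test hunks swap getDataTestDir() for getDataTestDirOnTestFS() wherever the resulting path is handed to the test filesystem. A short sketch of the distinction, assuming only what the hunks above show (getDataTestDirOnTestFS paired with getTestFileSystem); the surrounding helper method is hypothetical:

    // Hypothetical helper contrasting the two test-directory methods used above.
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseTestingUtility;

    public class TestDirSketch {
      static void prepareDirs(HBaseTestingUtility util) throws Exception {
        // Local path: suitable for plain java.io work on the local disk.
        Path localDir = util.getDataTestDir("sketch");
        // Path on the filesystem the test actually operates against (e.g. a MiniDFSCluster).
        Path fsDir = util.getDataTestDirOnTestFS("sketch");
        FileSystem fs = util.getTestFileSystem();
        fs.mkdirs(fsDir); // use the path that belongs to this filesystem, not localDir
      }
    }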
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
index c9e3a64..f335093 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
@@ -49,7 +49,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.hbase.HConstants;
 
 /**
  * This class runs performance benchmarks for {@link HLog}.
@@ -60,7 +59,7 @@ import org.apache.hadoop.hbase.HConstants;
 public final class HLogPerformanceEvaluation extends Configured implements Tool {
   static final Log LOG = LogFactory.getLog(HLogPerformanceEvaluation.class.getName());
 
-  private final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private HBaseTestingUtility TEST_UTIL;
 
   static final String TABLE_NAME = "HLogPerformanceEvaluation";
   static final String QUALIFIER_PREFIX = "q";
@@ -70,6 +69,12 @@ public final class HLogPerformanceEvaluation extends Configured implements Tool
   private int valueSize = 512;
   private int keySize = 16;
 
+  @Override
+  public void setConf(Configuration conf) {
+    super.setConf(conf);
+    TEST_UTIL = new HBaseTestingUtility(conf);
+  }
+
   /**
    * Perform HLog.append() of Put object, for the number of iterations requested.
    * Keys and Vaues are generated randomly, the number of column families,
@@ -91,6 +96,7 @@ public final class HLogPerformanceEvaluation extends Configured implements Tool
       this.htd = htd;
     }
 
+    @Override
     public void run() {
       byte[] key = new byte[keySize];
       byte[] value = new byte[valueSize];
@@ -181,7 +187,7 @@ public final class HLogPerformanceEvaluation extends Configured implements Tool
     LOG.info("FileSystem: " + fs);
     try {
       if (rootRegionDir == null) {
-        rootRegionDir = TEST_UTIL.getDataTestDir("HLogPerformanceEvaluation");
+        rootRegionDir = TEST_UTIL.getDataTestDirOnTestFS("HLogPerformanceEvaluation");
       }
       rootRegionDir = rootRegionDir.makeQualified(fs);
       cleanRegionRootDir(fs, rootRegionDir);
@@ -190,6 +196,7 @@ public final class HLogPerformanceEvaluation extends Configured implements Tool
       final long whenToRoll = roll;
       HLog hlog = new FSHLog(fs, rootRegionDir, "wals", getConf()) {
         int appends = 0;
+        @Override
         protected void doWrite(HRegionInfo info, HLogKey logKey, WALEdit logEdit,
             HTableDescriptor htd)
         throws IOException {
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
index b2cc947..6ad6daf 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
@@ -18,14 +18,13 @@
 
 package org.apache.hadoop.hbase.regionserver.wal;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import
org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -51,7 +50,7 @@ public class TestDurability { private static FileSystem FS; private static MiniDFSCluster CLUSTER; private static Configuration CONF; - private static final Path DIR = TEST_UTIL.getDataTestDir("TestDurability"); + private static Path DIR; private static byte[] FAMILY = Bytes.toBytes("family"); private static byte[] ROW = Bytes.toBytes("row"); @@ -66,6 +65,7 @@ public class TestDurability { CLUSTER = TEST_UTIL.getDFSCluster(); FS = CLUSTER.getFileSystem(); + DIR = TEST_UTIL.getDataTestDirOnTestFS("TestDurability"); } @AfterClass @@ -162,7 +162,7 @@ public class TestDurability { throw new IOException("Failed delete of " + path); } } - return HRegion.createHRegion(info, path, HBaseConfiguration.create(), htd, log); + return HRegion.createHRegion(info, path, CONF, htd, log); } } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index 5ea1c8c..2eac4ff 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -235,12 +235,14 @@ public class TestAccessController extends SecureTestUtil { assertEquals(0, AccessControlLists.getTablePermissions(conf, TEST_TABLE.getTableName()).size()); } + @Override public void verifyAllowed(PrivilegedExceptionAction action, User... users) throws Exception { for (User user : users) { verifyAllowed(user, action); } } + @Override public void verifyDenied(User user, PrivilegedExceptionAction... actions) throws Exception { for (PrivilegedExceptionAction action : actions) { try { @@ -291,6 +293,7 @@ public class TestAccessController extends SecureTestUtil { } } + @Override public void verifyDenied(PrivilegedExceptionAction action, User... 
users) throws Exception { for (User user : users) { verifyDenied(user, action); @@ -300,6 +303,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testTableCreate() throws Exception { PrivilegedExceptionAction createTable = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testnewtable")); htd.addFamily(new HColumnDescriptor(TEST_FAMILY)); @@ -318,6 +322,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testTableModify() throws Exception { PrivilegedExceptionAction modifyTable = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { HTableDescriptor htd = new HTableDescriptor(TEST_TABLE.getTableName()); htd.addFamily(new HColumnDescriptor(TEST_FAMILY)); @@ -335,6 +340,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testTableDelete() throws Exception { PrivilegedExceptionAction deleteTable = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER .preDeleteTable(ObserverContext.createAndPrepare(CP_ENV, null), TEST_TABLE.getTableName()); @@ -350,6 +356,7 @@ public class TestAccessController extends SecureTestUtil { public void testAddColumn() throws Exception { final HColumnDescriptor hcd = new HColumnDescriptor("fam_new"); PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preAddColumn(ObserverContext.createAndPrepare(CP_ENV, null), TEST_TABLE.getTableName(), hcd); @@ -366,6 +373,7 @@ public class TestAccessController extends SecureTestUtil { final HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY); hcd.setMaxVersions(10); PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preModifyColumn(ObserverContext.createAndPrepare(CP_ENV, null), TEST_TABLE.getTableName(), hcd); @@ -380,6 +388,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testDeleteColumn() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preDeleteColumn(ObserverContext.createAndPrepare(CP_ENV, null), TEST_TABLE.getTableName(), TEST_FAMILY); @@ -394,6 +403,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testTableDisable() throws Exception { PrivilegedExceptionAction disableTable = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preDisableTable(ObserverContext.createAndPrepare(CP_ENV, null), TEST_TABLE.getTableName()); @@ -402,6 +412,7 @@ public class TestAccessController extends SecureTestUtil { }; PrivilegedExceptionAction disableAclTable = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preDisableTable(ObserverContext.createAndPrepare(CP_ENV, null), AccessControlLists.ACL_TABLE_NAME); @@ -419,6 +430,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testTableEnable() throws Exception { PrivilegedExceptionAction enableTable = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER .preEnableTable(ObserverContext.createAndPrepare(CP_ENV, null), TEST_TABLE.getTableName()); @@ -442,6 +454,7 @@ public class TestAccessController 
extends SecureTestUtil { final Map.Entry firstRegion = regions.entrySet().iterator().next(); final ServerName server = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName(); PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preMove(ObserverContext.createAndPrepare(CP_ENV, null), firstRegion.getKey(), server, server); @@ -465,6 +478,7 @@ public class TestAccessController extends SecureTestUtil { final Map.Entry firstRegion = regions.entrySet().iterator().next(); PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preAssign(ObserverContext.createAndPrepare(CP_ENV, null), firstRegion.getKey()); @@ -488,6 +502,7 @@ public class TestAccessController extends SecureTestUtil { final Map.Entry firstRegion = regions.entrySet().iterator().next(); PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preUnassign(ObserverContext.createAndPrepare(CP_ENV, null), firstRegion.getKey(), false); @@ -511,6 +526,7 @@ public class TestAccessController extends SecureTestUtil { final Map.Entry firstRegion = regions.entrySet().iterator().next(); PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preRegionOffline(ObserverContext.createAndPrepare(CP_ENV, null), firstRegion.getKey()); @@ -525,6 +541,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testBalance() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preBalance(ObserverContext.createAndPrepare(CP_ENV, null)); return null; @@ -538,6 +555,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testBalanceSwitch() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preBalanceSwitch(ObserverContext.createAndPrepare(CP_ENV, null), true); return null; @@ -551,6 +569,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testShutdown() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preShutdown(ObserverContext.createAndPrepare(CP_ENV, null)); return null; @@ -564,6 +583,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testStopMaster() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preStopMaster(ObserverContext.createAndPrepare(CP_ENV, null)); return null; @@ -582,6 +602,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testSplit() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preSplit(ObserverContext.createAndPrepare(RCP_ENV, null)); return null; @@ -595,6 +616,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testSplitWithSplitRow() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preSplit( 
ObserverContext.createAndPrepare(RCP_ENV, null), @@ -611,6 +633,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testFlush() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preFlush(ObserverContext.createAndPrepare(RCP_ENV, null)); return null; @@ -624,6 +647,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testCompact() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preCompact(ObserverContext.createAndPrepare(RCP_ENV, null), null, null, ScanType.COMPACT_RETAIN_DELETES); @@ -638,6 +662,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testPreCompactSelection() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preCompactSelection(ObserverContext.createAndPrepare(RCP_ENV, null), null, null); return null; @@ -662,6 +687,7 @@ public class TestAccessController extends SecureTestUtil { public void testRead() throws Exception { // get action PrivilegedExceptionAction getAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Get g = new Get(Bytes.toBytes("random_row")); g.addFamily(TEST_FAMILY); @@ -678,6 +704,7 @@ public class TestAccessController extends SecureTestUtil { // action for scanning PrivilegedExceptionAction scanAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Scan s = new Scan(); s.addFamily(TEST_FAMILY); @@ -707,6 +734,7 @@ public class TestAccessController extends SecureTestUtil { public void testWrite() throws Exception { // put action PrivilegedExceptionAction putAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Put p = new Put(Bytes.toBytes("random_row")); p.add(TEST_FAMILY, Bytes.toBytes("Qualifier"), Bytes.toBytes(1)); @@ -723,6 +751,7 @@ public class TestAccessController extends SecureTestUtil { // delete action PrivilegedExceptionAction deleteAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Delete d = new Delete(Bytes.toBytes("random_row")); d.deleteFamily(TEST_FAMILY); @@ -739,6 +768,7 @@ public class TestAccessController extends SecureTestUtil { // increment action PrivilegedExceptionAction incrementAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Increment inc = new Increment(Bytes.toBytes("random_row")); inc.addColumn(TEST_FAMILY, Bytes.toBytes("Qualifier"), 1); @@ -758,6 +788,7 @@ public class TestAccessController extends SecureTestUtil { public void testReadWrite() throws Exception { // action for checkAndDelete PrivilegedExceptionAction checkAndDeleteAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Delete d = new Delete(Bytes.toBytes("random_row")); d.deleteFamily(TEST_FAMILY); @@ -775,6 +806,7 @@ public class TestAccessController extends SecureTestUtil { // action for checkAndPut() PrivilegedExceptionAction checkAndPut = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Put p = new Put(Bytes.toBytes("random_row")); p.add(TEST_FAMILY, Bytes.toBytes("Qualifier"), Bytes.toBytes(1)); @@ -794,13 +826,14 @@ public class TestAccessController extends SecureTestUtil { 
@Test public void testBulkLoad() throws Exception { FileSystem fs = TEST_UTIL.getTestFileSystem(); - final Path dir = TEST_UTIL.getDataTestDir("testBulkLoad"); + final Path dir = TEST_UTIL.getDataTestDirOnTestFS("testBulkLoad"); fs.mkdirs(dir); //need to make it globally writable //so users creating HFiles have write permissions fs.setPermission(dir, FsPermission.valueOf("-rwxrwxrwx")); PrivilegedExceptionAction bulkLoadAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { int numRows = 3; @@ -907,6 +940,7 @@ public class TestAccessController extends SecureTestUtil { public void testAppend() throws Exception { PrivilegedExceptionAction appendAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { byte[] row = Bytes.toBytes("random_row"); byte[] qualifier = Bytes.toBytes("q"); @@ -933,6 +967,7 @@ public class TestAccessController extends SecureTestUtil { public void testGrantRevoke() throws Exception { PrivilegedExceptionAction grantAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { @@ -949,6 +984,7 @@ public class TestAccessController extends SecureTestUtil { }; PrivilegedExceptionAction revokeAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { @@ -965,6 +1001,7 @@ public class TestAccessController extends SecureTestUtil { }; PrivilegedExceptionAction getPermissionsAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { @@ -1016,6 +1053,7 @@ public class TestAccessController extends SecureTestUtil { // prepare actions: PrivilegedExceptionAction putActionAll = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Put p = new Put(Bytes.toBytes("a")); p.add(family1, qualifier, Bytes.toBytes("v1")); @@ -1030,6 +1068,7 @@ public class TestAccessController extends SecureTestUtil { } }; PrivilegedExceptionAction putAction1 = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Put p = new Put(Bytes.toBytes("a")); p.add(family1, qualifier, Bytes.toBytes("v1")); @@ -1043,6 +1082,7 @@ public class TestAccessController extends SecureTestUtil { } }; PrivilegedExceptionAction putAction2 = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Put p = new Put(Bytes.toBytes("a")); p.add(family2, qualifier, Bytes.toBytes("v2")); @@ -1056,6 +1096,7 @@ public class TestAccessController extends SecureTestUtil { } }; PrivilegedExceptionAction getActionAll = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Get g = new Get(Bytes.toBytes("random_row")); g.addFamily(family1); @@ -1070,6 +1111,7 @@ public class TestAccessController extends SecureTestUtil { } }; PrivilegedExceptionAction getAction1 = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Get g = new Get(Bytes.toBytes("random_row")); g.addFamily(family1); @@ -1083,6 +1125,7 @@ public class TestAccessController extends SecureTestUtil { } }; PrivilegedExceptionAction getAction2 = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Get g = new Get(Bytes.toBytes("random_row")); g.addFamily(family2); @@ -1096,6 +1139,7 @@ public class TestAccessController 
extends SecureTestUtil { } }; PrivilegedExceptionAction deleteActionAll = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Delete d = new Delete(Bytes.toBytes("random_row")); d.deleteFamily(family1); @@ -1110,6 +1154,7 @@ public class TestAccessController extends SecureTestUtil { } }; PrivilegedExceptionAction deleteAction1 = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Delete d = new Delete(Bytes.toBytes("random_row")); d.deleteFamily(family1); @@ -1123,6 +1168,7 @@ public class TestAccessController extends SecureTestUtil { } }; PrivilegedExceptionAction deleteAction2 = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Delete d = new Delete(Bytes.toBytes("random_row")); d.deleteFamily(family2); @@ -1327,6 +1373,7 @@ public class TestAccessController extends SecureTestUtil { User user = User.createUserForTesting(TEST_UTIL.getConfiguration(), "user", new String[0]); PrivilegedExceptionAction getQualifierAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Get g = new Get(Bytes.toBytes("random_row")); g.addColumn(family1, qualifier); @@ -1340,6 +1387,7 @@ public class TestAccessController extends SecureTestUtil { } }; PrivilegedExceptionAction putQualifierAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Put p = new Put(Bytes.toBytes("random_row")); p.add(family1, qualifier, Bytes.toBytes("v1")); @@ -1353,6 +1401,7 @@ public class TestAccessController extends SecureTestUtil { } }; PrivilegedExceptionAction deleteQualifierAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Delete d = new Delete(Bytes.toBytes("random_row")); d.deleteColumn(family1, qualifier); @@ -1832,6 +1881,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testStopRegionServer() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preStopRegionServer(ObserverContext.createAndPrepare(RSCP_ENV, null)); return null; @@ -1845,6 +1895,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testOpenRegion() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preOpen(ObserverContext.createAndPrepare(RCP_ENV, null)); return null; @@ -1858,6 +1909,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testCloseRegion() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preClose(ObserverContext.createAndPrepare(RCP_ENV, null), false); return null; @@ -1871,6 +1923,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testSnapshot() throws Exception { PrivilegedExceptionAction snapshotAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preSnapshot(ObserverContext.createAndPrepare(CP_ENV, null), null, null); @@ -1879,6 +1932,7 @@ public class TestAccessController extends SecureTestUtil { }; PrivilegedExceptionAction deleteAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preDeleteSnapshot(ObserverContext.createAndPrepare(CP_ENV, null), null); @@ -1887,6 +1941,7 
@@ public class TestAccessController extends SecureTestUtil { }; PrivilegedExceptionAction restoreAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preRestoreSnapshot(ObserverContext.createAndPrepare(CP_ENV, null), null, null); @@ -1895,6 +1950,7 @@ public class TestAccessController extends SecureTestUtil { }; PrivilegedExceptionAction cloneAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { ACCESS_CONTROLLER.preCloneSnapshot(ObserverContext.createAndPrepare(CP_ENV, null), null, null); @@ -1956,6 +2012,7 @@ public class TestAccessController extends SecureTestUtil { .iterator().next(); PrivilegedExceptionAction moveAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { admin.move(firstRegion.getKey().getEncodedNameAsBytes(), Bytes.toBytes(newRs.getServerName().getServerName())); @@ -1981,6 +2038,7 @@ public class TestAccessController extends SecureTestUtil { // Verify write permission for user "admin2" who has the global // permissions. PrivilegedExceptionAction putAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { Put put = new Put(Bytes.toBytes("test")); put.add(TEST_FAMILY, Bytes.toBytes("qual"), Bytes.toBytes("value")); @@ -2011,6 +2069,7 @@ public class TestAccessController extends SecureTestUtil { } PrivilegedExceptionAction listTablesAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration()); try { @@ -2023,6 +2082,7 @@ public class TestAccessController extends SecureTestUtil { }; PrivilegedExceptionAction getTableDescAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration()); try { @@ -2058,6 +2118,7 @@ public class TestAccessController extends SecureTestUtil { } PrivilegedExceptionAction deleteTableAction = new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration()); try { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java index 1334735..8178ab1 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.util; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertNotEquals; import java.io.File; import java.io.IOException; @@ -52,7 +52,7 @@ import org.junit.experimental.categories.Category; public class TestFSUtils { /** * Test path compare and prefix checking. 
- * @throws IOException + * @throws IOException */ @Test public void testMatchingTail() throws IOException { @@ -115,7 +115,7 @@ public class TestFSUtils { if (cluster != null) cluster.shutdown(); } } - + private void WriteDataToHDFS(FileSystem fs, Path file, int dataSize) throws Exception { FSDataOutputStream out = fs.create(file); @@ -123,14 +123,14 @@ public class TestFSUtils { out.write(data, 0, dataSize); out.close(); } - + @Test public void testcomputeHDFSBlocksDistribution() throws Exception { HBaseTestingUtility htu = new HBaseTestingUtility(); final int DEFAULT_BLOCK_SIZE = 1024; htu.getConfiguration().setLong("dfs.block.size", DEFAULT_BLOCK_SIZE); MiniDFSCluster cluster = null; Path testFile = null; - + try { // set up a cluster with 3 nodes String hosts[] = new String[] { "host1", "host2", "host3" }; @@ -141,7 +141,7 @@ public class TestFSUtils { // create a file with two blocks testFile = new Path("/test1.txt"); WriteDataToHDFS(fs, testFile, 2*DEFAULT_BLOCK_SIZE); - + // given the default replication factor is 3, the same as the number of // datanodes; the locality index for each host should be 100%, // or getWeight for each host should be the same as getUniqueBlocksWeights @@ -173,9 +173,9 @@ public class TestFSUtils { FileSystem fs = cluster.getFileSystem(); // create a file with three blocks - testFile = new Path("/test2.txt"); + testFile = new Path("/test2.txt"); WriteDataToHDFS(fs, testFile, 3*DEFAULT_BLOCK_SIZE); - + // given the default replication factor is 3, we will have total of 9 // replica of blocks; thus the host with the highest weight should have // weight == 3 * DEFAULT_BLOCK_SIZE @@ -199,7 +199,7 @@ public class TestFSUtils { htu.shutdownMiniDFSCluster(); } - + try { // set up a cluster with 4 nodes String hosts[] = new String[] { "host1", "host2", "host3", "host4" }; @@ -208,9 +208,9 @@ public class TestFSUtils { FileSystem fs = cluster.getFileSystem(); // create a file with one block - testFile = new Path("/test3.txt"); + testFile = new Path("/test3.txt"); WriteDataToHDFS(fs, testFile, DEFAULT_BLOCK_SIZE); - + // given the default replication factor is 3, we will have total of 3 // replica of blocks; thus there is one host without weight final long maxTime = System.currentTimeMillis() + 2000; @@ -257,7 +257,7 @@ public class TestFSUtils { fs.delete(p, true); } } - + @Test public void testDeleteAndExists() throws Exception { HBaseTestingUtility htu = new HBaseTestingUtility(); @@ -292,30 +292,30 @@ public class TestFSUtils { public void testRenameAndSetModifyTime() throws Exception { HBaseTestingUtility htu = new HBaseTestingUtility(); Configuration conf = htu.getConfiguration(); - + MiniDFSCluster cluster = htu.startMiniDFSCluster(1); assertTrue(FSUtils.isHDFS(conf)); FileSystem fs = FileSystem.get(conf); - Path testDir = htu.getDataTestDir("testArchiveFile"); - + Path testDir = htu.getDataTestDirOnTestFS("testArchiveFile"); + String file = UUID.randomUUID().toString(); Path p = new Path(testDir, file); FSDataOutputStream out = fs.create(p); out.close(); assertTrue("The created file should be present", FSUtils.isExists(fs, p)); - + long expect = System.currentTimeMillis() + 1000; assertNotEquals(expect, fs.getFileStatus(p).getModificationTime()); - + ManualEnvironmentEdge mockEnv = new ManualEnvironmentEdge(); mockEnv.setValue(expect); EnvironmentEdgeManager.injectEdge(mockEnv); - + String dstFile = UUID.randomUUID().toString(); Path dst = new Path(testDir , dstFile); - + assertTrue(FSUtils.renameAndSetModifyTime(fs, p, dst)); assertFalse("The moved file 
should not be present", FSUtils.isExists(fs, p)); assertTrue("The dst file should be present", FSUtils.isExists(fs, dst));