diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
index cba3d36..70c8020 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.io.encoding;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -34,11 +35,11 @@ import org.apache.hadoop.hbase.util.Bytes;
 public enum DataBlockEncoding {
 
   /** Disable data block encoding. */
-  NONE(0, null),
+  NONE(0),
   // id 1 is reserved for the BITSET algorithm to be added later
-  PREFIX(2, createEncoder("org.apache.hadoop.hbase.io.encoding.PrefixKeyDeltaEncoder")),
-  DIFF(3, createEncoder("org.apache.hadoop.hbase.io.encoding.DiffKeyDeltaEncoder")),
-  FAST_DIFF(4, createEncoder("org.apache.hadoop.hbase.io.encoding.FastDiffDeltaEncoder"));
+  PREFIX(2, "org.apache.hadoop.hbase.io.encoding.PrefixKeyDeltaEncoder"),
+  DIFF(3, "org.apache.hadoop.hbase.io.encoding.DiffKeyDeltaEncoder"),
+  FAST_DIFF(4, "org.apache.hadoop.hbase.io.encoding.FastDiffDeltaEncoder");
 
   private final short id;
   private final byte[] idInBytes;
@@ -46,14 +47,14 @@ public enum DataBlockEncoding {
 
   public static final int ID_SIZE = Bytes.SIZEOF_SHORT;
 
-  /** Maps data block encoding ids to enum instances. */
-  private static Map<Short, DataBlockEncoding> idToEncoding =
-      new HashMap<Short, DataBlockEncoding>();
+  /** Maps data block encoding ids to enum instances, unmodifiable. */
+  private static Map<Short, DataBlockEncoding> idToEncoding = createIdToEncoding();
 
-  static {
+  private static Map<Short, DataBlockEncoding> createIdToEncoding() {
+    Map<Short, DataBlockEncoding> idToEncoding = new HashMap<Short, DataBlockEncoding>();
     for (DataBlockEncoding algo : values()) {
       if (idToEncoding.containsKey(algo.id)) {
-        throw new RuntimeException(String.format(
+        throw new AssertionError(String.format(
             "Two data block encoder algorithms '%s' and '%s' have " +
             "the same id %d",
             idToEncoding.get(algo.id).toString(), algo.toString(),
@@ -61,22 +62,50 @@ public enum DataBlockEncoding {
       }
       idToEncoding.put(algo.id, algo);
     }
+    return Collections.unmodifiableMap(idToEncoding);
   }
 
-  private DataBlockEncoding(int id, DataBlockEncoder encoder) {
+  DataBlockEncoding(int id) {
+    this(validateId(id), (DataBlockEncoder) null);
+  }
+
+  DataBlockEncoding(int id, String encoderClassName) {
+    this(validateId(id), getEncoder(encoderClassName));
+  }
+
+  private static short validateId(int id) {
     if (id < Short.MIN_VALUE || id > Short.MAX_VALUE) {
       throw new AssertionError(
-          "Data block encoding algorithm id is out of range: " + id);
+        "Data block encoding algorithm id is out of range: " + id);
     }
-    this.id = (short) id;
-    this.idInBytes = Bytes.toBytes(this.id);
+    return (short) id;
+  }
+
+  private static DataBlockEncoder getEncoder(String encoderClassName) {
+    try {
+      return (DataBlockEncoder) Class.forName(encoderClassName).newInstance();
+    } catch (Exception e) {
+      AssertionError ae = new AssertionError(
+          "Failed to instantiate the class: " + encoderClassName);
+      ae.initCause(e);
+      throw ae;
+    }
+  }
+
+  /**
+   * @param encoder nullable
+   */
+  DataBlockEncoding(short id, DataBlockEncoder encoder) {
+    this.id = id;
+    this.idInBytes = Bytes.toBytes(id);
+    this.encoder = encoder;
+
     if (idInBytes.length != ID_SIZE) {
-      // White this may seem redundant, if we accidentally serialize
+      // While this may seem redundant, if we accidentally serialize
       // the id as e.g. an int instead of a short, all encoders will break.
-      throw new RuntimeException("Unexpected length of encoder ID byte " +
+      throw new AssertionError("Unexpected length of encoder ID byte " +
          "representation: " + Bytes.toStringBinary(idInBytes));
     }
-    this.encoder = encoder;
   }
 
   /**
@@ -169,17 +198,4 @@ public enum DataBlockEncoding {
   public static DataBlockEncoding getEncodingById(short dataBlockEncodingId) {
     return idToEncoding.get(dataBlockEncodingId);
   }
-
-  protected static DataBlockEncoder createEncoder(String fullyQualifiedClassName){
-    try {
-      return (DataBlockEncoder)Class.forName(fullyQualifiedClassName).newInstance();
-    } catch (InstantiationException e) {
-      throw new RuntimeException(e);
-    } catch (IllegalAccessException e) {
-      throw new RuntimeException(e);
-    } catch (ClassNotFoundException e) {
-      throw new IllegalArgumentException(e);
-    }
-  }
-
 }
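The DataBlockEncoding change above swaps a mutable static map populated in a static initializer for an unmodifiable map returned by a private static factory, and turns impossible conditions into AssertionError. A minimal standalone sketch of that idiom, using a hypothetical Codec enum rather than the real HBase class:

```java
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

// Hypothetical enum illustrating the idiom: build the id lookup once in a
// factory method, then freeze it so no later code can mutate the shared map.
public enum Codec {
  PLAIN(0), GZ(2), LZO(3);

  private final short id;

  Codec(int id) {
    this.id = (short) id;
  }

  /** Maps codec ids to enum instances; unmodifiable after class init. */
  private static final Map<Short, Codec> ID_TO_CODEC = createIdToCodec();

  private static Map<Short, Codec> createIdToCodec() {
    Map<Short, Codec> map = new HashMap<Short, Codec>();
    for (Codec codec : values()) {
      if (map.put(codec.id, codec) != null) {
        // Duplicate ids are a programming error, not a runtime condition,
        // hence AssertionError rather than RuntimeException.
        throw new AssertionError("Duplicate codec id: " + codec.id);
      }
    }
    return Collections.unmodifiableMap(map);
  }

  public static Codec getById(short id) {
    return ID_TO_CODEC.get(id);
  }
}
```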
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
index d207953..4304c33 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
@@ -591,20 +591,26 @@ public final class Constraints {
         continue;
       }
 
+      // add the constraint, now that we expect it to be valid.
+
+      Class<? extends Constraint> clazz;
       try {
-        // add the constraint, now that we expect it to be valid.
-        Class<? extends Constraint> clazz = classloader.loadClass(key)
-            .asSubclass(Constraint.class);
-        Constraint constraint = clazz.newInstance();
-        constraint.setConf(conf);
-        constraints.add(constraint);
-      } catch (ClassNotFoundException e1) {
-        throw new IOException(e1);
-      } catch (InstantiationException e1) {
-        throw new IOException(e1);
-      } catch (IllegalAccessException e1) {
-        throw new IOException(e1);
+        clazz = classloader.loadClass(key).asSubclass(Constraint.class);
+      } catch (ClassCastException ex) {
+        throw new IOException(ex);
+      } catch (ClassNotFoundException ex) {
+        throw new IOException(ex);
       }
+
+      Constraint constraint;
+      try {
+        constraint = clazz.newInstance();
+      } catch (Exception ex) {
+        throw new IOException(ex);
+      }
+
+      constraint.setConf(conf);
+      constraints.add(constraint);
     }
   }
   // sort them, based on the priorities
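The Constraints rewrite separates class resolution from instantiation, so each failure mode is wrapped into IOException at the step where it actually occurs. A self-contained sketch of the same load-then-instantiate split, with hypothetical Plugin/PluginLoader names:

```java
import java.io.IOException;

// Minimal sketch of the split used in the Constraints change.
interface Plugin {}

final class PluginLoader {
  static Plugin loadPlugin(ClassLoader cl, String className) throws IOException {
    // Step 1: resolve and type-check the class. asSubclass throws
    // ClassCastException immediately if the class is unrelated to Plugin.
    Class<? extends Plugin> clazz;
    try {
      clazz = cl.loadClass(className).asSubclass(Plugin.class);
    } catch (ClassNotFoundException ex) {
      throw new IOException(ex);
    } catch (ClassCastException ex) {
      throw new IOException(ex);
    }

    // Step 2: instantiate. Reflection can fail in several ways (abstract
    // class, no default constructor, illegal access, ...), so a broad catch
    // wrapped into IOException keeps the caller simple.
    try {
      return clazz.newInstance();
    } catch (Exception ex) {
      throw new IOException(ex);
    }
  }
}
```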
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index cd7cd3a..dbf5d26 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -73,7 +73,9 @@ public class AggregateImplementation<T, S> extends AggregateService implements
     AggregateResponse response = null;
     T max = null;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      @SuppressWarnings("unchecked") // Catch ClassCastException whenever you use the instance.
+      ColumnInterpreter<T, S> ci =
+          (ColumnInterpreter<T, S>) constructColumnInterpreterFromRequest(request);
       T temp;
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
@@ -101,6 +103,8 @@
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -127,7 +131,9 @@
     InternalScanner scanner = null;
     T min = null;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      @SuppressWarnings("unchecked") // Catch ClassCastException whenever you use the instance.
+      ColumnInterpreter<T, S> ci =
+          (ColumnInterpreter<T, S>) constructColumnInterpreterFromRequest(request);
       T temp;
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
@@ -153,6 +159,8 @@
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -179,7 +187,9 @@
     InternalScanner scanner = null;
     long sum = 0l;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      @SuppressWarnings("unchecked") // Catch ClassCastException whenever you use the instance.
+      ColumnInterpreter<T, S> ci =
+          (ColumnInterpreter<T, S>) constructColumnInterpreterFromRequest(request);
       S sumVal = null;
       T temp;
       Scan scan = ProtobufUtil.toScan(request.getScan());
@@ -207,6 +217,8 @@
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -287,7 +299,9 @@
     AggregateResponse response = null;
     InternalScanner scanner = null;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      @SuppressWarnings("unchecked") // Catch ClassCastException whenever you use the instance.
+      ColumnInterpreter<T, S> ci =
+          (ColumnInterpreter<T, S>) constructColumnInterpreterFromRequest(request);
       S sumVal = null;
       Long rowCountVal = 0l;
       Scan scan = ProtobufUtil.toScan(request.getScan());
@@ -321,6 +335,8 @@
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -346,7 +362,9 @@
     InternalScanner scanner = null;
     AggregateResponse response = null;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      @SuppressWarnings("unchecked") // Catch ClassCastException whenever you use the instance.
+      ColumnInterpreter<T, S> ci =
+          (ColumnInterpreter<T, S>) constructColumnInterpreterFromRequest(request);
       S sumVal = null, sumSqVal = null, tempVal = null;
       long rowCountVal = 0l;
       Scan scan = ProtobufUtil.toScan(request.getScan());
@@ -386,6 +404,8 @@
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -410,7 +430,9 @@
     AggregateResponse response = null;
     InternalScanner scanner = null;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      @SuppressWarnings("unchecked") // Catch ClassCastException whenever you use the instance.
+      ColumnInterpreter<T, S> ci =
+          (ColumnInterpreter<T, S>) constructColumnInterpreterFromRequest(request);
       S sumVal = null, sumWeights = null, tempVal = null, tempWeight = null;
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
@@ -451,6 +473,8 @@
       response = pair.build();
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -461,25 +485,20 @@
     done.run(response);
   }
 
-  @SuppressWarnings("unchecked")
-  ColumnInterpreter<T, S> constructColumnInterpreterFromRequest(
-      AggregateArgument request) throws IOException {
+  private static ColumnInterpreter<?, ?> constructColumnInterpreterFromRequest(
+      AggregateArgument request) throws Exception {
     String className = request.getInterpreterClassName();
-    Class<?> cls;
-    try {
-      cls = Class.forName(className);
-      ColumnInterpreter<T, S> ci = (ColumnInterpreter<T, S>) cls.newInstance();
-      if (request.hasInterpreterSpecificBytes()) {
-        ci.initialize(request.getInterpreterSpecificBytes());
-      }
-      return ci;
-    } catch (ClassNotFoundException e) {
-      throw new IOException(e);
-    } catch (InstantiationException e) {
-      throw new IOException(e);
-    } catch (IllegalAccessException e) {
-      throw new IOException(e);
+
+    @SuppressWarnings("rawtypes")
+    Class<? extends ColumnInterpreter> cls =
+        Class.forName(className).asSubclass(ColumnInterpreter.class);
+
+    ColumnInterpreter<?, ?> ci = cls.newInstance();
+
+    if (request.hasInterpreterSpecificBytes()) {
+      ci.initialize(request.getInterpreterSpecificBytes());
     }
+    return ci;
   }
 
   @Override
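The aggregate endpoints now cast the result of a non-generic helper, so a mismatched interpreter class surfaces as a ClassCastException at the first typed use, which the new catch (Exception e) blocks translate into an IOException for the controller. A toy demonstration (not HBase code) of why the unchecked cast cannot fail eagerly:

```java
import java.util.concurrent.Callable;

// Generics are erased at runtime: the cast line succeeds regardless of the
// real type argument, and the ClassCastException appears only at the first
// use of the instance through the wrong parameterization.
public final class UncheckedCastDemo {
  public static class Hello implements Callable<String> {
    public String call() { return "hello"; }
  }

  @SuppressWarnings("unchecked")
  static <T> Callable<T> load(String className) throws Exception {
    return (Callable<T>) Class.forName(className).asSubclass(Callable.class)
        .newInstance();
  }

  public static void main(String[] args) throws Exception {
    Callable<Integer> c = load(Hello.class.getName()); // no error here
    Integer i = c.call(); // ClassCastException thrown here, at first use
  }
}
```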
@SuppressWarnings("unchecked") // Catch ClassCastException whenever you use the instance. + RowProcessor processor = (RowProcessor) constructRowProcessorFromRequest(request); HRegion region = env.getRegion(); region.processRowsWithLocks(processor); T result = processor.getResult(); RowProcessorResult.Builder b = RowProcessorResult.newBuilder(); b.setRowProcessorResult(result.toByteString()); resultProto = b.build(); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); } catch (Exception e) { ResponseConverter.setControllerException(controller, new IOException(e)); } @@ -107,42 +109,33 @@ extends RowProcessorService implements CoprocessorService, Coprocessor { // nothing to do } - @SuppressWarnings("unchecked") - RowProcessor constructRowProcessorFromRequest(RowProcessorRequest request) - throws IOException { + private static RowProcessor constructRowProcessorFromRequest(RowProcessorRequest request) + throws Exception { String className = request.getRowProcessorClassName(); - Class cls; - try { - cls = Class.forName(className); - RowProcessor ci = (RowProcessor) cls.newInstance(); - if (request.hasRowProcessorInitializerMessageName()) { - Class imn = Class.forName(request.getRowProcessorInitializerMessageName()) - .asSubclass(Message.class); - Method m; - try { - m = imn.getMethod("parseFrom", ByteString.class); - } catch (SecurityException e) { - throw new IOException(e); - } catch (NoSuchMethodException e) { - throw new IOException(e); - } - S s; - try { - s = (S)m.invoke(null,request.getRowProcessorInitializerMessage()); - } catch (IllegalArgumentException e) { - throw new IOException(e); - } catch (InvocationTargetException e) { - throw new IOException(e); - } - ci.initialize(s); - } - return ci; - } catch (ClassNotFoundException e) { - throw new IOException(e); - } catch (InstantiationException e) { - throw new IOException(e); - } catch (IllegalAccessException e) { - throw new IOException(e); + + @SuppressWarnings("rawtypes") + Class cls = + Class.forName(className).asSubclass(RowProcessor.class); + + RowProcessor ci = cls.newInstance(); + + initializeRowProcessor(ci, request); + + return ci; + } + + private static void initializeRowProcessor( + RowProcessor ci, RowProcessorRequest request) throws Exception { + if (request.hasRowProcessorInitializerMessageName()) { + Class imn = + Class.forName(request.getRowProcessorInitializerMessageName()) + .asSubclass(Message.class); + + Method imnMethod = imn.getMethod("parseFrom", ByteString.class); + + @SuppressWarnings("unchecked") + S msg = (S) imnMethod.invoke(null, request.getRowProcessorInitializerMessage()); + ci.initialize(msg); // might throw ClassCastException } } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java index 49d4c0f..3548d45 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java @@ -261,13 +261,9 @@ public abstract class CoprocessorHost { // create the instance Coprocessor impl; - Object o = null; try { - o = implClass.newInstance(); - impl = (Coprocessor)o; - } catch (InstantiationException e) { - throw new IOException(e); - } catch (IllegalAccessException e) { + impl = (Coprocessor) implClass.newInstance(); + } catch (Exception e) { throw new IOException(e); } // create the environment diff --git 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index 5e98d31..3043300 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -471,23 +471,26 @@ public class FixedFileTrailer {
   }
 
   @SuppressWarnings("unchecked")
+  // This unchecked cast might cause RawComparator.compare(byte[], byte[])
+  // to throw ClassCastException, which the method specification allows.
   private static Class<? extends RawComparator<byte[]>> getComparatorClass(
       String comparatorClassName) throws IOException {
     try {
       return (Class<? extends RawComparator<byte[]>>)
-          Class.forName(comparatorClassName);
+          Class.forName(comparatorClassName).asSubclass(RawComparator.class);
     } catch (ClassNotFoundException ex) {
       throw new IOException(ex);
+    } catch (ClassCastException ex) {
+      throw new IOException(ex);
     }
   }
 
   public static RawComparator<byte[]> createComparator(
       String comparatorClassName) throws IOException {
+    Class<? extends RawComparator<byte[]>> clazz = getComparatorClass(comparatorClassName);
     try {
-      return getComparatorClass(comparatorClassName).newInstance();
-    } catch (InstantiationException e) {
-      throw new IOException(e);
-    } catch (IllegalAccessException e) {
+      return clazz.newInstance();
+    } catch (Exception e) {
       throw new IOException(e);
     }
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java
index 629b346..b32e2d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java
@@ -137,23 +137,35 @@ public class HFileReaderV1 extends AbstractHFileReader {
    * @return an instance of the comparator to use
    * @throws IOException in case comparator class name is invalid
    */
-  @SuppressWarnings("unchecked")
   private RawComparator<byte[]> getComparator(final String clazzName)
       throws IOException {
     if (clazzName == null || clazzName.length() == 0) {
       return null;
     }
+
+    Class<? extends RawComparator<byte[]>> clazz = getComparatorClass(clazzName);
     try {
-      return (RawComparator<byte[]>)Class.forName(clazzName).newInstance();
-    } catch (InstantiationException e) {
-      throw new IOException(e);
-    } catch (IllegalAccessException e) {
-      throw new IOException(e);
-    } catch (ClassNotFoundException e) {
+      return clazz.newInstance();
+    } catch (Exception e) {
       throw new IOException(e);
     }
   }
 
+  @SuppressWarnings("unchecked")
+  // This unchecked cast might cause RawComparator.compare(byte[], byte[])
+  // to throw ClassCastException, which the method specification allows.
+  private static Class<? extends RawComparator<byte[]>> getComparatorClass(
+      String comparatorClassName) throws IOException {
+    try {
+      return (Class<? extends RawComparator<byte[]>>)
+          Class.forName(comparatorClassName).asSubclass(RawComparator.class);
+    } catch (ClassNotFoundException ex) {
+      throw new IOException(ex);
+    } catch (ClassCastException ex) {
+      throw new IOException(ex);
+    }
+  }
+
   /**
    * Create a Scanner on this file. No seeks or reads are done on creation. Call
    * {@link HFileScanner#seekTo(byte[])} to position an start the read. There is
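In both comparator loaders above, asSubclass verifies "is a RawComparator" when the class is resolved, while the <byte[]> type argument is erased and remains an unchecked assumption. A standalone sketch of the resulting two-level check, with hypothetical ComparatorLoading/loadComparatorClass names:

```java
import java.io.IOException;
import org.apache.hadoop.io.RawComparator;

// asSubclass checks the raw type eagerly; the element type byte[] cannot be
// verified at runtime and can only fail later inside
// RawComparator.compare(byte[], byte[]).
final class ComparatorLoading {
  @SuppressWarnings("unchecked")
  static Class<? extends RawComparator<byte[]>> loadComparatorClass(String name)
      throws IOException {
    try {
      return (Class<? extends RawComparator<byte[]>>)
          Class.forName(name).asSubclass(RawComparator.class);
    } catch (ClassNotFoundException ex) {
      throw new IOException(ex);
    } catch (ClassCastException ex) {
      // Thrown by asSubclass when the class is not a RawComparator at all.
      throw new IOException(ex);
    }
  }
}
```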
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 2a49f83..c152212 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -36,14 +36,14 @@ import org.apache.hadoop.hbase.util.FSUtils;
 /**
  * Abstract Cleaner that uses a chain of delegates to clean a directory of files
  * @param <T> Cleaner delegate class that is dynamically loaded from configuration
  */
-public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore {
+public abstract class CleanerChore extends Chore {
 
   private static final Log LOG = LogFactory.getLog(CleanerChore.class.getName());
 
   private final FileSystem fs;
   private final Path oldFileDir;
   private final Configuration conf;
-  protected List<T> cleanersChain;
+  protected List<FileCleanerDelegate> cleanersChain;
 
   /**
    * @param name name of the chore being run
@@ -77,11 +77,11 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore
    * @param confKey key to get the file cleaner classes from the configuration
    */
   private void initCleanerChain(String confKey) {
-    this.cleanersChain = new LinkedList<T>();
+    this.cleanersChain = new LinkedList<FileCleanerDelegate>();
     String[] logCleaners = conf.getStrings(confKey);
     if (logCleaners != null) {
       for (String className : logCleaners) {
-        T logCleaner = newFileCleaner(className, conf);
+        FileCleanerDelegate logCleaner = newFileCleaner(className, conf);
         if (logCleaner != null) {
           LOG.debug("initialize cleaner=" + className);
           this.cleanersChain.add(logCleaner);
@@ -91,18 +91,18 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore
   }
 
   /**
-   * A utility method to create new instances of LogCleanerDelegate based on the class name of the
-   * LogCleanerDelegate.
-   * @param className fully qualified class name of the LogCleanerDelegate
+   * A utility method to create a new instance of a subclass of {@code FileCleanerDelegate}
+   * based on the given class name.
+   *
+   * @param className fully qualified class name of the subclass of {@code FileCleanerDelegate}
    * @param conf
-   * @return the new instance
+   * @return the new instance, or null if such an instance could not be created
    */
-  public T newFileCleaner(String className, Configuration conf) {
+  public FileCleanerDelegate newFileCleaner(String className, Configuration conf) {
     try {
       Class<? extends FileCleanerDelegate> c = Class.forName(className).asSubclass(
           FileCleanerDelegate.class);
-      @SuppressWarnings("unchecked")
-      T cleaner = (T) c.newInstance();
+      FileCleanerDelegate cleaner = c.newInstance();
       cleaner.setConf(conf);
       return cleaner;
     } catch (Exception e) {
@@ -191,7 +191,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore
     }
 
     // check each of the cleaners for the file
-    for (T cleaner : cleanersChain) {
+    for (FileCleanerDelegate cleaner : cleanersChain) {
       if (cleaner.isStopped() || this.stopper.isStopped()) {
         LOG.warn("A file cleaner" + this.getName() + " is stopped, won't delete any file in:"
             + this.oldFileDir);
@@ -217,7 +217,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore
 
   @Override
   public void cleanup() {
-    for (T lc : this.cleanersChain) {
+    for (FileCleanerDelegate lc : this.cleanersChain) {
       try {
         lc.stop("Exiting");
       } catch (Throwable t) {
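Dropping CleanerChore's type parameter is reasonable because the delegates are created reflectively from configured class names: the parameter could never be enforced at runtime and only forced an unchecked (T) cast. A toy sketch of the resulting shape, with hypothetical Delegate/Chore names:

```java
import java.util.LinkedList;
import java.util.List;

// When every element comes from Class.forName on a configured name, a
// class-level type parameter adds no safety; programming against the base
// type removes the unchecked cast entirely.
interface Delegate { void clean(); }

abstract class Chore {
  protected final List<Delegate> delegates = new LinkedList<Delegate>();

  protected Delegate newDelegate(String className) {
    try {
      return Class.forName(className).asSubclass(Delegate.class).newInstance();
    } catch (Exception e) {
      return null; // mirrors newFileCleaner: null when creation fails
    }
  }
}
```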
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
index a25114f..8b73586 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.regionserver.StoreFile;
  * folder that are deletable for each HFile cleaner in the chain.
  */
 @InterfaceAudience.Private
-public class HFileCleaner extends CleanerChore<BaseHFileCleanerDelegate> {
+public class HFileCleaner extends CleanerChore {
 
   public static final String MASTER_HFILE_CLEANER_PLUGINS = "hbase.master.hfilecleaner.plugins";
 
@@ -58,7 +58,7 @@ public class HFileCleaner extends CleanerChore<BaseHFileCleanerDelegate> {
   /**
    * Exposed for TESTING!
    */
-  public List<BaseHFileCleanerDelegate> getDelegatesForTesting() {
+  public List<FileCleanerDelegate> getDelegatesForTesting() {
     return this.cleanersChain;
   }
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
index 3aedfea..5b61f44 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
@@ -26,7 +26,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Stoppable;
-import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
 
 /**
@@ -35,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
  * @see BaseLogCleanerDelegate
  */
 @InterfaceAudience.Private
-public class LogCleaner extends CleanerChore<BaseLogCleanerDelegate> {
+public class LogCleaner extends CleanerChore {
   static final Log LOG = LogFactory.getLog(LogCleaner.class.getName());
 
   /**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
index 7888aba..2676081 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
@@ -36,8 +36,6 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.regionserver.wal.HLog.Reader;
-import org.apache.hadoop.hbase.regionserver.wal.HLog.Writer;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.HConstants;
 
@@ -56,6 +54,14 @@ public class HLogUtil {
     return Bytes.equals(HLog.METAFAMILY, family);
   }
 
+  /**
+   * Returns the class whose name is the string value
+   * for the key "hbase.regionserver.hlog.keyclass" in the given {@code conf},
+   * or returns {@code HLogKey.class} if the value is null.
+   * Note that it is possible that the returned class is not a subclass of {@code HLogKey}.
+   *
+   * @throws RuntimeException if the name provided by {@code conf} is not a valid class name
+   */
   @SuppressWarnings("unchecked")
   public static Class<? extends HLogKey> getKeyClass(Configuration conf) {
     return (Class<? extends HLogKey>) conf.getClass(
         "hbase.regionserver.hlog.keyclass", HLogKey.class);
   }
 
   public static HLogKey newKey(Configuration conf) throws IOException {
-    Class<? extends HLogKey> keyClass = getKeyClass(conf);
     try {
+      Class<? extends HLogKey> keyClass = getKeyClass(conf).asSubclass(HLogKey.class);
       return keyClass.newInstance();
-    } catch (InstantiationException e) {
-      throw new IOException("cannot create hlog key");
-    } catch (IllegalAccessException e) {
-      throw new IOException("cannot create hlog key");
+    } catch (Exception e) {
+      throw new IOException("cannot create hlog key", e);
     }
   }
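HLogUtil.newKey now re-checks the configured class with asSubclass before instantiating it, because getKeyClass's unchecked cast may be wrong whenever the configuration names an unrelated class. A sketch of the same defensive pattern against Hadoop's Configuration.getClass, with a hypothetical BaseKey standing in for HLogKey:

```java
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;

// asSubclass re-verifies the configured class before newInstance, so a
// misconfigured key class fails with a wrapped, descriptive IOException
// instead of a surprise ClassCastException later.
class BaseKey {}

final class KeyFactory {
  static BaseKey newKey(Configuration conf) throws IOException {
    try {
      Class<? extends BaseKey> keyClass =
          conf.getClass("hbase.regionserver.hlog.keyclass", BaseKey.class)
              .asSubclass(BaseKey.class);
      return keyClass.newInstance();
    } catch (Exception e) {
      // Chaining the cause preserves the real failure for debugging.
      throw new IOException("cannot create hlog key", e);
    }
  }
}
```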
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
index 7448f09..98fb005 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
@@ -218,10 +218,8 @@
       } else {
         try {
           key = keyClass.newInstance();
-        } catch (InstantiationException ie) {
-          throw new IOException(ie);
-        } catch (IllegalAccessException iae) {
-          throw new IOException(iae);
+        } catch (Exception ex) {
+          throw new IOException(ex);
         }
       }
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index 85c30a5..181edda 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -325,10 +325,12 @@
       final String peerId) throws IOException {
     ReplicationSourceInterface src;
     try {
-      @SuppressWarnings("rawtypes")
-      Class c = Class.forName(conf.get("replication.replicationsource.implementation",
-          ReplicationSource.class.getCanonicalName()));
-      src = (ReplicationSourceInterface) c.newInstance();
+      String className = conf.get(
+          "replication.replicationsource.implementation",
+          ReplicationSource.class.getCanonicalName());
+      Class<? extends ReplicationSourceInterface> c =
+          Class.forName(className).asSubclass(ReplicationSourceInterface.class);
+      src = c.newInstance();
     } catch (Exception e) {
       LOG.warn("Passed replication source implementation throws errors, " +
           "defaulting to ReplicationSource", e);
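ReplicationSourceManager keeps its log-and-fall-back behavior: any failure while resolving or instantiating the configured source yields the default implementation instead of an exception. A minimal sketch of that idiom, with hypothetical Source/DefaultSource names:

```java
// Resolve the configured implementation with asSubclass; on any failure,
// fall back to a known-good default rather than failing the caller.
interface Source {}

final class SourceFactory {
  static final class DefaultSource implements Source {}

  static Source create(String configuredClassName) {
    try {
      return Class.forName(configuredClassName)
          .asSubclass(Source.class)
          .newInstance();
    } catch (Exception e) {
      // Matches the patch: log-and-default rather than propagate.
      return new DefaultSource();
    }
  }
}
```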
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index 6114af2..2e12b6b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -61,28 +61,26 @@ public class ProtobufMessageBodyConsumer
       Annotation[] annotations, MediaType mediaType,
       MultivaluedMap<String, String> httpHeaders, InputStream inputStream)
       throws IOException, WebApplicationException {
-    ProtobufMessageHandler obj = null;
+    ProtobufMessageHandler obj;
     try {
       obj = type.newInstance();
-      ByteArrayOutputStream baos = new ByteArrayOutputStream();
-      byte[] buffer = new byte[4096];
-      int read;
-      do {
-        read = inputStream.read(buffer, 0, buffer.length);
-        if (read > 0) {
-          baos.write(buffer, 0, read);
-        }
-      } while (read > 0);
-      if (LOG.isDebugEnabled()) {
-        LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
-          inputStream);
-      }
-      obj = obj.getObjectFromMessage(baos.toByteArray());
-    } catch (InstantiationException e) {
-      throw new WebApplicationException(e);
-    } catch (IllegalAccessException e) {
+    } catch (Exception e) {
       throw new WebApplicationException(e);
     }
-    return obj;
+
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    byte[] buffer = new byte[4096];
+    int read;
+    do {
+      read = inputStream.read(buffer, 0, buffer.length);
+      if (read > 0) {
+        baos.write(buffer, 0, read);
+      }
+    } while (read > 0);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
+        inputStream);
+    }
+    return obj.getObjectFromMessage(baos.toByteArray());
   }
 }
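The drain loop that readFrom now runs outside the try block copies the request body in 4 KB chunks until read() signals end of stream, so only newInstance failures map to WebApplicationException while genuine read errors propagate as IOException. The same loop as a standalone helper:

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

// Copy the entire stream into memory; read(...) returns -1 at end of
// stream, which terminates the loop.
final class Streams {
  static byte[] readFully(InputStream in) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    byte[] buffer = new byte[4096];
    int read;
    do {
      read = in.read(buffer, 0, buffer.length);
      if (read > 0) {
        baos.write(buffer, 0, read);
      }
    } while (read > 0);
    return baos.toByteArray();
  }
}
```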
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
index 529ad90..7fd6374 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
@@ -39,6 +39,7 @@
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate;
+import org.apache.hadoop.hbase.master.cleaner.FileCleanerDelegate;
 import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.Store;
@@ -161,7 +162,7 @@
     // setup the delegate
     Stoppable stop = new StoppableImplementation();
     HFileCleaner cleaner = setupAndCreateCleaner(conf, fs, archiveDir, stop);
-    List<BaseHFileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
+    List<FileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
     final LongTermArchivingHFileCleaner delegate = (LongTermArchivingHFileCleaner) cleaners.get(0);
 
     // create the region
@@ -210,7 +211,7 @@
     // setup the delegate
     Stoppable stop = new StoppableImplementation();
     HFileCleaner cleaner = setupAndCreateCleaner(conf, fs, archiveDir, stop);
-    List<BaseHFileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
+    List<FileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
     final LongTermArchivingHFileCleaner delegate = (LongTermArchivingHFileCleaner) cleaners.get(0);
 
     // create the region
@@ -303,7 +304,7 @@
    * @throws IOException on failure
    * @throws KeeperException on failure
    */
-  private List<BaseHFileCleanerDelegate> turnOnArchiving(String tableName, HFileCleaner cleaner)
+  private List<FileCleanerDelegate> turnOnArchiving(String tableName, HFileCleaner cleaner)
       throws IOException, KeeperException {
     // turn on hfile retention
     LOG.debug("----Starting archiving for table:" + tableName);
@@ -311,7 +312,7 @@
     assertTrue("Archving didn't get turned on", archivingClient.getArchivingEnabled(tableName));
 
     // wait for the archiver to get the notification
-    List<BaseHFileCleanerDelegate> cleaners = cleaner.getDelegatesForTesting();
+    List<FileCleanerDelegate> cleaners = cleaner.getDelegatesForTesting();
     LongTermArchivingHFileCleaner delegate = (LongTermArchivingHFileCleaner) cleaners.get(0);
     while (!delegate.archiveTracker.keepHFiles(STRING_TABLE_NAME)) {
       // spin until propagation - should be fast
@@ -326,7 +327,7 @@
    * least the expected number of times.
    */
   private CountDownLatch setupCleanerWatching(LongTermArchivingHFileCleaner cleaner,
-      List<BaseHFileCleanerDelegate> cleaners, final int expected) {
+      List<FileCleanerDelegate> cleaners, final int expected) {
     // replace the cleaner with one that we can can check
     BaseHFileCleanerDelegate delegateSpy = Mockito.spy(cleaner);
     final int[] counter = new int[] { 0 };
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
index 8503caf..d6cd6b2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
@@ -225,7 +225,7 @@
     Mockito.reset(spy);
   }
 
-  private static class AllValidPaths extends CleanerChore<BaseHFileCleanerDelegate> {
+  private static class AllValidPaths extends CleanerChore {
 
     public AllValidPaths(String name, Stoppable s, Configuration conf, FileSystem fs,
         Path oldFileDir, String confkey) {
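Taken together, the patch converges every call site on one shape: resolve the class, type-check it with asSubclass, instantiate it, and wrap any failure into the checked exception the caller already handles. Written once as a generic utility, that shape might look like this (hypothetical, not part of the patch):

```java
import java.io.IOException;

// Generic form of the idiom applied throughout the patch. asSubclass makes
// the type check eager, and the broad catch collapses ClassNotFoundException,
// ClassCastException, InstantiationException, and IllegalAccessException
// into the one exception type callers already expect.
final class Instantiator {
  static <T> T newInstance(String className, Class<T> expected) throws IOException {
    try {
      return Class.forName(className).asSubclass(expected).newInstance();
    } catch (Exception e) {
      throw new IOException(e);
    }
  }
}
```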