diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
index 4db0b7f..4821b0f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.io.encoding;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -32,32 +33,93 @@ import org.apache.hadoop.hbase.util.Bytes;
  */
 @InterfaceAudience.Private
 public enum DataBlockEncoding {
+  // Lazy initialization is applied to the encoders.
+  // Use the lazy initialization holder class idiom
+  // to avoid synchronization problems.
 
   /** Disable data block encoding. */
-  NONE(0, null),
+  NONE(0) {
+    @Override
+    public DataBlockEncoder getEncoder() { return null; }
+  },
+
   // id 1 is reserved for the BITSET algorithm to be added later
-  PREFIX(2, "org.apache.hadoop.hbase.io.encoding.PrefixKeyDeltaEncoder"),
-  DIFF(3, "org.apache.hadoop.hbase.io.encoding.DiffKeyDeltaEncoder"),
-  FAST_DIFF(4, "org.apache.hadoop.hbase.io.encoding.FastDiffDeltaEncoder"),
-  // id 5 is reserved for the COPY_KEY algorithm for benchmarking
-  // COPY_KEY(5, "org.apache.hadoop.hbase.io.encoding.CopyKeyDataBlockEncoder"),
-  PREFIX_TREE(6, "org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec");
+
+  PREFIX(2) {
+    @Override
+    public DataBlockEncoder getEncoder() { return PrefixEncoderHolder.encoder; }
+  },
+
+  DIFF(3) {
+    @Override
+    public DataBlockEncoder getEncoder() { return DiffEncoderHolder.encoder; }
+  },
+
+  FAST_DIFF(4) {
+    @Override
+    public DataBlockEncoder getEncoder() { return FastDiffEncoderHolder.encoder; }
+  },
+
+//  // id 5 is reserved for the COPY_KEY algorithm for benchmarking
+//  COPY_KEY(5) {
+//    @Override
+//    public DataBlockEncoder getEncoder() { return CopyKeyEncoderHolder.encoder; }
+//  },
+
+  PREFIX_TREE(6) {
+    @Override
+    public DataBlockEncoder getEncoder() { return PrefixTreeEncoderHolder.encoder; }
+  };
+
+  private static DataBlockEncoder getEncoder(String encoderClassName) {
+    try {
+      return (DataBlockEncoder) Class.forName(encoderClassName).newInstance();
+    } catch(Exception e) {
+      AssertionError ae = new AssertionError(
+          "Failed to instantiate the class: " + encoderClassName);
+      ae.initCause(e);
+      throw ae;
+    }
+  }
+
+  private static class PrefixEncoderHolder {
+    static final DataBlockEncoder encoder = getEncoder(
+        "org.apache.hadoop.hbase.io.encoding.PrefixKeyDeltaEncoder");
+  }
+
+  private static class DiffEncoderHolder {
+    static final DataBlockEncoder encoder = getEncoder(
+        "org.apache.hadoop.hbase.io.encoding.DiffKeyDeltaEncoder");
+  }
+
+  private static class FastDiffEncoderHolder {
+    static final DataBlockEncoder encoder = getEncoder(
+        "org.apache.hadoop.hbase.io.encoding.FastDiffDeltaEncoder");
+  }
+
+//  private static class CopyKeyEncoderHolder {
+//    static final DataBlockEncoder encoder = getEncoder(
+//        "org.apache.hadoop.hbase.io.encoding.CopyKeyDataBlockEncoder");
+//  }
+
+  private static class PrefixTreeEncoderHolder {
+    static final DataBlockEncoder encoder = getEncoder(
+        "org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec");
+  }
 
   private final short id;
   private final byte[] idInBytes;
-  private DataBlockEncoder encoder;
-  private final String encoderCls;
 
   public static final int ID_SIZE = Bytes.SIZEOF_SHORT;
 
-  /** Maps data block encoding ids to enum instances. */
-  private static Map<Short, DataBlockEncoding> idToEncoding =
-      new HashMap<Short, DataBlockEncoding>();
+  /** Maps data block encoding ids to enum instances; unmodifiable. */
+  private static Map<Short, DataBlockEncoding> idToEncoding = createIdToEncoding();
 
-  static {
+  private static Map<Short, DataBlockEncoding> createIdToEncoding() {
+    Map<Short, DataBlockEncoding> idToEncoding = new HashMap<Short, DataBlockEncoding>();
     for (DataBlockEncoding algo : values()) {
       if (idToEncoding.containsKey(algo.id)) {
-        throw new RuntimeException(String.format(
+        throw new AssertionError(String.format(
             "Two data block encoder algorithms '%s' and '%s' have " +
             "the same id %d",
             idToEncoding.get(algo.id).toString(), algo.toString(),
@@ -65,22 +127,27 @@ public enum DataBlockEncoding {
       }
       idToEncoding.put(algo.id, algo);
     }
+    return Collections.unmodifiableMap(idToEncoding);
   }
 
-  private DataBlockEncoding(int id, String encoderClsName) {
+  private static short validateId(int id) {
     if (id < Short.MIN_VALUE || id > Short.MAX_VALUE) {
       throw new AssertionError(
-          "Data block encoding algorithm id is out of range: " + id);
+        "Data block encoding algorithm id is out of range: " + id);
     }
-    this.id = (short) id;
+    return (short)id;
+  }
+
+  DataBlockEncoding(int id) {
+    this.id = validateId(id);
     this.idInBytes = Bytes.toBytes(this.id);
+
     if (idInBytes.length != ID_SIZE) {
-      // White this may seem redundant, if we accidentally serialize
+      // While this may seem redundant, if we accidentally serialize
       // the id as e.g. an int instead of a short, all encoders will break.
-      throw new RuntimeException("Unexpected length of encoder ID byte " +
+      throw new AssertionError("Unexpected length of encoder ID byte " +
          "representation: " + Bytes.toStringBinary(idInBytes));
     }
-    this.encoderCls = encoderClsName;
   }
 
   /**
@@ -105,7 +172,6 @@ public enum DataBlockEncoding {
     stream.write(idInBytes);
   }
 
-
   /**
    * Writes id bytes to the given array starting from offset.
    *
@@ -122,13 +188,7 @@ public enum DataBlockEncoding {
    * @return data block encoder if algorithm is specified, null if none is
    *         selected.
    */
-  public DataBlockEncoder getEncoder() {
-    if (encoder == null && id != 0) {
-      // lazily create the encoder
-      encoder = createEncoder(encoderCls);
-    }
-    return encoder;
-  }
+  public abstract DataBlockEncoder getEncoder();
 
   /**
    * Find and create data block encoder for given id;
@@ -177,17 +237,4 @@ public enum DataBlockEncoding {
   public static DataBlockEncoding getEncodingById(short dataBlockEncodingId) {
     return idToEncoding.get(dataBlockEncodingId);
   }
-
-  protected static DataBlockEncoder createEncoder(String fullyQualifiedClassName){
-    try {
-      return (DataBlockEncoder)Class.forName(fullyQualifiedClassName).newInstance();
-    } catch (InstantiationException e) {
-      throw new RuntimeException(e);
-    } catch (IllegalAccessException e) {
-      throw new RuntimeException(e);
-    } catch (ClassNotFoundException e) {
-      throw new IllegalArgumentException(e);
-    }
-  }
-
 }
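The holder classes above follow the lazy initialization holder class idiom: the JVM initializes a nested class only when it is first referenced, and class initialization is guaranteed to be thread-safe, so each encoder is created lazily without explicit locking. A minimal standalone sketch of the idiom (ResourceOwner and HeavyResource are illustrative names, not part of this patch):

    // Sketch of the lazy initialization holder class idiom.
    // HeavyResource stands in for any expensive-to-construct object.
    public final class ResourceOwner {

      private ResourceOwner() {}

      // Holder is not initialized until getResource() first touches it,
      // and the JVM serializes class initialization, so no explicit
      // synchronization is needed here.
      private static class Holder {
        static final HeavyResource INSTANCE = new HeavyResource();
      }

      public static HeavyResource getResource() {
        return Holder.INSTANCE;
      }

      static class HeavyResource {
        HeavyResource() {
          // expensive setup runs exactly once, on first access
        }
      }
    }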
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
index 328603a..204407b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
@@ -591,20 +591,26 @@ public final class Constraints {
         continue;
       }
 
+      // add the constraint, now that we expect it to be valid.
+
+      Class<? extends Constraint> clazz;
       try {
-        // add the constraint, now that we expect it to be valid.
-        Class<? extends Constraint> clazz = classloader.loadClass(key)
-            .asSubclass(Constraint.class);
-        Constraint constraint = clazz.newInstance();
-        constraint.setConf(conf);
-        constraints.add(constraint);
-      } catch (ClassNotFoundException e1) {
-        throw new IOException(e1);
-      } catch (InstantiationException e1) {
-        throw new IOException(e1);
-      } catch (IllegalAccessException e1) {
-        throw new IOException(e1);
+        clazz = classloader.loadClass(key).asSubclass(Constraint.class);
+      } catch (ClassCastException ex) {
+        throw new IOException(ex);
+      } catch (ClassNotFoundException ex) {
+        throw new IOException(ex);
       }
+
+      Constraint constraint;
+      try {
+        constraint = clazz.newInstance();
+      } catch (Exception ex) {
+        throw new IOException(ex);
+      }
+
+      constraint.setConf(conf);
+      constraints.add(constraint);
     }
   }
 
   // sort them, based on the priorities
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index 780da4a..d74b931 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -109,6 +109,8 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -161,6 +163,8 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -215,6 +219,8 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -329,6 +335,8 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -394,6 +402,8 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -459,6 +469,8 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       response = pair.build();
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
     } finally {
       if (scanner != null) {
         try {
@@ -469,27 +481,26 @@ extends AggregateService implements CoprocessorService, Coprocessor {
     done.run(response);
   }
 
-  @SuppressWarnings("unchecked")
-  ColumnInterpreter<T, S, P, Q, R> constructColumnInterpreterFromRequest(
-      AggregateArgument request) throws IOException {
+  /**
+   * Catch ClassCastException whenever you use the returned instance.
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  private static <T, S, P extends Message, Q extends Message, R extends Message>
+      ColumnInterpreter<T, S, P, Q, R> constructColumnInterpreterFromRequest(
+          AggregateArgument request) throws Exception {
+
     String className = request.getInterpreterClassName();
-    Class<?> cls;
-    try {
-      cls = Class.forName(className);
-      ColumnInterpreter<T, S, P, Q, R> ci = (ColumnInterpreter<T, S, P, Q, R>) cls.newInstance();
-      if (request.hasInterpreterSpecificBytes()) {
-        ByteString b = request.getInterpreterSpecificBytes();
-        P initMsg = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 2, b);
-        ci.initialize(initMsg);
-      }
-      return ci;
-    } catch (ClassNotFoundException e) {
-      throw new IOException(e);
-    } catch (InstantiationException e) {
-      throw new IOException(e);
-    } catch (IllegalAccessException e) {
-      throw new IOException(e);
+
+    Class<? extends ColumnInterpreter> cls =
+        Class.forName(className).asSubclass(ColumnInterpreter.class);
+
+    ColumnInterpreter ci = cls.newInstance();
+    if (request.hasInterpreterSpecificBytes()) {
+      ByteString b = request.getInterpreterSpecificBytes();
+      P initMsg = (P)ProtobufUtil.getParsedGenericInstance(ci.getClass(), 2, b);
+      ci.initialize(initMsg);
     }
+    return ci;
   }
 
   @Override
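The rewritten factory above, like the Constraints change before it, leans on Class.asSubclass(Class) so that a wrongly configured class name fails with a ClassCastException at lookup time rather than through an unchecked cast at some later call site. A compact sketch of the pattern (Plugin and PluginLoader are hypothetical stand-ins for interfaces such as Constraint, ColumnInterpreter or RowProcessor):

    import java.io.IOException;

    // Sketch of the Class.forName(...).asSubclass(...) pattern used
    // throughout this patch.
    interface Plugin {
      void run();
    }

    final class PluginLoader {
      static Plugin load(String className) throws IOException {
        try {
          // asSubclass throws ClassCastException right here if the named
          // class does not implement Plugin, instead of an unchecked cast
          // failing later at an arbitrary call site.
          Class<? extends Plugin> cls = Class.forName(className).asSubclass(Plugin.class);
          return cls.newInstance();
        } catch (Exception e) {
          // mirror the patch: wrap every reflective failure in an IOException
          throw new IOException(e);
        }
      }
    }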
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
index 02d768d..f0c5646 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
@@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.coprocessor;
 
 import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -73,6 +72,8 @@ extends RowProcessorService implements CoprocessorService, Coprocessor {
       RowProcessorResult.Builder b = RowProcessorResult.newBuilder();
       b.setRowProcessorResult(result.toByteString());
       resultProto = b.build();
+    } catch (IOException e) {
+      ResponseConverter.setControllerException(controller, e);
     } catch (Exception e) {
       ResponseConverter.setControllerException(controller, new IOException(e));
     }
@@ -108,42 +109,32 @@ extends RowProcessorService implements CoprocessorService, Coprocessor {
     // nothing to do
   }
 
-  @SuppressWarnings("unchecked")
-  RowProcessor<S, T> constructRowProcessorFromRequest(RowProcessorRequest request)
-      throws IOException {
+  /**
+   * Catch ClassCastException whenever you use the returned instance.
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  private static <S extends Message, T extends Message>
+      RowProcessor<S, T> constructRowProcessorFromRequest(RowProcessorRequest request)
+          throws Exception {
+
     String className = request.getRowProcessorClassName();
-    Class<?> cls;
-    try {
-      cls = Class.forName(className);
-      RowProcessor<S, T> ci = (RowProcessor<S, T>) cls.newInstance();
-      if (request.hasRowProcessorInitializerMessageName()) {
-        Class<? extends Message> imn = Class.forName(request.getRowProcessorInitializerMessageName())
-            .asSubclass(Message.class);
-        Method m;
-        try {
-          m = imn.getMethod("parseFrom", ByteString.class);
-        } catch (SecurityException e) {
-          throw new IOException(e);
-        } catch (NoSuchMethodException e) {
-          throw new IOException(e);
-        }
-        S s;
-        try {
-          s = (S)m.invoke(null,request.getRowProcessorInitializerMessage());
-        } catch (IllegalArgumentException e) {
-          throw new IOException(e);
-        } catch (InvocationTargetException e) {
-          throw new IOException(e);
-        }
-        ci.initialize(s);
-      }
-      return ci;
-    } catch (ClassNotFoundException e) {
-      throw new IOException(e);
-    } catch (InstantiationException e) {
-      throw new IOException(e);
-    } catch (IllegalAccessException e) {
-      throw new IOException(e);
+
+    Class<? extends RowProcessor> cls =
+        Class.forName(className).asSubclass(RowProcessor.class);
+
+    RowProcessor ci = cls.newInstance();
+
+    if (request.hasRowProcessorInitializerMessageName()) {
+      Class<? extends Message> imn =
+          Class.forName(request.getRowProcessorInitializerMessageName())
+              .asSubclass(Message.class);
+
+      Method imnMethod = imn.getMethod("parseFrom", ByteString.class);
+
+      S msg = (S) imnMethod.invoke(null, request.getRowProcessorInitializerMessage());
+      ci.initialize(msg); // might throw ClassCastException
     }
+
+    return ci;
   }
 }
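The endpoint above finds its initializer message by reflectively calling the static parseFrom(ByteString) factory that every generated protobuf Message class provides. A hedged sketch of that lookup in isolation (ReflectiveParse is an illustrative helper, not part of the patch):

    import java.lang.reflect.Method;
    import com.google.protobuf.ByteString;
    import com.google.protobuf.Message;

    // Sketch: parse a protobuf message whose concrete class is known only
    // by name at runtime, as constructRowProcessorFromRequest does above.
    final class ReflectiveParse {
      static Message parse(String messageClassName, ByteString bytes) throws Exception {
        Class<? extends Message> cls =
            Class.forName(messageClassName).asSubclass(Message.class);
        // Generated protobuf message classes expose a static
        // parseFrom(ByteString) factory; a static method is invoked with
        // a null receiver.
        Method parseFrom = cls.getMethod("parseFrom", ByteString.class);
        return (Message) parseFrom.invoke(null, bytes);
      }
    }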
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
index 6d3b86e..58ef48c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
@@ -313,13 +313,9 @@ public abstract class CoprocessorHost<E extends CoprocessorEnvironment> {
 
     // create the instance
     Coprocessor impl;
-    Object o = null;
     try {
-      o = implClass.newInstance();
-      impl = (Coprocessor)o;
-    } catch (InstantiationException e) {
-      throw new IOException(e);
-    } catch (IllegalAccessException e) {
+      impl = (Coprocessor) implClass.newInstance();
+    } catch (Exception e) {
       throw new IOException(e);
     }
     // create the environment
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index d8f7090..fb7b120 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -597,24 +597,26 @@ public class FixedFileTrailer {
   }
 
   @SuppressWarnings("unchecked")
+  // This unchecked cast might cause RawComparator.compare(byte[], byte[])
+  // to throw ClassCastException, which the method specification allows.
   private static Class<? extends RawComparator<byte[]>> getComparatorClass(
       String comparatorClassName) throws IOException {
     try {
       return (Class<? extends RawComparator<byte[]>>)
-          Class.forName(comparatorClassName);
+          Class.forName(comparatorClassName).asSubclass(RawComparator.class);
     } catch (ClassNotFoundException ex) {
       throw new IOException(ex);
+    } catch (ClassCastException ex) {
+      throw new IOException(ex);
     }
   }
 
   public static RawComparator<byte[]> createComparator(
       String comparatorClassName) throws IOException {
+    Class<? extends RawComparator<byte[]>> clazz = getComparatorClass(comparatorClassName);
     try {
-      return getComparatorClass(comparatorClassName).newInstance();
-    } catch (InstantiationException e) {
-      throw new IOException("Comparator class " + comparatorClassName +
-          " is not instantiable", e);
-    } catch (IllegalAccessException e) {
+      return clazz.newInstance();
+    } catch (Exception e) {
       throw new IOException("Comparator class " + comparatorClassName +
           " is not instantiable", e);
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
index 863456c..84d5d1a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
@@ -106,9 +106,7 @@ public class ClusterStatusPublisher extends Chore {
     this.messagePeriod = conf.getInt(STATUS_PUBLISH_PERIOD, DEFAULT_STATUS_PUBLISH_PERIOD);
     try {
       this.publisher = publisherClass.newInstance();
-    } catch (InstantiationException e) {
-      throw new IOException("Can't create publisher " + publisherClass.getName(), e);
-    } catch (IllegalAccessException e) {
+    } catch (Exception e) {
       throw new IOException("Can't create publisher " + publisherClass.getName(), e);
     }
     this.publisher.connect(conf);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 04a735f..78df6a9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -36,14 +36,14 @@ import org.apache.hadoop.hbase.util.FSUtils;
  * Abstract Cleaner that uses a chain of delegates to clean a directory of files
  * @param <T> Cleaner delegate class that is dynamically loaded from configuration
  */
-public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore {
+public abstract class CleanerChore extends Chore {
 
   private static final Log LOG = LogFactory.getLog(CleanerChore.class.getName());
 
   private final FileSystem fs;
   private final Path oldFileDir;
   private final Configuration conf;
-  protected List<T> cleanersChain;
+  protected List<FileCleanerDelegate> cleanersChain;
 
   /**
    * @param name name of the chore being run
@@ -77,11 +77,11 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore {
    * @param confKey key to get the file cleaner classes from the configuration
    */
   private void initCleanerChain(String confKey) {
-    this.cleanersChain = new LinkedList<T>();
+    this.cleanersChain = new LinkedList<FileCleanerDelegate>();
     String[] logCleaners = conf.getStrings(confKey);
     if (logCleaners != null) {
       for (String className : logCleaners) {
-        T logCleaner = newFileCleaner(className, conf);
+        FileCleanerDelegate logCleaner = newFileCleaner(className, conf);
         if (logCleaner != null) {
           LOG.debug("initialize cleaner=" + className);
           this.cleanersChain.add(logCleaner);
@@ -91,18 +91,18 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore {
   }
 
   /**
-   * A utility method to create new instances of LogCleanerDelegate based on the class name of the
-   * LogCleanerDelegate.
-   * @param className fully qualified class name of the LogCleanerDelegate
+   * A utility method to create a new instance of a subclass of {@code FileCleanerDelegate}
+   * based on the given class name.
+   *
+   * @param className fully qualified class name of the subclass of {@code FileCleanerDelegate}
    * @param conf
-   * @return the new instance
+   * @return the new instance, or null if the instance could not be created
    */
-  public T newFileCleaner(String className, Configuration conf) {
+  public FileCleanerDelegate newFileCleaner(String className, Configuration conf) {
     try {
       Class<? extends FileCleanerDelegate> c = Class.forName(className).asSubclass(
           FileCleanerDelegate.class);
-      @SuppressWarnings("unchecked")
-      T cleaner = (T) c.newInstance();
+      FileCleanerDelegate cleaner = c.newInstance();
       cleaner.setConf(conf);
       return cleaner;
     } catch (Exception e) {
@@ -216,7 +216,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore {
     }
 
     // check each of the cleaners for the file
-    for (T cleaner : cleanersChain) {
+    for (FileCleanerDelegate cleaner : cleanersChain) {
       if (cleaner.isStopped() || this.stopper.isStopped()) {
         LOG.warn("A file cleaner" + this.getName() + " is stopped, won't delete any file in:"
             + this.oldFileDir);
@@ -246,7 +246,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore {
 
   @Override
   public void cleanup() {
-    for (T lc : this.cleanersChain) {
+    for (FileCleanerDelegate lc : this.cleanersChain) {
      try {
        lc.stop("Exiting");
      } catch (Throwable t) {
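Dropping the type parameter from CleanerChore means the chore now programs against the FileCleanerDelegate interface itself: a List<FileCleanerDelegate> can hold any mix of delegate implementations, whereas List<T> tied each chore to a single delegate subclass and forced the unchecked (T) cast in newFileCleaner. A small sketch of that design choice (Delegate, LogDelegate, HFileDelegate and Chain are illustrative stand-ins):

    import java.util.ArrayList;
    import java.util.List;

    // Sketch of why the patch replaces List<T> with the interface type.
    // Delegate stands in for FileCleanerDelegate.
    interface Delegate {
      boolean isDeletable(String path);
    }

    class LogDelegate implements Delegate {
      public boolean isDeletable(String path) { return path.endsWith(".log"); }
    }

    class HFileDelegate implements Delegate {
      public boolean isDeletable(String path) { return path.endsWith(".hfile"); }
    }

    final class Chain {
      // A list typed to the interface accepts any delegate implementation,
      // so no per-subclass type parameter (and no unchecked cast) is needed.
      private final List<Delegate> chain = new ArrayList<Delegate>();

      void add(Delegate d) { chain.add(d); }

      boolean allAgree(String path) {
        for (Delegate d : chain) {
          if (!d.isDeletable(path)) { return false; }
        }
        return true;
      }
    }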
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
index 451e9db..e13f3e9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 * folder that are deletable for each HFile cleaner in the chain.
 */
 @InterfaceAudience.Private
-public class HFileCleaner extends CleanerChore<BaseHFileCleanerDelegate> {
+public class HFileCleaner extends CleanerChore {
 
   public static final String MASTER_HFILE_CLEANER_PLUGINS = "hbase.master.hfilecleaner.plugins";
 
@@ -58,7 +58,7 @@ public class HFileCleaner extends CleanerChore {
   /**
    * Exposed for TESTING!
    */
-  public List<BaseHFileCleanerDelegate> getDelegatesForTesting() {
+  public List<FileCleanerDelegate> getDelegatesForTesting() {
     return this.cleanersChain;
   }
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
index 3aedfea..5b61f44 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Stoppable;
-import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
 
 /**
@@ -35,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
  * @see BaseLogCleanerDelegate
  */
 @InterfaceAudience.Private
-public class LogCleaner extends CleanerChore<BaseLogCleanerDelegate> {
+public class LogCleaner extends CleanerChore {
   static final Log LOG = LogFactory.getLog(LogCleaner.class.getName());
 
   /**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
index b2cd2f6..c2362d5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
@@ -49,6 +49,14 @@ public class HLogUtil {
     return Bytes.equals(HLog.METAFAMILY, family);
   }
 
+  /**
+   * Returns the class whose name is the string value
+   * for the key "hbase.regionserver.hlog.keyclass" in the given {@code conf},
+   * or returns {@code HLogKey.class} if that value is null.
+   * Note that it is possible that the returned class is not a subclass of {@code HLogKey}.
+   *
+   * @throws RuntimeException if the name provided by {@code conf} is not a valid class name
+   */
   @SuppressWarnings("unchecked")
   public static Class<? extends HLogKey> getKeyClass(Configuration conf) {
     return (Class<? extends HLogKey>) conf.getClass(
@@ -56,13 +64,11 @@ public class HLogUtil {
   }
 
   public static HLogKey newKey(Configuration conf) throws IOException {
-    Class<? extends HLogKey> keyClass = getKeyClass(conf);
     try {
+      Class<? extends HLogKey> keyClass = getKeyClass(conf).asSubclass(HLogKey.class);
       return keyClass.newInstance();
-    } catch (InstantiationException e) {
-      throw new IOException("cannot create hlog key");
-    } catch (IllegalAccessException e) {
-      throw new IOException("cannot create hlog key");
+    } catch (Exception e) {
+      throw new IOException("cannot create hlog key", e);
     }
   }
 
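The javadoc added above is warranted because the two-argument Configuration.getClass(name, defaultValue) resolves whatever class the configuration names, without constraining it to the default's type; the asSubclass(HLogKey.class) call in newKey is what enforces the bound at runtime. A small sketch of the hazard, assuming a Hadoop Configuration on the classpath (KeyClassDemo is illustrative):

    import org.apache.hadoop.conf.Configuration;

    // Sketch: Configuration.getClass does not tie the configured class to
    // the default's type, so the unchecked cast in getKeyClass may name a
    // class that is not an HLogKey at all.
    final class KeyClassDemo {
      static Class<?> resolveKeyClass(Configuration conf) {
        // Returns HLogKey.class only when the key is unset; otherwise it
        // returns whatever class the string value names.
        return conf.getClass("hbase.regionserver.hlog.keyclass",
            org.apache.hadoop.hbase.regionserver.wal.HLogKey.class);
      }

      static void demo(Configuration conf) {
        conf.set("hbase.regionserver.hlog.keyclass", "java.lang.String");
        Class<?> c = resolveKeyClass(conf);
        // c is java.lang.String here; asSubclass(HLogKey.class) would throw
        // ClassCastException, which newKey now wraps in an IOException.
        System.out.println(c.getName());
      }
    }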
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
index fea92a3..bb89175 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
@@ -221,10 +221,8 @@ public class SequenceFileLogReader implements HLog.Reader {
     } else {
       try {
         key = keyClass.newInstance();
-      } catch (InstantiationException ie) {
-        throw new IOException(ie);
-      } catch (IllegalAccessException iae) {
-        throw new IOException(iae);
+      } catch (Exception ex) {
+        throw new IOException(ex);
       }
     }
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index a615430..bc8a3c1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -328,10 +328,12 @@ public class ReplicationSourceManager {
       final String peerId) throws IOException {
     ReplicationSourceInterface src;
     try {
-      @SuppressWarnings("rawtypes")
-      Class c = Class.forName(conf.get("replication.replicationsource.implementation",
-          ReplicationSource.class.getCanonicalName()));
-      src = (ReplicationSourceInterface) c.newInstance();
+      String className = conf.get(
+          "replication.replicationsource.implementation",
+          ReplicationSource.class.getCanonicalName());
+      Class<? extends ReplicationSourceInterface> c =
+          Class.forName(className).asSubclass(ReplicationSourceInterface.class);
+      src = c.newInstance();
     } catch (Exception e) {
       LOG.warn("Passed replication source implementation throws errors, " +
           "defaulting to ReplicationSource", e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index 61d38b9..73183a0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -61,28 +61,26 @@ public class ProtobufMessageBodyConsumer
       Annotation[] annotations, MediaType mediaType,
       MultivaluedMap<String, String> httpHeaders, InputStream inputStream)
       throws IOException, WebApplicationException {
-    ProtobufMessageHandler obj = null;
+    ProtobufMessageHandler obj;
     try {
       obj = type.newInstance();
-      ByteArrayOutputStream baos = new ByteArrayOutputStream();
-      byte[] buffer = new byte[4096];
-      int read;
-      do {
-        read = inputStream.read(buffer, 0, buffer.length);
-        if (read > 0) {
-          baos.write(buffer, 0, read);
-        }
-      } while (read > 0);
-      if (LOG.isDebugEnabled()) {
-        LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
-          inputStream);
-      }
-      obj = obj.getObjectFromMessage(baos.toByteArray());
-    } catch (InstantiationException e) {
-      throw new WebApplicationException(e);
-    } catch (IllegalAccessException e) {
+    } catch(Exception e) {
       throw new WebApplicationException(e);
     }
-    return obj;
+
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    byte[] buffer = new byte[4096];
+    int read;
+    do {
+      read = inputStream.read(buffer, 0, buffer.length);
+      if (read > 0) {
+        baos.write(buffer, 0, read);
+      }
+    } while (read > 0);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
+        inputStream);
+    }
+    return obj.getObjectFromMessage(baos.toByteArray());
   }
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
index 529ad90..7fd6374 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate;
+import org.apache.hadoop.hbase.master.cleaner.FileCleanerDelegate;
 import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.Store;
@@ -161,7 +162,7 @@ public class TestZooKeeperTableArchiveClient {
     // setup the delegate
     Stoppable stop = new StoppableImplementation();
     HFileCleaner cleaner = setupAndCreateCleaner(conf, fs, archiveDir, stop);
-    List<BaseHFileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
+    List<FileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
     final LongTermArchivingHFileCleaner delegate = (LongTermArchivingHFileCleaner) cleaners.get(0);
 
     // create the region
@@ -210,7 +211,7 @@ public class TestZooKeeperTableArchiveClient {
     // setup the delegate
     Stoppable stop = new StoppableImplementation();
     HFileCleaner cleaner = setupAndCreateCleaner(conf, fs, archiveDir, stop);
-    List<BaseHFileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
+    List<FileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
     final LongTermArchivingHFileCleaner delegate = (LongTermArchivingHFileCleaner) cleaners.get(0);
 
     // create the region
@@ -303,7 +304,7 @@ public class TestZooKeeperTableArchiveClient {
    * @throws IOException on failure
    * @throws KeeperException on failure
    */
-  private List<BaseHFileCleanerDelegate> turnOnArchiving(String tableName, HFileCleaner cleaner)
+  private List<FileCleanerDelegate> turnOnArchiving(String tableName, HFileCleaner cleaner)
       throws IOException, KeeperException {
     // turn on hfile retention
     LOG.debug("----Starting archiving for table:" + tableName);
@@ -311,7 +312,7 @@ public class TestZooKeeperTableArchiveClient {
     assertTrue("Archving didn't get turned on", archivingClient.getArchivingEnabled(tableName));
 
     // wait for the archiver to get the notification
-    List<BaseHFileCleanerDelegate> cleaners = cleaner.getDelegatesForTesting();
+    List<FileCleanerDelegate> cleaners = cleaner.getDelegatesForTesting();
     LongTermArchivingHFileCleaner delegate = (LongTermArchivingHFileCleaner) cleaners.get(0);
     while (!delegate.archiveTracker.keepHFiles(STRING_TABLE_NAME)) {
       // spin until propagation - should be fast
@@ -326,7 +327,7 @@ public class TestZooKeeperTableArchiveClient {
    * least the expected number of times.
    */
   private CountDownLatch setupCleanerWatching(LongTermArchivingHFileCleaner cleaner,
-      List<BaseHFileCleanerDelegate> cleaners, final int expected) {
+      List<FileCleanerDelegate> cleaners, final int expected) {
     // replace the cleaner with one that we can can check
     BaseHFileCleanerDelegate delegateSpy = Mockito.spy(cleaner);
     final int[] counter = new int[] { 0 };
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
index 9ab955c..99b3c76 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
@@ -285,7 +285,7 @@ public class TestCleanerChore {
     Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any(Path.class));
   }
 
-  private static class AllValidPaths extends CleanerChore<BaseHFileCleanerDelegate> {
+  private static class AllValidPaths extends CleanerChore {
 
     public AllValidPaths(String name, Stoppable s, Configuration conf, FileSystem fs,
         Path oldFileDir, String confkey) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
index 9beb347..be3f063 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
@@ -347,9 +347,9 @@ public class TestSnapshotFromMaster {
 
     // make sure that we don't keep around the hfiles that aren't in a snapshot
     // make sure we wait long enough to refresh the snapshot hfile
-    List<BaseHFileCleanerDelegate> delegates = UTIL.getMiniHBaseCluster().getMaster()
+    List<FileCleanerDelegate> delegates = UTIL.getMiniHBaseCluster().getMaster()
         .getHFileCleaner().cleanersChain;
-    for (BaseHFileCleanerDelegate delegate: delegates) {
+    for (FileCleanerDelegate delegate: delegates) {
       if (delegate instanceof SnapshotHFileCleaner) {
         ((SnapshotHFileCleaner)delegate).getFileCacheForTesting().triggerCacheRefreshForTesting();
       }