From 8f53933d0dfcceb243ae2cea2e5bdf7e839a0851 Mon Sep 17 00:00:00 2001
From: Mike Drob
Date: Mon, 12 Mar 2018 16:05:39 -0500
Subject: [PATCH] HBASE-20180 Avoid Class::newInstance

---
 .../hadoop/hbase/backup/BackupClientFactory.java    |  5 +-
 hbase-build-configuration/pom.xml                   |  1 +
 .../hbase/coprocessor/AggregateImplementation.java  | 15 +++---
 .../hbase/mapreduce/MultithreadedTableMapper.java   |  3 +-
 .../mapreduce/TableSnapshotInputFormatImpl.java     | 17 +++---
 .../hadoop/hbase/constraint/Constraints.java        | 10 ++--
 .../hadoop/hbase/master/MasterCoprocessorHost.java  | 28 ++++++----
 .../master/replication/ReplicationPeerManager.java  |  4 +-
 .../hadoop/hbase/regionserver/RSRpcServices.java    | 13 +++--
 .../hbase/regionserver/RegionCoprocessorHost.java   | 28 ++++++----
 .../regionserver/RegionServerCoprocessorHost.java   | 29 ++++++----
 .../hbase/regionserver/wal/WALCoprocessorHost.java  | 11 ++--
 .../regionserver/ReplicationSource.java             | 18 +++++--
 .../regionserver/ReplicationSourceFactory.java      |  5 +-
 .../hbase/coprocessor/TestCoprocessorHost.java      | 62 ++++++++++++++--------
 15 files changed, 148 insertions(+), 101 deletions(-)

diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupClientFactory.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupClientFactory.java
index 4c962290e8..e3abb60399 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupClientFactory.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupClientFactory.java
@@ -38,8 +38,9 @@ public final class BackupClientFactory {
     try {
       String clsName = conf.get(TableBackupClient.BACKUP_CLIENT_IMPL_CLASS);
       if (clsName != null) {
-        Class clientImpl = Class.forName(clsName);
-        TableBackupClient client = (TableBackupClient) clientImpl.newInstance();
+        Class<? extends TableBackupClient> clientImpl;
+        clientImpl = Class.forName(clsName).asSubclass(TableBackupClient.class);
+        TableBackupClient client = clientImpl.getDeclaredConstructor().newInstance();
         client.init(conn, backupId, request);
         return client;
       }
diff --git a/hbase-build-configuration/pom.xml b/hbase-build-configuration/pom.xml
index ac98044335..311b1c0394 100644
--- a/hbase-build-configuration/pom.xml
+++ b/hbase-build-configuration/pom.xml
@@ -82,6 +82,7 @@
               <arg>-XepDisableWarningsInGeneratedCode</arg>
               <arg>-Xep:FallThrough:OFF</arg>
+              <arg>-Xep:ClassNewInstance:ERROR</arg>
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index 6beb3f66fd..c8badd3692 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -27,6 +27,7 @@ import com.google.protobuf.RpcController;
 import com.google.protobuf.Service;
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -480,21 +481,19 @@ extends AggregateService implements RegionCoprocessor {
   ColumnInterpreter<T, S, P, Q, R> constructColumnInterpreterFromRequest(
       AggregateRequest request) throws IOException {
     String className = request.getInterpreterClassName();
-    Class cls;
     try {
-      cls = Class.forName(className);
-      ColumnInterpreter ci = (ColumnInterpreter) cls.newInstance();
+      ColumnInterpreter<T, S, P, Q, R> ci;
+      Class cls = Class.forName(className);
+      ci = (ColumnInterpreter<T, S, P, Q, R>) cls.getDeclaredConstructor().newInstance();
+
       if (request.hasInterpreterSpecificBytes()) {
         ByteString b = request.getInterpreterSpecificBytes();
         P initMsg = getParsedGenericInstance(ci.getClass(), 2, b);
         ci.initialize(initMsg);
       }
       return ci;
-    } catch (ClassNotFoundException e) {
-      throw new IOException(e);
-    } catch (InstantiationException e) {
-      throw new IOException(e);
-    } catch (IllegalAccessException e) {
+    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException |
+        NoSuchMethodException | InvocationTargetException e) {
       throw new IOException(e);
     }
   }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
index 626deffda4..5cd87ae6a5 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
@@ -281,7 +281,8 @@ public class MultithreadedTableMapper extends TableMapper {
             outer.getInputSplit());
         Class wrappedMapperClass = Class.forName("org.apache.hadoop.mapreduce.lib.map.WrappedMapper");
         Method getMapContext = wrappedMapperClass.getMethod("getMapContext", MapContext.class);
-        subcontext = (Context) getMapContext.invoke(wrappedMapperClass.newInstance(), mc);
+        subcontext = (Context) getMapContext.invoke(
+            wrappedMapperClass.getDeclaredConstructor().newInstance(), mc);
       } catch (Exception ee) { // FindBugs: REC_CATCH_EXCEPTION
         // rethrow as IOE
         throw new IOException(e);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
index add0985fde..1795ce195a 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
@@ -22,6 +22,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
@@ -300,17 +301,11 @@ public class TableSnapshotInputFormatImpl {
     if (splitAlgoClassName == null)
       return null;
     try {
-      return ((Class<? extends RegionSplitter.SplitAlgorithm>)
-          Class.forName(splitAlgoClassName)).newInstance();
-    } catch (ClassNotFoundException e) {
-      throw new IOException("SplitAlgo class " + splitAlgoClassName +
-          " is not found", e);
-    } catch (InstantiationException e) {
-      throw new IOException("SplitAlgo class " + splitAlgoClassName +
-          " is not instantiable", e);
-    } catch (IllegalAccessException e) {
-      throw new IOException("SplitAlgo class " + splitAlgoClassName +
-          " is not instantiable", e);
+      return Class.forName(splitAlgoClassName).asSubclass(RegionSplitter.SplitAlgorithm.class)
+          .getDeclaredConstructor().newInstance();
+    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException |
+        NoSuchMethodException | InvocationTargetException e) {
+      throw new IOException("SplitAlgo class " + splitAlgoClassName + " is not found", e);
     }
   }

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
index 426e516153..759d2520b0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
@@ -21,6 +21,7 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -593,14 +594,11 @@ public final class Constraints {
       // add the constraint, now that we expect it to be valid.
       Class<? extends Constraint> clazz = classloader.loadClass(key)
           .asSubclass(Constraint.class);
-      Constraint constraint = clazz.newInstance();
+      Constraint constraint = clazz.getDeclaredConstructor().newInstance();
       constraint.setConf(conf);
       constraints.add(constraint);
-    } catch (ClassNotFoundException e1) {
-      throw new IOException(e1);
-    } catch (InstantiationException e1) {
-      throw new IOException(e1);
-    } catch (IllegalAccessException e1) {
+    } catch (InvocationTargetException | NoSuchMethodException | ClassNotFoundException |
+        InstantiationException | IllegalAccessException e1) {
       throw new IOException(e1);
     }
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
index 7f17227d4c..8c8c02c01d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.master;
 
 import com.google.protobuf.Service;
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.List;
 import java.util.Set;
 import org.apache.hadoop.conf.Configuration;
@@ -170,17 +171,22 @@ public class MasterCoprocessorHost
   @Override
   public MasterCoprocessor checkAndGetInstance(Class<?> implClass)
       throws InstantiationException, IllegalAccessException {
-    if (MasterCoprocessor.class.isAssignableFrom(implClass)) {
-      return (MasterCoprocessor)implClass.newInstance();
-    } else if (CoprocessorService.class.isAssignableFrom(implClass)) {
-      // For backward compatibility with old CoprocessorService impl which don't extend
-      // MasterCoprocessor.
-      return new CoprocessorServiceBackwardCompatiblity.MasterCoprocessorService(
-        (CoprocessorService)implClass.newInstance());
-    } else {
-      LOG.error(implClass.getName() + " is not of type MasterCoprocessor. Check the "
-          + "configuration " + CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY);
-      return null;
+    try {
+      if (MasterCoprocessor.class.isAssignableFrom(implClass)) {
+        return implClass.asSubclass(MasterCoprocessor.class).getDeclaredConstructor().newInstance();
+      } else if (CoprocessorService.class.isAssignableFrom(implClass)) {
+        // For backward compatibility with old CoprocessorService impl which don't extend
+        // MasterCoprocessor.
+        CoprocessorService cs;
+        cs = implClass.asSubclass(CoprocessorService.class).getDeclaredConstructor().newInstance();
+        return new CoprocessorServiceBackwardCompatiblity.MasterCoprocessorService(cs);
+      } else {
+        LOG.error("{} is not of type MasterCoprocessor. Check the configuration of {}",
Check the configuration of {}", + implClass.getName(), CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY); + return null; + } + } catch (NoSuchMethodException | InvocationTargetException e) { + throw (InstantiationException) new InstantiationException(implClass.getName()).initCause(e); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java index 76206385f1..1e933730d4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java @@ -310,10 +310,10 @@ public class ReplicationPeerManager { String[] filters = filterCSV.split(","); for (String filter : filters) { try { - Class.forName(filter).newInstance(); + Class.forName(filter).getDeclaredConstructor().newInstance(); } catch (Exception e) { throw new DoNotRetryIOException("Configured WALEntryFilter " + filter + - " could not be created. Failing add/update " + "peer operation.", e); + " could not be created. Failing add/update peer operation.", e); } } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index 3efea4e9a3..c559823bd5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InterruptedIOException; +import java.lang.reflect.InvocationTargetException; import java.net.BindException; import java.net.InetSocketAddress; import java.net.UnknownHostException; @@ -1185,11 +1186,13 @@ public class RSRpcServices implements HBaseRPCErrorHandler, rowSizeWarnThreshold = rs.conf.getInt(BATCH_ROWS_THRESHOLD_NAME, BATCH_ROWS_THRESHOLD_DEFAULT); RpcSchedulerFactory rpcSchedulerFactory; try { - Class rpcSchedulerFactoryClass = rs.conf.getClass( + Class cls = rs.conf.getClass( REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS, SimpleRpcSchedulerFactory.class); - rpcSchedulerFactory = ((RpcSchedulerFactory) rpcSchedulerFactoryClass.newInstance()); - } catch (InstantiationException | IllegalAccessException e) { + rpcSchedulerFactory = cls.asSubclass(RpcSchedulerFactory.class) + .getDeclaredConstructor().newInstance(); + } catch (NoSuchMethodException | InvocationTargetException | + InstantiationException | IllegalAccessException e) { throw new IllegalArgumentException(e); } // Server to handle client requests. 
@@ -3549,8 +3552,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
     for (RemoteProcedureRequest req : request.getProcList()) {
       RSProcedureCallable callable;
       try {
-        callable =
-            Class.forName(req.getProcClass()).asSubclass(RSProcedureCallable.class).newInstance();
+        callable = Class.forName(req.getProcClass()).asSubclass(RSProcedureCallable.class)
+            .getDeclaredConstructor().newInstance();
       } catch (Exception e) {
         regionServer.remoteProcedureComplete(req.getProcId(), e);
         continue;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 47b389af11..b35f4fb0f1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -22,6 +22,7 @@ import com.google.protobuf.Message;
 import com.google.protobuf.Service;
 
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -465,17 +466,22 @@ public class RegionCoprocessorHost
   @Override
   public RegionCoprocessor checkAndGetInstance(Class<?> implClass)
       throws InstantiationException, IllegalAccessException {
-    if (RegionCoprocessor.class.isAssignableFrom(implClass)) {
-      return (RegionCoprocessor)implClass.newInstance();
-    } else if (CoprocessorService.class.isAssignableFrom(implClass)) {
-      // For backward compatibility with old CoprocessorService impl which don't extend
-      // RegionCoprocessor.
-      return new CoprocessorServiceBackwardCompatiblity.RegionCoprocessorService(
-        (CoprocessorService)implClass.newInstance());
-    } else {
-      LOG.error(implClass.getName() + " is not of type RegionCoprocessor. Check the "
-          + "configuration " + CoprocessorHost.REGION_COPROCESSOR_CONF_KEY);
-      return null;
+    try {
+      if (RegionCoprocessor.class.isAssignableFrom(implClass)) {
+        return implClass.asSubclass(RegionCoprocessor.class).getDeclaredConstructor().newInstance();
+      } else if (CoprocessorService.class.isAssignableFrom(implClass)) {
+        // For backward compatibility with old CoprocessorService impl which don't extend
+        // RegionCoprocessor.
+        CoprocessorService cs;
+        cs = implClass.asSubclass(CoprocessorService.class).getDeclaredConstructor().newInstance();
+        return new CoprocessorServiceBackwardCompatiblity.RegionCoprocessorService(cs);
+      } else {
+        LOG.error("{} is not of type RegionCoprocessor. Check the configuration of {}",
Check the configuration of {}", + implClass.getName(), CoprocessorHost.REGION_COPROCESSOR_CONF_KEY); + return null; + } + } catch (NoSuchMethodException | InvocationTargetException e) { + throw (InstantiationException) new InstantiationException(implClass.getName()).initCause(e); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java index f4122cef2b..42a4e00e8e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; import com.google.protobuf.Service; @@ -82,17 +83,23 @@ public class RegionServerCoprocessorHost extends @Override public RegionServerCoprocessor checkAndGetInstance(Class implClass) throws InstantiationException, IllegalAccessException { - if (RegionServerCoprocessor.class.isAssignableFrom(implClass)) { - return (RegionServerCoprocessor)implClass.newInstance(); - } else if (SingletonCoprocessorService.class.isAssignableFrom(implClass)) { - // For backward compatibility with old CoprocessorService impl which don't extend - // RegionCoprocessor. - return new CoprocessorServiceBackwardCompatiblity.RegionServerCoprocessorService( - (SingletonCoprocessorService)implClass.newInstance()); - } else { - LOG.error(implClass.getName() + " is not of type RegionServerCoprocessor. Check the " - + "configuration " + CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY); - return null; + try { + if (RegionServerCoprocessor.class.isAssignableFrom(implClass)) { + return implClass.asSubclass(RegionServerCoprocessor.class).getDeclaredConstructor() + .newInstance(); + } else if (SingletonCoprocessorService.class.isAssignableFrom(implClass)) { + // For backward compatibility with old CoprocessorService impl which don't extend + // RegionCoprocessor. + SingletonCoprocessorService tmp = implClass.asSubclass(SingletonCoprocessorService.class) + .getDeclaredConstructor().newInstance(); + return new CoprocessorServiceBackwardCompatiblity.RegionServerCoprocessorService(tmp); + } else { + LOG.error("{} is not of type RegionServerCoprocessor. 
Check the configuration of {}", + implClass.getName(), CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY); + return null; + } + } catch (NoSuchMethodException | InvocationTargetException e) { + throw (InstantiationException) new InstantiationException(implClass.getName()).initCause(e); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java index 7b6182e6b6..40d6d0fc94 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.regionserver.wal; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; @@ -118,10 +119,14 @@ public class WALCoprocessorHost } @Override - public WALCoprocessor checkAndGetInstance(Class implClass) - throws InstantiationException, IllegalAccessException { + public WALCoprocessor checkAndGetInstance(Class implClass) throws IllegalAccessException, + InstantiationException { if (WALCoprocessor.class.isAssignableFrom(implClass)) { - return (WALCoprocessor)implClass.newInstance(); + try { + return implClass.asSubclass(WALCoprocessor.class).getDeclaredConstructor().newInstance(); + } catch (NoSuchMethodException | InvocationTargetException e) { + throw (InstantiationException) new InstantiationException(implClass.getName()).initCause(e); + } } else { LOG.error(implClass.getName() + " is not of type WALCoprocessor. Check the " + "configuration " + CoprocessorHost.WAL_COPROCESSOR_CONF_KEY); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java index 80d08ef66f..348091951e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.replication.regionserver; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; @@ -242,12 +243,21 @@ public class ReplicationSource implements ReplicationSourceInterface { rsServerHost = ((HRegionServer) server).getRegionServerCoprocessorHost(); } String replicationEndpointImpl = replicationPeer.getPeerConfig().getReplicationEndpointImpl(); + + ReplicationEndpoint replicationEndpoint; if (replicationEndpointImpl == null) { - // Default to HBase inter-cluster replication endpoint - replicationEndpointImpl = HBaseInterClusterReplicationEndpoint.class.getName(); + // Default to HBase inter-cluster replication endpoint; skip reflection + replicationEndpoint = new HBaseInterClusterReplicationEndpoint(); + } else { + try { + replicationEndpoint = Class.forName(replicationEndpointImpl) + .asSubclass(ReplicationEndpoint.class) + .getDeclaredConstructor() + .newInstance(); + } catch (NoSuchMethodException | InvocationTargetException e) { + throw new IllegalArgumentException(e); + } } - ReplicationEndpoint replicationEndpoint = - Class.forName(replicationEndpointImpl).asSubclass(ReplicationEndpoint.class).newInstance(); if (rsServerHost != null) { 
       ReplicationEndpoint newReplicationEndPoint =
           rsServerHost.postCreateReplicationEndPoint(replicationEndpoint);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java
index 93e83318d7..d613049d38 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java
@@ -40,10 +40,9 @@ public class ReplicationSourceFactory {
       String defaultReplicationSourceImpl =
           isQueueRecovered ? RecoveredReplicationSource.class.getCanonicalName()
               : ReplicationSource.class.getCanonicalName();
-      @SuppressWarnings("rawtypes")
-      Class c = Class.forName(
+      Class<?> c = Class.forName(
           conf.get("replication.replicationsource.implementation", defaultReplicationSourceImpl));
-      src = (ReplicationSourceInterface) c.newInstance();
+      src = c.asSubclass(ReplicationSourceInterface.class).getDeclaredConstructor().newInstance();
     } catch (Exception e) {
       LOG.warn("Passed replication source implementation throws errors, "
           + "defaulting to ReplicationSource",
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorHost.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorHost.java
index c541647e19..3cf2221619 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorHost.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorHost.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.coprocessor;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
+import java.lang.reflect.InvocationTargetException;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.Coprocessor;
@@ -56,47 +58,61 @@ public class TestCoprocessorHost {
       return this.aborted;
     }
   }
+
   @Test
   public void testDoubleLoadingAndPriorityValue() {
     final Configuration conf = HBaseConfiguration.create();
-    CoprocessorHost<RegionCoprocessor, CoprocessorEnvironment<RegionCoprocessor>> host =
-        new CoprocessorHost<RegionCoprocessor, CoprocessorEnvironment<RegionCoprocessor>>(
-            new TestAbortable()) {
-      @Override
-      public RegionCoprocessor checkAndGetInstance(Class implClass)
-          throws InstantiationException, IllegalAccessException {
-        if(RegionCoprocessor.class.isAssignableFrom(implClass)) {
-          return (RegionCoprocessor)implClass.newInstance();
-        }
-        return null;
-      }
-
-      final Configuration cpHostConf = conf;
-
-      @Override
-      public CoprocessorEnvironment createEnvironment(
-          final RegionCoprocessor instance, final int priority, int sequence, Configuration conf) {
-        return new BaseEnvironment(instance, priority, 0, cpHostConf);
-      }
-    };
     final String key = "KEY";
     final String coprocessor = "org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver";
+
+    CoprocessorHost<RegionCoprocessor, CoprocessorEnvironment<RegionCoprocessor>> host;
+    host = new CoprocessorHostForTest<>(conf);
+
     // Try and load a coprocessor three times
     conf.setStrings(key, coprocessor, coprocessor, coprocessor,
         SimpleRegionObserverV2.class.getName());
     host.loadSystemCoprocessors(conf, key);
+
     // Two coprocessors(SimpleRegionObserver and SimpleRegionObserverV2) loaded
     Assert.assertEquals(2, host.coprocEnvironments.size());
+
     // Check the priority value
-    CoprocessorEnvironment simpleEnv = host.findCoprocessorEnvironment(
+    CoprocessorEnvironment<?> simpleEnv = host.findCoprocessorEnvironment(
         SimpleRegionObserver.class.getName());
-    CoprocessorEnvironment simpleEnv_v2 = host.findCoprocessorEnvironment(
+    CoprocessorEnvironment<?> simpleEnv_v2 = host.findCoprocessorEnvironment(
         SimpleRegionObserverV2.class.getName());
+
     assertNotNull(simpleEnv);
     assertNotNull(simpleEnv_v2);
     assertEquals(Coprocessor.PRIORITY_SYSTEM, simpleEnv.getPriority());
     assertEquals(Coprocessor.PRIORITY_SYSTEM + 1, simpleEnv_v2.getPriority());
   }
-  public static class SimpleRegionObserverV2 extends SimpleRegionObserver {
+
+  public static class SimpleRegionObserverV2 extends SimpleRegionObserver {
   }
+
+  private static class CoprocessorHostForTest<E extends Coprocessor> extends
+      CoprocessorHost<E, CoprocessorEnvironment<E>> {
+    final Configuration cpHostConf;
+
+    public CoprocessorHostForTest(Configuration conf) {
+      super(new TestAbortable());
+      cpHostConf = conf;
+    }
+
+    @Override
+    public E checkAndGetInstance(Class implClass)
+        throws InstantiationException, IllegalAccessException {
+      try {
+        return (E) implClass.getDeclaredConstructor().newInstance();
+      } catch (InvocationTargetException | NoSuchMethodException e) {
+        throw (InstantiationException) new InstantiationException().initCause(e);
+      }
+    }
+
+    @Override
+    public CoprocessorEnvironment createEnvironment(final E instance, final int priority,
+        int sequence, Configuration conf) {
+      return new BaseEnvironment<>(instance, priority, 0, cpHostConf);
+    }
+  }
 }
--
2.16.1
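
For readers unfamiliar with the migration this patch performs at every call site: `clazz.newInstance()` (deprecated since Java 9, and it rethrows any checked exception the constructor throws without declaring it) is replaced by `clazz.getDeclaredConstructor().newInstance()`, usually combined with `Class.forName(...).asSubclass(...)` so the raw cast disappears, and the two extra reflective exceptions (NoSuchMethodException and InvocationTargetException) are folded into the existing error handling. The pom.xml change enables the error-prone ClassNewInstance check at ERROR level so new uses fail the build. The following self-contained sketch illustrates the before/after shape of the change only; the Widget, DefaultWidget, createOldStyle, and createNewStyle names are illustrative and do not appear anywhere in HBase.

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;

public class NewInstanceMigrationSketch {

  /** Illustrative interface standing in for types like TableBackupClient or Constraint. */
  public interface Widget {}

  /** Illustrative implementation with the required no-arg constructor. */
  public static class DefaultWidget implements Widget {
    public DefaultWidget() {}
  }

  /** Old style: Class.newInstance() lets constructor exceptions escape undeclared. */
  @SuppressWarnings("deprecation")
  static Widget createOldStyle(String className) throws IOException {
    try {
      Class<?> cls = Class.forName(className);
      return (Widget) cls.newInstance();
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
      throw new IOException(e);
    }
  }

  /** New style, as in this patch: asSubclass() plus getDeclaredConstructor().newInstance(). */
  static Widget createNewStyle(String className) throws IOException {
    try {
      return Class.forName(className).asSubclass(Widget.class)
          .getDeclaredConstructor().newInstance();
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
        | NoSuchMethodException | InvocationTargetException e) {
      // Two extra exception types appear: NoSuchMethodException when there is no no-arg
      // constructor, and InvocationTargetException wrapping anything the constructor threw.
      throw new IOException(e);
    }
  }

  public static void main(String[] args) throws IOException {
    Widget w = createNewStyle(DefaultWidget.class.getName());
    System.out.println(w.getClass().getSimpleName());
  }
}

Compiling and running the sketch prints "DefaultWidget"; the only behavioral difference between the two helpers is that constructor failures now surface as a wrapped InvocationTargetException instead of escaping undeclared, which is exactly the trade the patch makes in each file above.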