From 1144b922bfd05454f10d405296e052a3cd6312ce Mon Sep 17 00:00:00 2001
From: Elliott Clark
Date: Mon, 13 Oct 2014 16:23:09 -0700
Subject: [PATCH] Add checkstyle

---
 dev-support/test-patch.sh                           | 40 +++++++++++++++++---
 hbase-checkstyle/pom.xml                            | 32 ++++++++++++++++
 .../resources/hbase/checkstyle-suppressions.xml     |  8 ++++
 .../src/main/resources/hbase/checkstyle.xml         | 32 ++++++++++++++++
 .../main/java/org/apache/hadoop/hbase/Chore.java    | 10 ++++-
 .../org/apache/hadoop/hbase/ClusterStatus.java      |  1 +
 .../apache/hadoop/hbase/DoNotRetryIOException.java  |  1 -
 .../org/apache/hadoop/hbase/HColumnDescriptor.java  |  7 +++-
 .../java/org/apache/hadoop/hbase/HRegionInfo.java   |  5 ---
 .../org/apache/hadoop/hbase/HRegionLocation.java    |  3 +-
 .../main/java/org/apache/hadoop/hbase/CellKey.java  |  3 +-
 .../java/org/apache/hadoop/hbase/CellScanner.java   |  1 -
 .../java/org/apache/hadoop/hbase/CellUtil.java      | 11 +++++-
 .../apache/hadoop/hbase/CompoundConfiguration.java  |  9 ++---
 .../apache/hadoop/hbase/HBaseConfiguration.java     | 17 +++++----
 .../hadoop/hbase/HBaseInterfaceAudience.java        |  8 +++-
 .../java/org/apache/hadoop/hbase/HConstants.java    | 16 ++++++--
 .../hbase/CoordinatedStateManagerFactory.java       |  7 +++-
 .../apache/hadoop/hbase/DaemonThreadFactory.java    |  8 ++--
 .../org/apache/hadoop/hbase/HealthCheckChore.java   |  2 +-
 .../hadoop/hbase/master/cleaner/CleanerChore.java   |  2 +-
 .../regionserver/TestEndToEndSplitTransaction.java  |  2 +-
 pom.xml                                             | 44 +++++++++++++++++++++-
 23 files changed, 222 insertions(+), 47 deletions(-)
 create mode 100644 hbase-checkstyle/pom.xml
 create mode 100644 hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml
 create mode 100644 hbase-checkstyle/src/main/resources/hbase/checkstyle.xml

diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 1cad668..0a0f808 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -220,7 +220,6 @@ setup () {
     against trunk revision ${SVN_REVISION}.
     ATTACHMENT ID: ${ATTACHMENT_ID}"

-    #PENDING: cp -f $SUPPORT_DIR/etc/checkstyle* ./src/test
     ### Copy the patch file to $PATCH_DIR
   else
     VERSION=PATCH-${defect}
@@ -250,16 +249,17 @@ setup () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
+  echo "$MVN clean test checkstyle:checkstyle-aggregate -DskipTests -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
   export MAVEN_OPTS="${MAVEN_OPTS}"
   # build core and tests
-  $MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
+  $MVN clean test checkstyle:checkstyle-aggregate -DskipTests -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
     ERR=`$GREP -A 5 'Compilation failure' $PATCH_DIR/trunkJavacWarnings.txt`
     echo "Trunk compilation is broken?
     {code}$ERR{code}"
     cleanupAndExit 1
   fi
+  mv target/checkstyle-result.xml $PATCH_DIR/trunkCheckstyle.xml
 }

 ###############################################################################
@@ -493,6 +493,35 @@ checkJavacWarnings () {
   return 0
 }

+checkCheckstyleErrors() {
+  echo ""
+  echo ""
+  echo "======================================================================"
+  echo "======================================================================"
+  echo "    Determining number of patched Checkstyle errors."
+ echo "======================================================================" + echo "======================================================================" + echo "" + echo "" + if [[ -f $PATCH_DIR/trunkCheckstyle.xml ]] ; then + $MVN package -DskipTests checkstyle:checkstyle-aggregate > /dev/null 2>&1 + mv target/checkstyle-result.xml $PATCH_DIR/patchCheckstyle.xml + trunkCheckstyleErrors=`$GREP ' + + +4.0.0 +org.apache.hbase +hbase-checkstyle +2.0.0-SNAPSHOT +HBase - Checkstyle +Module to hold Checkstyle properties for HBase. + + + \ No newline at end of file diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml b/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml new file mode 100644 index 0000000..3531e2a --- /dev/null +++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml @@ -0,0 +1,8 @@ + + + + + + \ No newline at end of file diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml new file mode 100644 index 0000000..8f58623 --- /dev/null +++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/Chore.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/Chore.java index 7d1346f..42d9d37 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/Chore.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/Chore.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.util.Sleeper; public abstract class Chore extends HasThread { private final Log LOG = LogFactory.getLog(this.getClass()); private final Sleeper sleeper; - protected final Stoppable stopper; + private final Stoppable stopper; /** * @param p Period at which we should run. Will be adjusted appropriately @@ -146,4 +146,12 @@ public abstract class Chore extends HasThread { */ protected void cleanup() { } + + protected Stoppable getStopper() { + return stopper; + } + + protected Sleeper getSleeper() { + return sleeper; + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java index 7599e3e..35c7f76 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java @@ -216,6 +216,7 @@ public class ClusterStatus extends VersionedWritable { * @return region server information * @deprecated Use {@link #getServers()} */ + @Deprecated public Collection getServerInfo() { return getServers(); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java index b566fcf..8be2518 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import org.apache.hadoop.hbase.HBaseIOException; /** * Subclass if exception is not meant to be retried: e.g. 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 4958fb7..6be3dcf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -631,6 +631,7 @@ public class HColumnDescriptor implements Comparable {
       Integer.decode(value): Integer.valueOf(DEFAULT_BLOCKSIZE);
     }
     return this.blocksize.intValue();
+
   }

   /**
@@ -663,7 +664,10 @@ public class HColumnDescriptor implements Comparable {
     return setValue(COMPRESSION, type.getName().toUpperCase());
   }

-  /** @return data block encoding algorithm used on disk */
+  /**
+   * @return data block encoding algorithm used on disk
+   * @deprecated See getDataBlockEncoding()
+   */
   @Deprecated
   public DataBlockEncoding getDataBlockEncodingOnDisk() {
     return getDataBlockEncoding();
@@ -673,6 +677,7 @@
    * This method does nothing now. Flag ENCODE_ON_DISK is not used
    * any more. Data blocks have the same encoding in cache as on disk.
    * @return this (for chained invocation)
+   * @deprecated This does nothing now.
    */
   @Deprecated
   public HColumnDescriptor setEncodeOnDisk(boolean encodeOnDisk) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
index 49f31d6..2fefb55 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
@@ -18,13 +18,8 @@
  */
 package org.apache.hadoop.hbase;

-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.EOFException;
 import java.io.IOException;
-import java.io.SequenceInputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java
index 373e76b..edb53dc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java
@@ -104,7 +104,8 @@ public class HRegionLocation implements Comparable {
   }

   /**
-   * @return String made of hostname and port formatted as per {@link Addressing#createHostAndPortStr(String, int)}
+   * @return String made of hostname and port formatted as
+   * per {@link Addressing#createHostAndPortStr(String, int)}
    */
   public String getHostnamePort() {
     return Addressing.createHostAndPortStr(this.getHostname(), this.getPort());
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellKey.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellKey.java
index f4c0722..41a13fb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellKey.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellKey.java
@@ -62,8 +62,7 @@ public class CellKey {
         familyLength);
     String qualifier = (qualifierLength == 0) ? "" :
         Bytes.toStringBinary(qualifierArray, qualifierOffset, qualifierLength);
-    return row + "/" + family +
-        (family != null && family.length() > 0 ? ":" : "") + qualifier
+    return row + "/" + family + (family != null && family.length() > 0 ? ":" : "") + qualifier
         + "/" + KeyValue.humanReadableTimestamp(ts) + "/" + Type.codeToType(type);
   }
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java
index 3b5cdb9..f337122 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java
@@ -22,7 +22,6 @@ import java.io.IOException;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.Cell;

 /**
  * An interface for iterating through a sequence of cells. Similar to Java's Iterator, but without
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index ce0f546..7335c91 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -41,6 +41,11 @@ import org.apache.hadoop.hbase.util.Bytes;
 @InterfaceStability.Evolving
 public final class CellUtil {

+  /**
+   * Private constructor to keep this class from being instantiated.
+   */
+  private CellUtil(){}
+
   /******************* ByteRange *******************************/

   public static ByteRange fillRowRange(Cell cell, ByteRange range) {
@@ -175,7 +180,8 @@ public final class CellUtil {
   }

   public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier,
-      final long timestamp, final byte type, final byte[] value, byte[] tags, final long memstoreTS) {
+      final long timestamp, final byte type, final byte[] value, byte[] tags,
+      final long memstoreTS) {
     KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp,
         KeyValue.Type.codeToType(type), value, tags);
     keyValue.setSequenceId(memstoreTS);
@@ -212,7 +218,8 @@ public final class CellUtil {
    * @param cellScannerables
    * @return CellScanner interface over cellIterables
    */
-  public static CellScanner createCellScanner(final List cellScannerables) {
+  public static CellScanner createCellScanner(
+      final List cellScannerables) {
     return new CellScanner() {
       private final Iterator iterator = cellScannerables.iterator();
       private CellScanner cellScanner = null;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
index c0b3580..9ce74cd 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
@@ -32,7 +32,6 @@ import org.apache.commons.collections.iterators.UnmodifiableIterator;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.Bytes;

 /**
  * Do a shallow merge of multiple KV configuration pools. This is a very useful
@@ -73,11 +72,11 @@ public class CompoundConfiguration extends Configuration {
     int size();
   }

-  protected List configs
+  private final List configs
       = new ArrayList();

   static class ImmutableConfWrapper implements ImmutableConfigMap {
-    Configuration c;
+    private final Configuration c;

     ImmutableConfWrapper(Configuration conf) {
       c = conf;
@@ -164,7 +163,7 @@ public class CompoundConfiguration extends Configuration {

     // put new map at the front of the list (top priority)
     this.configs.add(0, new ImmutableConfigMap() {
-      Map m = map;
+      private final Map m = map;

       @Override
       public Iterator> iterator() {
@@ -225,7 +224,7 @@ public class CompoundConfiguration extends Configuration {

     // put new map at the front of the list (top priority)
     this.configs.add(0, new ImmutableConfigMap() {
-      Map m = map;
+      private final Map m = map;

       @Override
       public Iterator> iterator() {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
index 808c4c1..85f05d4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
@@ -42,6 +42,7 @@ public class HBaseConfiguration extends Configuration {
   /**
    * Instantinating HBaseConfiguration() is deprecated. Please use
    * HBaseConfiguration#create() to construct a plain Configuration
+   * @deprecated Please use create() instead.
    */
   @Deprecated
   public HBaseConfiguration() {
@@ -55,6 +56,7 @@
   /**
    * Instantiating HBaseConfiguration() is deprecated. Please use
    * HBaseConfiguration#create(conf) to construct a plain Configuration
+   * @deprecated Please user create(conf) instead.
    */
   @Deprecated
   public HBaseConfiguration(final Configuration c) {
@@ -167,8 +169,9 @@
    * Get the password from the Configuration instance using the
    * getPassword method if it exists. If not, then fall back to the
    * general get method for configuration elements.
-   * @param conf configuration instance for accessing the passwords
-   * @param alias the name of the password element
+   *
+   * @param conf    configuration instance for accessing the passwords
+   * @param alias   the name of the password element
    * @param defPass the default password
    * @return String password or default password
    * @throws IOException
@@ -181,10 +184,9 @@
         char[] p = (char[]) m.invoke(conf, alias);
         if (p != null) {
           LOG.debug(String.format("Config option \"%s\" was found through" +
-            " the Configuration getPassword method.", alias));
+              " the Configuration getPassword method.", alias));
           passwd = new String(p);
-        }
-        else {
+        } else {
           LOG.debug(String.format(
               "Config option \"%s\" was not found. Using provided default value",
               alias));
@@ -195,7 +197,7 @@
       //provider API doesn't exist yet
       LOG.debug(String.format(
           "Credential.getPassword method is not available." +
-          " Falling back to configuration."));
+              " Falling back to configuration."));
       passwd = conf.get(alias, defPass);
     } catch (SecurityException e) {
       throw new IOException(e.getMessage(), e);
@@ -209,7 +211,8 @@
     return passwd;
   }

-  /** For debugging.  Dump configurations to system output as xml format.
+  /**
+   * For debugging. Dump configurations to system output as xml format.
   * Master and RS configurations can also be dumped using
   * http services. e.g. "curl http://master:16010/dump"
   */
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java
index 2acdd1c..840bbdc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java
@@ -25,7 +25,13 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HBaseInterfaceAudience {
+public final class HBaseInterfaceAudience {
+
+  /**
+   * Can't create this class.
+   */
+  private HBaseInterfaceAudience(){}
+
   public static final String COPROC = "Coprocesssor";
   public static final String REPLICATION = "Replication";
   public static final String PHOENIX = "Phoenix";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 1f23828..7befad9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -150,7 +150,9 @@ public final class HConstants {
   /** Parameter name for the master type being backup (waits for primary to go inactive). */
   public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";

-  /** by default every master is a possible primary master unless the conf explicitly overrides it */
+  /**
+   * by default every master is a possible primary master unless the conf explicitly overrides it
+   */
   public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;

   /** Name of ZooKeeper quorum configuration parameter. */
@@ -179,8 +181,11 @@ public final class HConstants {
   /** Default client port that the zookeeper listens on */
   public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;

-  /** Parameter name for the wait time for the recoverable zookeeper */
-  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME = "hbase.zookeeper.recoverable.waittime";
+  /**
+   * Parameter name for the wait time for the recoverable zookeeper
+   */
+  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =
+      "hbase.zookeeper.recoverable.waittime";

   /** Default wait time for the recoverable zookeeper */
   public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;
@@ -380,7 +385,10 @@
   // should go down.


-  /** The hbase:meta table's name. */
+  /**
+   * The hbase:meta table's name.
+   *
+   */
   @Deprecated  // for compat from 0.94 -> 0.96.
   public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/CoordinatedStateManagerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/CoordinatedStateManagerFactory.java
index e7e7832..7cc3f6d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/CoordinatedStateManagerFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/CoordinatedStateManagerFactory.java
@@ -27,7 +27,12 @@ import org.apache.hadoop.util.ReflectionUtils;
  * based on configuration.
  */
 @InterfaceAudience.Private
-public class CoordinatedStateManagerFactory {
+public final class CoordinatedStateManagerFactory {
+
+  /**
+   * Private to keep this class from being accidentally instantiated.
+   */
+  private CoordinatedStateManagerFactory(){}

   /**
    * Creates consensus provider from the given configuration.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/DaemonThreadFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/DaemonThreadFactory.java
index d621cbf..11da20f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/DaemonThreadFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/DaemonThreadFactory.java
@@ -24,10 +24,10 @@ import java.util.concurrent.atomic.AtomicInteger;
 * Thread factory that creates daemon threads
 */
 public class DaemonThreadFactory implements ThreadFactory {
-  static final AtomicInteger poolNumber = new AtomicInteger(1);
-  final ThreadGroup group;
-  final AtomicInteger threadNumber = new AtomicInteger(1);
-  final String namePrefix;
+  private static final AtomicInteger poolNumber = new AtomicInteger(1);
+  private final ThreadGroup group;
+  private final AtomicInteger threadNumber = new AtomicInteger(1);
+  private final String namePrefix;

   public DaemonThreadFactory(String name) {
     SecurityManager s = System.getSecurityManager();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
index 4226c3f..8d65c66 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
@@ -58,7 +58,7 @@ import org.apache.hadoop.util.StringUtils;
       if (!isHealthy) {
         boolean needToStop = decideToStop();
         if (needToStop) {
-          this.stopper.stop("The node reported unhealthy " + threshold
+          this.getStopper().stop("The node reported unhealthy " + threshold
               + " number of times consecutively.");
         }
         // Always log health report.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 6cd5b05..0d94bc9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -226,7 +226,7 @@ public abstract class CleanerChore extends Chore
     Iterable deletableValidFiles = validFiles;
     // check each of the cleaners for the valid files
     for (T cleaner : cleanersChain) {
-      if (cleaner.isStopped() || this.stopper.isStopped()) {
+      if (cleaner.isStopped() || this.getStopper().isStopped()) {
         LOG.warn("A file cleaner" + this.getName() + " is stopped, won't delete any more files in:"
             + this.oldFileDir);
         return false;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
index 1651ab6..962b408 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
@@ -385,7 +385,7 @@ public class TestEndToEndSplitTransaction {
         verify();
       } catch (Throwable ex) {
         this.ex = ex;
-        stopper.stop("caught exception");
+        getStopper().stop("caught exception");
       }
     }
   }
diff --git a/pom.xml b/pom.xml
index e301d51..1b97e38 100644
--- a/pom.xml
+++ b/pom.xml
@@ -63,6 +63,7 @@
     hbase-testing-util
     hbase-annotations
     hbase-rest
+    hbase-checkstyle


   scm:git:git://git.apache.org/hbase.git
@@ -664,6 +665,22 @@
        ${protoc.path}


+
+        org.apache.maven.plugins
+        maven-checkstyle-plugin
+        2.13
+
+
+            org.apache.hbase
+            hbase-checkstyle
+            ${project.version}
+
+
+
+          hbase/checkstyle.xml
+          hbase/checkstyle-suppressions.xml
+
+



@@ -713,6 +730,21 @@


+
+        org.apache.maven.plugins
+        maven-checkstyle-plugin
+
+
+            org.apache.hbase
+            hbase-checkstyle
+            ${project.version}
+
+
+
+          hbase/checkstyle.xml
+          hbase/checkstyle-suppressions.xml
+
+



@@ -2361,8 +2393,16 @@



-
-
+
+
+      org.apache.maven.plugins
+      maven-checkstyle-plugin
+      2.13
+
+        hbase/checkstyle.xml
+        hbase/checkstyle-suppressions.xml
+
+



-- 
2.1.1
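Note on running the new check locally: the Maven goal and report location below are the ones this patch wires into dev-support/test-patch.sh; invoking them by hand from the repository root is simply an assumed developer workflow, not something the patch itself prescribes.

    # Compile all modules and produce one aggregated Checkstyle report,
    # mirroring what the patched test-patch.sh runs for the trunk build.
    mvn clean test checkstyle:checkstyle-aggregate -DskipTests
    # The combined report lands in target/checkstyle-result.xml; test-patch.sh
    # snapshots that file as trunkCheckstyle.xml before applying the patch and
    # as patchCheckstyle.xml afterwards, then compares the two.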
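The body of checkCheckstyleErrors() is cut off above right after the trunkCheckstyleErrors assignment, so the exact comparison logic is not visible in this copy of the patch. The sketch below shows one minimal way such a check can be written, assuming the two saved checkstyle-result.xml snapshots are compared by counting their error elements; the _sketch suffix and the messages are illustrative, not the patch's actual code.

    # Sketch only, under the assumptions stated above.
    checkCheckstyleErrors_sketch () {
      local trunkErrors patchErrors
      # grep -c counts matching lines, a reasonable proxy since the Checkstyle
      # XML report puts each error element on its own line.
      trunkErrors=$(grep -c '<error' "$PATCH_DIR/trunkCheckstyle.xml")
      patchErrors=$(grep -c '<error' "$PATCH_DIR/patchCheckstyle.xml")
      if [[ $patchErrors -gt $trunkErrors ]] ; then
        echo "-1 checkstyle. The applied patch generated $patchErrors checkstyle errors; trunk had $trunkErrors."
        return 1
      fi
      echo "+1 checkstyle. The applied patch does not increase the number of checkstyle errors."
      return 0
    }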