From b57c55ce191b8c367093a38e8a884fbb7378399c Mon Sep 17 00:00:00 2001
From: Balazs Meszaros
Date: Mon, 2 Jul 2018 12:11:03 +0200
Subject: [PATCH] HBASE-20833 Modify pre-upgrade coprocessor validator to
 support table level coprocessors

- -jar parameter now accepts multiple jar files and directories of jar files.
- observer classes can be verified by -class option.
- -table parameter was added to check table level coprocessors.
- -config parameter was added to obtain the coprocessor classes from HBase configuration.
- -scan option was removed.
---
 .../hbase/tool/PreUpgradeValidator.java       |   8 +-
 .../coprocessor/CoprocessorValidator.java     | 254 ++++++++++++------
 .../coprocessor/CoprocessorViolation.java     |  26 +-
 .../coprocessor/CoprocessorValidatorTest.java |  32 +--
 src/main/asciidoc/_chapters/ops_mgt.adoc      |  26 +-
 5 files changed, 230 insertions(+), 116 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/PreUpgradeValidator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/PreUpgradeValidator.java
index a3c505ef60..7bf307484b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/PreUpgradeValidator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/PreUpgradeValidator.java
@@ -65,9 +65,9 @@ public class PreUpgradeValidator implements Tool {
   private void printUsage() {
     System.out.println("usage: hbase " + TOOL_NAME + " command ...");
     System.out.println("Available commands:");
-    System.out.printf(" %-12s Validate co-processors are compatible with HBase%n",
+    System.out.printf(" %-15s Validate co-processors are compatible with HBase%n",
         VALIDATE_CP_NAME);
-    System.out.printf(" %-12s Validate DataBlockEncoding are compatible on the cluster%n",
+    System.out.printf(" %-15s Validate DataBlockEncodings are compatible with HBase%n",
         VALIDATE_DBE_NAME);
     System.out.println("For further information, please use command -h");
   }
@@ -104,8 +104,10 @@ public class PreUpgradeValidator implements Tool {
   public static void main(String[] args) {
     int ret;
 
+    Configuration conf = HBaseConfiguration.create();
+
     try {
-      ret = ToolRunner.run(HBaseConfiguration.create(), new PreUpgradeValidator(), args);
+      ret = ToolRunner.run(conf, new PreUpgradeValidator(), args);
     } catch (Exception e) {
       LOG.error("Error running command-line tool", e);
       ret = AbstractHBaseTool.EXIT_FAILURE;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidator.java
index c6d57236ae..3899814a0e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidator.java
@@ -23,18 +23,28 @@ import java.io.IOException;
 import java.lang.reflect.Method;
 import java.net.URL;
 import java.net.URLClassLoader;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
-import java.util.jar.JarEntry;
-import java.util.jar.JarFile;
+import java.util.Optional;
+import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
-import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import
org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.CoprocessorDescriptor; +import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.tool.PreUpgradeValidator; import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorViolation.Severity; import org.apache.hadoop.hbase.util.AbstractHBaseTool; @@ -42,9 +52,7 @@ import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine; -import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) public class CoprocessorValidator extends AbstractHBaseTool { @@ -54,13 +62,20 @@ public class CoprocessorValidator extends AbstractHBaseTool { private CoprocessorMethods branch1; private CoprocessorMethods current; + private final List jars; + private final List tablePatterns; + private final List classes; + private boolean config; + private boolean dieOnWarnings; - private boolean scan; - private List args; public CoprocessorValidator() { branch1 = new Branch1CoprocessorMethods(); current = new CurrentCoprocessorMethods(); + + jars = new ArrayList<>(); + tablePatterns = new ArrayList<>(); + classes = new ArrayList<>(); } /** @@ -71,8 +86,8 @@ public class CoprocessorValidator extends AbstractHBaseTool { * according to JLS. */ private static final class ResolverUrlClassLoader extends URLClassLoader { - private ResolverUrlClassLoader(URL[] urls) { - super(urls, ResolverUrlClassLoader.class.getClassLoader()); + private ResolverUrlClassLoader(ClassLoader parent, URL[] urls) { + super(urls, parent); } @Override @@ -82,14 +97,33 @@ public class CoprocessorValidator extends AbstractHBaseTool { } private ResolverUrlClassLoader createClassLoader(URL[] urls) { + return createClassLoader(getClass().getClassLoader(), urls); + } + + private ResolverUrlClassLoader createClassLoader(ClassLoader parent, URL[] urls) { return AccessController.doPrivileged(new PrivilegedAction() { @Override public ResolverUrlClassLoader run() { - return new ResolverUrlClassLoader(urls); + return new ResolverUrlClassLoader(parent, urls); } }); } + private ResolverUrlClassLoader createClassLoader(ClassLoader parent, + org.apache.hadoop.fs.Path path) throws IOException { + Path tempPath = Files.createTempFile("hbase-coprocessor-", ".jar"); + org.apache.hadoop.fs.Path destination = new org.apache.hadoop.fs.Path(tempPath.toString()); + + LOG.debug("Copying coprocessor jar '{}' to '{}'.", path, tempPath); + + FileSystem fileSystem = FileSystem.get(getConf()); + fileSystem.copyToLocalFile(path, destination); + + URL url = tempPath.toUri().toURL(); + + return createClassLoader(new URL[] { url }); + } + private void validate(ClassLoader classLoader, String className, List violations) { LOG.debug("Validating class '{}'.", className); @@ -101,133 +135,186 @@ public class CoprocessorValidator extends AbstractHBaseTool { LOG.trace("Validating method '{}'.", method); if (branch1.hasMethod(method) && !current.hasMethod(method)) { - CoprocessorViolation violation = new CoprocessorViolation(Severity.WARNING, - "Method '" + method + "' was removed from new coprocessor API, " - + "so it won't be called by HBase."); + 
CoprocessorViolation violation = new CoprocessorViolation( + className, Severity.WARNING, "method '" + method + + "' was removed from new coprocessor API, so it won't be called by HBase"); violations.add(violation); } } } catch (ClassNotFoundException e) { - CoprocessorViolation violation = new CoprocessorViolation(Severity.ERROR, - "No such class '" + className + "'.", e); + CoprocessorViolation violation = new CoprocessorViolation( + className, Severity.ERROR, "no such class", e); violations.add(violation); } catch (RuntimeException | Error e) { - CoprocessorViolation violation = new CoprocessorViolation(Severity.ERROR, - "Could not validate class '" + className + "'.", e); + CoprocessorViolation violation = new CoprocessorViolation( + className, Severity.ERROR, "could not validate class", e); violations.add(violation); } } - public List validate(ClassLoader classLoader, List classNames) { - List violations = new ArrayList<>(); - + public void validateClasses(ClassLoader classLoader, List classNames, + List violations) { for (String className : classNames) { validate(classLoader, className, violations); } - - return violations; } - public List validate(List urls, List classNames) - throws IOException { - URL[] urlArray = new URL[urls.size()]; - urls.toArray(urlArray); - - try (ResolverUrlClassLoader classLoader = createClassLoader(urlArray)) { - return validate(classLoader, classNames); - } - } - - @VisibleForTesting - protected List getJarClasses(Path path) throws IOException { - try (JarFile jarFile = new JarFile(path.toFile())) { - return jarFile.stream() - .map(JarEntry::getName) - .filter((name) -> name.endsWith(".class")) - .map((name) -> name.substring(0, name.length() - 6).replace('/', '.')) - .collect(Collectors.toList()); + public void validateClasses(ClassLoader classLoader, String[] classNames, + List violations) { + for (String className : classNames) { + validate(classLoader, className, violations); } } - @VisibleForTesting - protected List filterObservers(ClassLoader classLoader, - Iterable classNames) throws ClassNotFoundException { - List filteredClassNames = new ArrayList<>(); - - for (String className : classNames) { - LOG.debug("Scanning class '{}'.", className); - - Class clazz = classLoader.loadClass(className); - - if (Coprocessor.class.isAssignableFrom(clazz)) { - LOG.debug("Found coprocessor class '{}'.", className); - filteredClassNames.add(className); + private void validateTables(ClassLoader classLoader, Pattern pattern, + List violations) throws IOException { + try (Connection connection = ConnectionFactory.createConnection(getConf()); + Admin admin = connection.getAdmin()) { + List tableDescriptors = admin.listTableDescriptors(pattern); + + for (TableDescriptor tableDescriptor : tableDescriptors) { + LOG.debug("Validating table {}", tableDescriptor.getTableName()); + + Collection coprocessorDescriptors = + tableDescriptor.getCoprocessorDescriptors(); + + for (CoprocessorDescriptor coprocessorDescriptor : coprocessorDescriptors) { + String className = coprocessorDescriptor.getClassName(); + Optional jarPath = coprocessorDescriptor.getJarPath(); + + if (jarPath.isPresent()) { + org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(jarPath.get()); + try (ResolverUrlClassLoader cpClassLoader = createClassLoader(classLoader, path)) { + validate(cpClassLoader, className, violations); + } catch (IOException e) { + CoprocessorViolation violation = new CoprocessorViolation( + className, Severity.ERROR, + "could not validate jar file '" + path + "'", e); + 
violations.add(violation); + } + } else { + validate(classLoader, className, violations); + } + } } } - - return filteredClassNames; } @Override protected void printUsage() { String header = "hbase " + PreUpgradeValidator.TOOL_NAME + " " + - PreUpgradeValidator.VALIDATE_CP_NAME + " -scan|"; + PreUpgradeValidator.VALIDATE_CP_NAME + + " [-jar ...] [-class ... | -table ... | -config]"; printUsage(header, "Options:", ""); } @Override protected void addOptions() { addOptNoArg("e", "Treat warnings as errors."); - addOptNoArg("scan", "Scan jar for observers."); + addOptWithArg("jar", "Jar file/directory of the coprocessor."); + addOptWithArg("table", "Table coprocessor(s) to check."); + addOptWithArg("class", "Coprocessor class(es) to check."); + addOptNoArg("config", "Obtain coprocessor class(es) from configuration."); } @Override protected void processOptions(CommandLine cmd) { - scan = cmd.hasOption("scan"); + String[] jars = cmd.getOptionValues("jar"); + if (jars != null) { + Collections.addAll(this.jars, jars); + } + + String[] tables = cmd.getOptionValues("table"); + if (tables != null) { + Arrays.stream(tables).forEach((table) -> { + Pattern pattern = Pattern.compile(table); + tablePatterns.add(pattern); + }); + } + + String[] classes = cmd.getOptionValues("class"); + if (classes != null) { + Collections.addAll(this.classes, classes); + } + + config = cmd.hasOption("config"); dieOnWarnings = cmd.hasOption("e"); - args = cmd.getArgList(); + } + + private List buildClasspath(List jars) throws IOException { + List urls = new ArrayList<>(); + + for (String jar : jars) { + Path jarPath = Paths.get(jar); + if (Files.isDirectory(jarPath)) { + List files = Files.list(jarPath) + .filter((path) -> Files.isRegularFile(path)) + .collect(Collectors.toList()); + + for (Path file : files) { + URL url = file.toUri().toURL(); + urls.add(url); + } + } else { + URL url = jarPath.toUri().toURL(); + urls.add(url); + } + } + + return urls; } @Override protected int doWork() throws Exception { - if (args.size() < 1) { - System.err.println("Missing jar file."); + if (tablePatterns.isEmpty() && classes.isEmpty() && !config) { + LOG.error("Please give at least one -table, -class or -config parameter."); printUsage(); return EXIT_FAILURE; } - String jar = args.get(0); + List urlList = buildClasspath(jars); + URL[] urls = urlList.toArray(new URL[urlList.size()]); - if (args.size() == 1 && !scan) { - throw new ParseException("Missing classes or -scan option."); - } else if (args.size() > 1 && scan) { - throw new ParseException("Can't use classes with -scan option."); - } - - Path jarPath = Paths.get(jar); - URL[] urls = new URL[] { jarPath.toUri().toURL() }; + LOG.debug("Classpath: {}", urlList); - List violations; + List violations = new ArrayList<>(); try (ResolverUrlClassLoader classLoader = createClassLoader(urls)) { - List classNames; - - if (scan) { - List jarClassNames = getJarClasses(jarPath); - classNames = filterObservers(classLoader, jarClassNames); - } else { - classNames = args.subList(1, args.size()); + for (Pattern tablePattern : tablePatterns) { + validateTables(classLoader, tablePattern, violations); } - violations = validate(classLoader, classNames); + validateClasses(classLoader, classes, violations); + + if (config) { + String[] masterCoprocessors = + getConf().getStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY); + if (masterCoprocessors != null) { + validateClasses(classLoader, masterCoprocessors, violations); + } + + String[] regionCoprocessors = + 
getConf().getStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY); + if (regionCoprocessors != null) { + validateClasses(classLoader, regionCoprocessors, violations); + } + } } boolean error = false; for (CoprocessorViolation violation : violations) { + String className = violation.getClassName(); + String message = violation.getMessage(); + Throwable throwable = violation.getThrowable(); + switch (violation.getSeverity()) { case WARNING: - System.err.println("[WARNING] " + violation.getMessage()); + if (throwable == null) { + LOG.warn("Error in class '{}': {}.", className, message); + } else { + LOG.warn("Error in class '{}': {}.", className, message, throwable); + } if (dieOnWarnings) { error = true; @@ -235,7 +322,12 @@ public class CoprocessorValidator extends AbstractHBaseTool { break; case ERROR: - System.err.println("[ERROR] " + violation.getMessage()); + if (throwable == null) { + LOG.error("Error in class '{}': {}.", className, message); + } else { + LOG.error("Error in class '{}': {}.", className, message, throwable); + } + error = true; break; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorViolation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorViolation.java index c403c07417..ce10919d78 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorViolation.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorViolation.java @@ -21,29 +21,31 @@ package org.apache.hadoop.hbase.tool.coprocessor; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hbase.thirdparty.com.google.common.base.Throwables; - @InterfaceAudience.Private public class CoprocessorViolation { public enum Severity { WARNING, ERROR } + private final String className; private final Severity severity; private final String message; + private final Throwable throwable; - public CoprocessorViolation(Severity severity, String message) { - this(severity, message, null); + public CoprocessorViolation(String className, Severity severity, String message) { + this(className, severity, message, null); } - public CoprocessorViolation(Severity severity, String message, Throwable t) { + public CoprocessorViolation(String className, Severity severity, String message, + Throwable t) { + this.className = className; this.severity = severity; + this.message = message; + this.throwable = t; + } - if (t == null) { - this.message = message; - } else { - this.message = message + "\n" + Throwables.getStackTraceAsString(t); - } + public String getClassName() { + return className; } public Severity getSeverity() { @@ -53,4 +55,8 @@ public class CoprocessorViolation { public String getMessage() { return message; } + + public Throwable getThrowable() { + return throwable; + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java index 8926ff56ee..d4f55d74fe 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java @@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Coprocessor; @@ -37,6 +38,7 @@ import 
org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
 import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 
 @Category({ SmallTests.class })
@@ -71,17 +73,6 @@ public class CoprocessorValidatorTest {
     }
   }
 
-  @Test
-  public void testFilterObservers() throws Exception {
-    String filterObservers = getFullClassName("TestObserver");
-    List<String> classNames = Lists.newArrayList(
-        filterObservers, getClass().getName());
-    List<String> filteredClassNames = validator.filterObservers(getClassLoader(), classNames);
-
-    assertEquals(1, filteredClassNames.size());
-    assertEquals(filterObservers, filteredClassNames.get(0));
-  }
-
   private List<CoprocessorViolation> validate(String className) {
     ClassLoader classLoader = getClass().getClassLoader();
     return validate(classLoader, className);
@@ -89,7 +80,11 @@
   private List<CoprocessorViolation> validate(ClassLoader classLoader, String className) {
     List<String> classNames = Lists.newArrayList(getClass().getName() + "$" + className);
-    return validator.validate(classLoader, classNames);
+    List<CoprocessorViolation> violations = new ArrayList<>();
+
+    validator.validateClasses(classLoader, classNames, violations);
+
+    return violations;
   }
 
   /*
@@ -101,9 +96,11 @@
     assertEquals(1, violations.size());
 
     CoprocessorViolation violation = violations.get(0);
+    assertEquals(getFullClassName("NoSuchClass"), violation.getClassName());
     assertEquals(Severity.ERROR, violation.getSeverity());
-    assertTrue(violation.getMessage().contains(
-        "java.lang.ClassNotFoundException: " +
+
+    String stackTrace = Throwables.getStackTraceAsString(violation.getThrowable());
+    assertTrue(stackTrace.contains("java.lang.ClassNotFoundException: " +
         "org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidatorTest$NoSuchClass"));
   }
@@ -147,9 +144,11 @@
     assertEquals(1, violations.size());
 
     CoprocessorViolation violation = violations.get(0);
+    assertEquals(getFullClassName("MissingClassObserver"), violation.getClassName());
     assertEquals(Severity.ERROR, violation.getSeverity());
-    assertTrue(violation.getMessage().contains(
-        "java.lang.ClassNotFoundException: " +
+
+    String stackTrace = Throwables.getStackTraceAsString(violation.getThrowable());
+    assertTrue(stackTrace.contains("java.lang.ClassNotFoundException: " +
         "org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidatorTest$MissingClass"));
   }
@@ -172,6 +171,7 @@
     CoprocessorViolation violation = violations.get(0);
     assertEquals(Severity.WARNING, violation.getSeverity());
+    assertEquals(getFullClassName("ObsoleteMethodObserver"), violation.getClassName());
     assertTrue(violation.getMessage().contains("was removed from new coprocessor API"));
   }
 }
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc b/src/main/asciidoc/_chapters/ops_mgt.adoc
index d2166e8ddb..b6be8679ad 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -858,14 +858,19 @@ whether the old co-processors are still compatible with the actual HBase version
 [source, bash]
 ----
-$ bin/hbase pre-upgrade validate-cp -scan|
+$ bin/hbase pre-upgrade validate-cp [-jar ...] [-class ... | -table ... | -config]
 
 Options:
- -e        Treat warnings as errors.
- -scan     Scan jar for observers.
+ -e        Treat warnings as errors.
+ -jar      Jar file/directory of the coprocessor.
+ -table    Table coprocessor(s) to check.
+ -class    Coprocessor class(es) to check.
+ -config   Obtain coprocessor class(es) from configuration.
 ----
 
-The first parameter of the tool is the `jar` file which holds the co-processor implementation. Further parameters can be `-scan` when the tool will
-search the jar file for `Coprocessor` implementations or the `classes` can be explicitly given.
+The co-processor classes can be explicitly declared with the `-class` option, or they can be obtained from the HBase configuration with the `-config` option.
+Table level co-processors can also be checked with the `-table` option. The tool searches for co-processors on its classpath, but the classpath can be extended
+with the `-jar` option. It is possible to test multiple classes with multiple `-class` options, multiple tables with multiple `-table` options, as well as to
+add multiple jars to the classpath with multiple `-jar` options.
 
 The tool can report errors and warnings. Errors mean that HBase won't be able to load the coprocessor,
 because it is incompatible with the current version of HBase. Warnings mean that the co-processors can be
 loaded, but they won't work as expected. If `-e` option is given, then the tool will also fail
@@ -877,9 +882,18 @@ For example:
 [source, bash]
 ----
-$ bin/hbase pre-upgrade validate-cp my-coprocessor.jar MyMasterObserver MyRegionObserver
+$ bin/hbase pre-upgrade validate-cp -jar my-coprocessor.jar -class MyMasterObserver -class MyRegionObserver
 ----
 
+It validates the `MyMasterObserver` and `MyRegionObserver` classes, which are located in `my-coprocessor.jar`.
+
+[source, bash]
+----
+$ bin/hbase pre-upgrade validate-cp -table .*
+----
+
+It validates every table level co-processor whose table name matches the `.*` regular expression.
+
 ==== DataBlockEncoding validation
 
 HBase 2.0 removed `PREFIX_TREE` Data Block Encoding from column families. To verify that none of the column
 families are using incompatible Data Block Encodings in the cluster run the following command.
-- 
2.17.0