diff --git a/hbase-archetypes/hbase-client-project/src/main/java/org/apache/hbase/archetypes/exemplars/client/HelloHBase.java b/hbase-archetypes/hbase-client-project/src/main/java/org/apache/hbase/archetypes/exemplars/client/HelloHBase.java index ee2f034a53..5164ab2171 100644 --- a/hbase-archetypes/hbase-client-project/src/main/java/org/apache/hbase/archetypes/exemplars/client/HelloHBase.java +++ b/hbase-archetypes/hbase-client-project/src/main/java/org/apache/hbase/archetypes/exemplars/client/HelloHBase.java @@ -112,7 +112,7 @@ public final class HelloHBase { + "], with one Column Family [" + Bytes.toString(MY_COLUMN_FAMILY_NAME) + "]."); TableDescriptor desc = TableDescriptorBuilder.newBuilder(MY_TABLE_NAME) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(MY_COLUMN_FAMILY_NAME)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(MY_COLUMN_FAMILY_NAME)) .build(); admin.createTable(desc); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java index e512b2c7dc..e59ea45e10 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase; import java.io.IOException; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -27,6 +28,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.client.CoprocessorDescriptor; +import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.TableDescriptor; @@ -433,7 +436,7 @@ public class HTableDescriptor implements TableDescriptor, Comparable kvs) throws IOException { - 
getDelegateeForModification().addCoprocessor(className, jarFilePath, priority, kvs); + getDelegateeForModification().setCoprocessor( + CoprocessorDescriptorBuilder.newBuilder(className) + .setJarPath(jarFilePath == null ? null : jarFilePath.toString()) + .setPriority(priority) + .setProperties(kvs == null ? Collections.emptyMap() : kvs) + .build()); return this; } @@ -734,7 +742,7 @@ public class HTableDescriptor implements TableDescriptor, Comparable getCoprocessorDescriptors() { + return delegatee.getCoprocessorDescriptors(); + } + /** * Return the list of attached co-processor represented by their name className * * @return The list of co-processors classNames */ - @Override public List getCoprocessors() { - return delegatee.getCoprocessors(); + return getCoprocessorDescriptors().stream().map(CoprocessorDescriptor::getClassName) + .collect(Collectors.toList()); } /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CoprocessorDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CoprocessorDescriptor.java new file mode 100644 index 0000000000..72d588bc97 --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CoprocessorDescriptor.java @@ -0,0 +1,51 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.client; + +import java.util.Map; +import java.util.Optional; +import org.apache.yetus.audience.InterfaceAudience; + +/** + * CoprocessorDescriptor contains the details about how to build a coprocessor. + * This class is a pojo so there are no checks for the details carried by this class. + * Use {@link CoprocessorDescriptorBuilder} to instantiate a CoprocessorDescriptor + */ +@InterfaceAudience.Public +public interface CoprocessorDescriptor { + /** + * @return the name of the class or interface represented by this object. + */ + String getClassName(); + + /** + * @return Path of the jar file. If it's null, the class will be loaded from default classloader. + */ + Optional getJarPath(); + + /** + * @return The order to execute this coprocessor + */ + int getPriority(); + + /** + * @return Arbitrary key-value parameter pairs passed into the coprocessor. + */ + Map getProperties(); +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CoprocessorDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CoprocessorDescriptorBuilder.java new file mode 100644 index 0000000000..71d1264c07 --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CoprocessorDescriptorBuilder.java @@ -0,0 +1,118 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.client; + +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.TreeMap; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.yetus.audience.InterfaceAudience; + +/** + * Used to build the {@link CoprocessorDescriptor} + */ +@InterfaceAudience.Public +public final class CoprocessorDescriptorBuilder { + + public static CoprocessorDescriptor of(String className) { + return new CoprocessorDescriptorBuilder(className).build(); + } + + public static CoprocessorDescriptorBuilder newBuilder(String className) { + return new CoprocessorDescriptorBuilder(className); + } + + private final String className; + private String jarPath; + private int priority = Coprocessor.PRIORITY_USER; + private Map properties = new TreeMap(); + + public CoprocessorDescriptorBuilder setJarPath(String jarPath) { + this.jarPath = jarPath; + return this; + } + + public CoprocessorDescriptorBuilder setPriority(int priority) { + this.priority = priority; + return this; + } + + public CoprocessorDescriptorBuilder setProperty(String key, String value) { + this.properties.put(key, value); + return this; + } + + public CoprocessorDescriptorBuilder setProperties(Map properties) { + this.properties.putAll(properties); + return this; + } + + public CoprocessorDescriptor build() { + return new CoprocessorDescriptorImpl(className, jarPath, priority, properties); + } + + private CoprocessorDescriptorBuilder(String className) { + this.className = 
Objects.requireNonNull(className); + } + + private static final class CoprocessorDescriptorImpl implements CoprocessorDescriptor { + private final String className; + private final String jarPath; + private final int priority; + private final Map properties; + + private CoprocessorDescriptorImpl(String className, String jarPath, int priority, + Map properties) { + this.className = className; + this.jarPath = jarPath; + this.priority = priority; + this.properties = properties; + } + + @Override + public String getClassName() { + return className; + } + + @Override + public Optional getJarPath() { + return Optional.ofNullable(jarPath); + } + + @Override + public int getPriority() { + return priority; + } + + @Override + public Map getProperties() { + return Collections.unmodifiableMap(properties); + } + + @Override + public String toString() { + return "class:" + className + + ", jarPath:" + jarPath + + ", priority:" + priority + + ", properties:" + properties; + } + } +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java index 305b35225a..4c46a8fe7e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java @@ -80,12 +80,11 @@ public interface TableDescriptor { int getColumnFamilyCount(); /** - * Return the list of attached co-processor represented by their name - * className + * Return the list of attached co-processor represented * - * @return The list of co-processors classNames + * @return The list of CoprocessorDescriptor */ - Collection getCoprocessors(); + Collection getCoprocessorDescriptors(); /** * Returns the durability setting for the table. 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java index c1db64bf4c..0f5d3ad89c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java @@ -27,12 +27,14 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import java.util.function.Function; import java.util.regex.Matcher; -import org.apache.hadoop.fs.Path; +import java.util.regex.Pattern; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -214,6 +216,24 @@ public class TableDescriptorBuilder { @InterfaceAudience.Private public final static byte[] NAMESPACE_COL_DESC_BYTES = Bytes.toBytes("d"); + /** + *
+   * Pattern that matches a coprocessor specification. Form is:
+   * {@code <coprocessor> '|' <class> ['|' <priority> ['|' <arguments>]]}
+   * where arguments are {@code <KEY> '=' <VALUE> [,...]}
+   * For example: {@code hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2}
+   * </pre>
+ */ + private static final Pattern CP_HTD_ATTR_VALUE_PATTERN = + Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$"); + + private static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+"; + private static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+"; + private static final Pattern CP_HTD_ATTR_VALUE_PARAM_PATTERN = Pattern.compile( + "(" + CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" + + CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?"); + public static final Pattern CP_HTD_ATTR_KEY_PATTERN = + Pattern.compile("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE); /** * Table descriptor for namespace table */ @@ -222,14 +242,14 @@ public class TableDescriptorBuilder { // rethink about adding back the setCacheDataInL1 for NS table. public static final TableDescriptor NAMESPACE_TABLEDESC = TableDescriptorBuilder.newBuilder(TableName.NAMESPACE_TABLE_NAME) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(NAMESPACE_FAMILY_INFO_BYTES) - // Ten is arbitrary number. Keep versions to help debugging. - .setMaxVersions(10) - .setInMemory(true) - .setBlocksize(8 * 1024) - .setScope(HConstants.REPLICATION_SCOPE_LOCAL) - .build()) - .build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(NAMESPACE_FAMILY_INFO_BYTES) + // Ten is arbitrary number. Keep versions to help debugging. 
+ .setMaxVersions(10) + .setInMemory(true) + .setBlocksize(8 * 1024) + .setScope(HConstants.REPLICATION_SCOPE_LOCAL) + .build()) + .build(); private final ModifyableTableDescriptor desc; /** @@ -282,28 +302,36 @@ public class TableDescriptorBuilder { this.desc = new ModifyableTableDescriptor(desc); } - public TableDescriptorBuilder addCoprocessor(String className) throws IOException { - return addCoprocessor(className, null, Coprocessor.PRIORITY_USER, null); + public TableDescriptorBuilder setCoprocessor(String className) throws IOException { + return setCoprocessor(CoprocessorDescriptorBuilder.of(className)); + } + + public TableDescriptorBuilder setCoprocessor(CoprocessorDescriptor cpDesc) throws IOException { + desc.setCoprocessor(Objects.requireNonNull(cpDesc)); + return this; } - public TableDescriptorBuilder addCoprocessor(String className, Path jarFilePath, - int priority, final Map kvs) throws IOException { - desc.addCoprocessor(className, jarFilePath, priority, kvs); + public TableDescriptorBuilder setCoprocessors(Collection cpDescs) + throws IOException { + for (CoprocessorDescriptor cpDesc : cpDescs) { + desc.setCoprocessor(cpDesc); + } return this; } - public TableDescriptorBuilder addCoprocessorWithSpec(final String specStr) throws IOException { - desc.addCoprocessorWithSpec(specStr); + public TableDescriptorBuilder setColumnFamily(final ColumnFamilyDescriptor family) { + desc.setColumnFamily(Objects.requireNonNull(family)); return this; } - public TableDescriptorBuilder addColumnFamily(final ColumnFamilyDescriptor family) { - desc.addColumnFamily(family); + public TableDescriptorBuilder setColumnFamilies( + final Collection families) { + families.forEach(desc::setColumnFamily); return this; } public TableDescriptorBuilder modifyColumnFamily(final ColumnFamilyDescriptor family) { - desc.modifyColumnFamily(family); + desc.modifyColumnFamily(Objects.requireNonNull(family)); return this; } @@ -421,7 +449,7 @@ public class TableDescriptorBuilder { 
newFamilies .forEach((cf, cfDesc) -> { desc.removeColumnFamily(cf); - desc.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(cfDesc).setScope(scope) + desc.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(cfDesc).setScope(scope) .build()); }); return this; @@ -839,7 +867,7 @@ public class TableDescriptorBuilder { * @param family to add. * @return the modifyable TD */ - public ModifyableTableDescriptor addColumnFamily(final ColumnFamilyDescriptor family) { + public ModifyableTableDescriptor setColumnFamily(final ColumnFamilyDescriptor family) { if (family.getName() == null || family.getName().length <= 0) { throw new IllegalArgumentException("Family name cannot be null or empty"); } @@ -1154,8 +1182,10 @@ public class TableDescriptorBuilder { * @throws IOException * @return the modifyable TD */ - public ModifyableTableDescriptor addCoprocessor(String className) throws IOException { - return addCoprocessor(className, null, Coprocessor.PRIORITY_USER, null); + public ModifyableTableDescriptor setCoprocessor(String className) throws IOException { + return setCoprocessor( + CoprocessorDescriptorBuilder.newBuilder(className).setPriority(Coprocessor.PRIORITY_USER) + .build()); } /** @@ -1164,44 +1194,38 @@ public class TableDescriptorBuilder { * check if the class can be loaded or not. Whether a coprocessor is * loadable or not will be determined when a region is opened. * - * @param jarFilePath Path of the jar file. If it's null, the class will be - * loaded from default classloader. - * @param className Full class name. - * @param priority Priority - * @param kvs Arbitrary key-value parameter pairs passed into the - * coprocessor. 
- * @throws IOException + * @throws IOException any illegal parameter key/value * @return the modifyable TD */ - public ModifyableTableDescriptor addCoprocessor(String className, Path jarFilePath, - int priority, final Map kvs) + public ModifyableTableDescriptor setCoprocessor(CoprocessorDescriptor cp) throws IOException { - checkHasCoprocessor(className); - + checkHasCoprocessor(cp.getClassName()); + if (cp.getPriority() < 0) { + throw new IOException("Priority must be bigger than or equal with zero, current:" + + cp.getPriority()); + } // Validate parameter kvs and then add key/values to kvString. StringBuilder kvString = new StringBuilder(); - if (kvs != null) { - for (Map.Entry e : kvs.entrySet()) { - if (!e.getKey().matches(HConstants.CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN)) { - throw new IOException("Illegal parameter key = " + e.getKey()); - } - if (!e.getValue().matches(HConstants.CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN)) { - throw new IOException("Illegal parameter (" + e.getKey() - + ") value = " + e.getValue()); - } - if (kvString.length() != 0) { - kvString.append(','); - } - kvString.append(e.getKey()); - kvString.append('='); - kvString.append(e.getValue()); + for (Map.Entry e : cp.getProperties().entrySet()) { + if (!e.getKey().matches(CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN)) { + throw new IOException("Illegal parameter key = " + e.getKey()); + } + if (!e.getValue().matches(CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN)) { + throw new IOException("Illegal parameter (" + e.getKey() + + ") value = " + e.getValue()); } + if (kvString.length() != 0) { + kvString.append(','); + } + kvString.append(e.getKey()); + kvString.append('='); + kvString.append(e.getValue()); } - String value = ((jarFilePath == null) ? 
"" : jarFilePath.toString()) - + "|" + className + "|" + Integer.toString(priority) + "|" + String value = cp.getJarPath().orElse("") + + "|" + cp.getClassName() + "|" + Integer.toString(cp.getPriority()) + "|" + kvString.toString(); - return addCoprocessorToMap(value); + return setCoprocessorToMap(value); } /** @@ -1211,18 +1235,19 @@ public class TableDescriptorBuilder { * loadable or not will be determined when a region is opened. * * @param specStr The Coprocessor specification all in in one String - * formatted so matches {@link HConstants#CP_HTD_ATTR_VALUE_PATTERN} * @throws IOException * @return the modifyable TD + * @deprecated used by HTableDescriptor and admin.rb. + * As of release 2.0.0, this will be removed in HBase 3.0.0. */ - public ModifyableTableDescriptor addCoprocessorWithSpec(final String specStr) throws IOException { - String className = getCoprocessorClassNameFromSpecStr(specStr); - if (className == null) { - throw new IllegalArgumentException("Format does not match " - + HConstants.CP_HTD_ATTR_VALUE_PATTERN + ": " + specStr); - } - checkHasCoprocessor(className); - return addCoprocessorToMap(specStr); + @Deprecated + public ModifyableTableDescriptor setCoprocessorWithSpec(final String specStr) + throws IOException { + CoprocessorDescriptor cpDesc = toCoprocessorDescriptor(specStr).orElseThrow( + () -> new IllegalArgumentException( + "Format does not match " + CP_HTD_ATTR_VALUE_PATTERN + ": " + specStr)); + checkHasCoprocessor(cpDesc.getClassName()); + return setCoprocessorToMap(specStr); } private void checkHasCoprocessor(final String className) throws IOException { @@ -1233,12 +1258,10 @@ public class TableDescriptorBuilder { /** * Add coprocessor to values Map - * * @param specStr The Coprocessor specification all in in one String - * formatted so matches {@link HConstants#CP_HTD_ATTR_VALUE_PATTERN} * @return Returns this */ - private ModifyableTableDescriptor addCoprocessorToMap(final String specStr) { + private ModifyableTableDescriptor 
setCoprocessorToMap(final String specStr) { if (specStr == null) { return this; } @@ -1246,7 +1269,7 @@ public class TableDescriptorBuilder { int maxCoprocessorNumber = 0; Matcher keyMatcher; for (Map.Entry e : this.values.entrySet()) { - keyMatcher = HConstants.CP_HTD_ATTR_KEY_PATTERN.matcher(Bytes.toString(e.getKey().get())); + keyMatcher = CP_HTD_ATTR_KEY_PATTERN.matcher(Bytes.toString(e.getKey().get())); if (!keyMatcher.matches()) { continue; } @@ -1266,24 +1289,8 @@ public class TableDescriptorBuilder { */ @Override public boolean hasCoprocessor(String classNameToMatch) { - Matcher keyMatcher; - for (Map.Entry e - : this.values.entrySet()) { - keyMatcher - = HConstants.CP_HTD_ATTR_KEY_PATTERN.matcher( - Bytes.toString(e.getKey().get())); - if (!keyMatcher.matches()) { - continue; - } - String className = getCoprocessorClassNameFromSpecStr(Bytes.toString(e.getValue().get())); - if (className == null) { - continue; - } - if (className.equals(classNameToMatch.trim())) { - return true; - } - } - return false; + return getCoprocessorDescriptors().stream().anyMatch(cp -> cp.getClassName() + .equals(classNameToMatch)); } /** @@ -1293,35 +1300,18 @@ public class TableDescriptorBuilder { * @return The list of co-processors classNames */ @Override - public List getCoprocessors() { - List result = new ArrayList<>(this.values.entrySet().size()); - Matcher keyMatcher; - for (Map.Entry e : this.values.entrySet()) { - keyMatcher = HConstants.CP_HTD_ATTR_KEY_PATTERN.matcher(Bytes.toString(e.getKey().get())); - if (!keyMatcher.matches()) { - continue; - } - String className = getCoprocessorClassNameFromSpecStr(Bytes.toString(e.getValue().get())); - if (className == null) { - continue; + public List getCoprocessorDescriptors() { + List result = new ArrayList<>(); + for (Map.Entry e: getValues().entrySet()) { + String key = Bytes.toString(e.getKey().get()).trim(); + if (CP_HTD_ATTR_KEY_PATTERN.matcher(key).matches()) { + 
toCoprocessorDescriptor(Bytes.toString(e.getValue().get()).trim()) + .ifPresent(result::add); } - result.add(className); // classname is the 2nd field } return result; } - /** - * @param spec String formatted as per - * {@link HConstants#CP_HTD_ATTR_VALUE_PATTERN} - * @return Class parsed from passed in spec or null if no match - * or classpath found - */ - private static String getCoprocessorClassNameFromSpecStr(final String spec) { - Matcher matcher = HConstants.CP_HTD_ATTR_VALUE_PATTERN.matcher(spec); - // Classname is the 2nd field - return matcher != null && matcher.matches() ? matcher.group(2).trim() : null; - } - /** * Remove a coprocessor from those set on the table * @@ -1333,12 +1323,12 @@ public class TableDescriptorBuilder { Matcher valueMatcher; for (Map.Entry e : this.values .entrySet()) { - keyMatcher = HConstants.CP_HTD_ATTR_KEY_PATTERN.matcher(Bytes.toString(e + keyMatcher = CP_HTD_ATTR_KEY_PATTERN.matcher(Bytes.toString(e .getKey().get())); if (!keyMatcher.matches()) { continue; } - valueMatcher = HConstants.CP_HTD_ATTR_VALUE_PATTERN.matcher(Bytes + valueMatcher = CP_HTD_ATTR_VALUE_PATTERN.matcher(Bytes .toString(e.getValue().get())); if (!valueMatcher.matches()) { continue; @@ -1413,4 +1403,40 @@ public class TableDescriptorBuilder { } } + private static Optional toCoprocessorDescriptor(String spec) { + Matcher matcher = CP_HTD_ATTR_VALUE_PATTERN.matcher(spec); + if (matcher.matches()) { + // jar file path can be empty if the cp class can be loaded + // from class loader. + String path = matcher.group(1).trim().isEmpty() ? + null : matcher.group(1).trim(); + String className = matcher.group(2).trim(); + if (className.isEmpty()) { + return Optional.empty(); + } + String priorityStr = matcher.group(3).trim(); + int priority = priorityStr.isEmpty() ? 
+ Coprocessor.PRIORITY_USER : Integer.parseInt(priorityStr); + String cfgSpec = null; + try { + cfgSpec = matcher.group(4); + } catch (IndexOutOfBoundsException ex) { + // ignore + } + Map ourConf = new TreeMap<>(); + if (cfgSpec != null && !cfgSpec.trim().equals("|")) { + cfgSpec = cfgSpec.substring(cfgSpec.indexOf('|') + 1); + Matcher m = CP_HTD_ATTR_VALUE_PARAM_PATTERN.matcher(cfgSpec); + while (m.find()) { + ourConf.put(m.group(1), m.group(2)); + } + } + return Optional.of(CoprocessorDescriptorBuilder.newBuilder(className) + .setJarPath(path) + .setPriority(priority) + .setProperties(ourConf) + .build()); + } + return Optional.empty(); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index 520a4cdea6..564adefcee 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -2847,7 +2847,7 @@ public final class ProtobufUtil { ts.getColumnFamiliesList() .stream() .map(ProtobufUtil::toColumnFamilyDescriptor) - .forEach(builder::addColumnFamily); + .forEach(builder::setColumnFamily); ts.getAttributesList() .forEach(a -> builder.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray())); ts.getConfigurationList() diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java new file mode 100644 index 0000000000..b288f98f1f --- /dev/null +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java @@ -0,0 +1,100 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.client; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Category({ MiscTests.class, SmallTests.class }) +public class TestCoprocessorDescriptor { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestCoprocessorDescriptor.class); + + private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorDescriptor.class); + + @Rule + public TestName name = new TestName(); + + @Test + public void testBuild() { + String className = "className"; + String path = "path"; + int priority = 100; + String propertyKey = "propertyKey"; + String propertyValue = "propertyValue"; + CoprocessorDescriptor cp = + 
CoprocessorDescriptorBuilder.newBuilder(className).setJarPath(path).setPriority(priority) + .setProperty(propertyKey, propertyValue).build(); + assertEquals(className, cp.getClassName()); + assertEquals(path, cp.getJarPath().get()); + assertEquals(priority, cp.getPriority()); + assertEquals(1, cp.getProperties().size()); + assertEquals(propertyValue, cp.getProperties().get(propertyKey)); + } + + @Test + public void testSetCoprocessor() throws IOException { + String propertyKey = "propertyKey"; + List cps = new ArrayList<>(); + for (String className : Arrays.asList("className0", "className1", "className2")) { + String path = "path"; + int priority = Math.abs(className.hashCode()); + String propertyValue = "propertyValue"; + cps.add( + CoprocessorDescriptorBuilder.newBuilder(className).setJarPath(path).setPriority(priority) + .setProperty(propertyKey, propertyValue).build()); + } + TableDescriptor tableDescriptor = + TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) + .setCoprocessors(cps).build(); + for (CoprocessorDescriptor cp : cps) { + boolean match = false; + for (CoprocessorDescriptor that : tableDescriptor.getCoprocessorDescriptors()) { + if (cp.getClassName().equals(that.getClassName())) { + assertEquals(cp.getJarPath().get(), that.getJarPath().get()); + assertEquals(cp.getPriority(), that.getPriority()); + assertEquals(cp.getProperties().size(), that.getProperties().size()); + assertEquals(cp.getProperties().get(propertyKey), that.getProperties().get(propertyKey)); + match = true; + break; + } + } + if (!match) { + fail("expect:" + cp + ", actual:" + tableDescriptor.getCoprocessorDescriptors()); + } + } + } +} diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java index f83e13f551..959ae91bed 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java 
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java @@ -60,49 +60,11 @@ public class TestTableDescriptorBuilder { String cpName = "a.b.c.d"; TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME) - .addCoprocessor(cpName) - .addCoprocessor(cpName) + .setCoprocessor(cpName) + .setCoprocessor(cpName) .build(); } - @Test - public void testAddCoprocessorWithSpecStr() throws IOException { - String cpName = "a.b.c.d"; - TableDescriptorBuilder builder - = TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME); - - try { - builder.addCoprocessorWithSpec(cpName); - fail(); - } catch (IllegalArgumentException iae) { - // Expected as cpName is invalid - } - - // Try minimal spec. - try { - builder.addCoprocessorWithSpec("file:///some/path" + "|" + cpName); - fail(); - } catch (IllegalArgumentException iae) { - // Expected to be invalid - } - - // Try more spec. - String spec = "hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2"; - try { - builder.addCoprocessorWithSpec(spec); - } catch (IllegalArgumentException iae) { - fail(); - } - - // Try double add of same coprocessor - try { - builder.addCoprocessorWithSpec(spec); - fail(); - } catch (IOException ioe) { - // Expect that the coprocessor already exists - } - } - @Test public void testPb() throws DeserializationException, IOException { final int v = 123; @@ -133,7 +95,7 @@ public class TestTableDescriptorBuilder { String className = "org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver"; TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .addCoprocessor(className) // add and check that it is present + .setCoprocessor(className) // add and check that it is present .build(); assertTrue(desc.hasCoprocessor(className)); desc = TableDescriptorBuilder.newBuilder(desc) @@ -151,40 +113,46 @@ public class TestTableDescriptorBuilder { TableDescriptor desc = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); // Check that any coprocessor is present. - assertTrue(desc.getCoprocessors().isEmpty()); + assertTrue(desc.getCoprocessorDescriptors().isEmpty()); // simple CP String className1 = "org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver"; String className2 = "org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver"; desc = TableDescriptorBuilder.newBuilder(desc) - .addCoprocessor(className1) // Add the 1 coprocessor and check if present. + .setCoprocessor(className1) // Add the 1 coprocessor and check if present. .build(); - assertTrue(desc.getCoprocessors().size() == 1); - assertTrue(desc.getCoprocessors().contains(className1)); + assertTrue(desc.getCoprocessorDescriptors().size() == 1); + assertTrue(desc.getCoprocessorDescriptors().stream().map(CoprocessorDescriptor::getClassName) + .anyMatch(name -> name.equals(className1))); desc = TableDescriptorBuilder.newBuilder(desc) // Add the 2nd coprocessor and check if present. 
// remove it and check that it is gone - .addCoprocessor(className2) + .setCoprocessor(className2) .build(); - assertTrue(desc.getCoprocessors().size() == 2); - assertTrue(desc.getCoprocessors().contains(className2)); + assertTrue(desc.getCoprocessorDescriptors().size() == 2); + assertTrue(desc.getCoprocessorDescriptors().stream().map(CoprocessorDescriptor::getClassName) + .anyMatch(name -> name.equals(className2))); desc = TableDescriptorBuilder.newBuilder(desc) // Remove one and check .removeCoprocessor(className1) .build(); - assertTrue(desc.getCoprocessors().size() == 1); - assertFalse(desc.getCoprocessors().contains(className1)); - assertTrue(desc.getCoprocessors().contains(className2)); + assertTrue(desc.getCoprocessorDescriptors().size() == 1); + assertFalse(desc.getCoprocessorDescriptors().stream().map(CoprocessorDescriptor::getClassName) + .anyMatch(name -> name.equals(className1))); + assertTrue(desc.getCoprocessorDescriptors().stream().map(CoprocessorDescriptor::getClassName) + .anyMatch(name -> name.equals(className2))); desc = TableDescriptorBuilder.newBuilder(desc) // Remove the last and check .removeCoprocessor(className2) .build(); - assertTrue(desc.getCoprocessors().isEmpty()); - assertFalse(desc.getCoprocessors().contains(className1)); - assertFalse(desc.getCoprocessors().contains(className2)); + assertTrue(desc.getCoprocessorDescriptors().isEmpty()); + assertFalse(desc.getCoprocessorDescriptors().stream().map(CoprocessorDescriptor::getClassName) + .anyMatch(name -> name.equals(className1))); + assertFalse(desc.getCoprocessorDescriptors().stream().map(CoprocessorDescriptor::getClassName) + .anyMatch(name -> name.equals(className2))); } /** @@ -292,7 +260,7 @@ public class TestTableDescriptorBuilder { .build(); TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(hcd) + .setColumnFamily(hcd) .build(); assertEquals(1000, htd.getColumnFamily(familyName).getBlocksize()); @@ -325,14 +293,14 
@@ public class TestTableDescriptorBuilder { .setBlocksize(1000) .build(); TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(hcd) + .setColumnFamily(hcd) .build(); assertEquals(1000, htd.getColumnFamily(familyName).getBlocksize()); hcd = ColumnFamilyDescriptorBuilder.newBuilder(familyName) .setBlocksize(2000) .build(); // add duplicate column - TableDescriptorBuilder.newBuilder(htd).addColumnFamily(hcd).build(); + TableDescriptorBuilder.newBuilder(htd).setColumnFamily(hcd).build(); } @Test diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index d826ca0911..372d9b1466 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -953,6 +953,10 @@ public final class HConstants { */ public static final float HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD = 0.2f; + /** + * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0. + */ + @Deprecated public static final Pattern CP_HTD_ATTR_KEY_PATTERN = Pattern.compile("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE); @@ -963,12 +967,25 @@ public final class HConstants { * where arguments are {@code '=' [,...]} * For example: {@code hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2} * + * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0. */ + @Deprecated public static final Pattern CP_HTD_ATTR_VALUE_PATTERN = Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$"); - + /** + * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0. + */ + @Deprecated public static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+"; + /** + * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0. 
+ */ + @Deprecated public static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+"; + /** + * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0. + */ + @Deprecated public static final Pattern CP_HTD_ATTR_VALUE_PARAM_PATTERN = Pattern.compile( "(" + CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" + CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?"); diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java index bc75881292..1bf36f5e86 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java @@ -334,7 +334,7 @@ public class TestClassLoading { htd.setValue(cpKey2, cpValue2); htd.setValue(cpKey3, cpValue3); - // add 2 coprocessor by using new htd.addCoprocessor() api + // add 2 coprocessor by using new htd.setCoprocessor() api htd.addCoprocessor(cpName5, new Path(getLocalPath(jarFile5)), Coprocessor.PRIORITY_USER, null); Map kvs = new HashMap<>(); diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java index 852f1abc74..38c3081bb0 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java @@ -260,7 +260,7 @@ public class TestSecureExport { final String exportTable = name.getMethodName(); TableDescriptor exportHtd = TableDescriptorBuilder .newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA)) .setOwnerString(USER_OWNER) .build(); SecureTestUtil.createTable(UTIL, exportHtd, new 
byte[][]{Bytes.toBytes("s")}); @@ -344,7 +344,7 @@ public class TestSecureExport { final String exportTable = name.getMethodName() + "_export"; final String importTable = name.getMethodName() + "_import"; final TableDescriptor exportHtd = TableDescriptorBuilder.newBuilder(TableName.valueOf(exportTable)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA)) .setOwnerString(USER_OWNER) .build(); SecureTestUtil.createTable(UTIL, exportHtd, new byte[][]{Bytes.toBytes("s")}); @@ -401,7 +401,7 @@ public class TestSecureExport { }; SecureTestUtil.verifyAllowed(exportAction, getUserByLogin(USER_OWNER)); final TableDescriptor importHtd = TableDescriptorBuilder.newBuilder(TableName.valueOf(importTable)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYB)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYB)) .setOwnerString(USER_OWNER) .build(); SecureTestUtil.createTable(UTIL, importHtd, new byte[][]{Bytes.toBytes("s")}); diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/ExportEndpointExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/ExportEndpointExample.java index cc06844595..e15c993de3 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/ExportEndpointExample.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/ExportEndpointExample.java @@ -56,8 +56,8 @@ public class ExportEndpointExample { Admin admin = con.getAdmin()) { TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName) // MUST mount the export endpoint - .addCoprocessor(Export.class.getName()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)) + .setCoprocessor(Export.class.getName()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)) .build(); admin.createTable(desc); diff --git 
a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestScanModifyingObserver.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestScanModifyingObserver.java index f90a0f4349..76d9cb9da6 100644 --- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestScanModifyingObserver.java +++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestScanModifyingObserver.java @@ -68,10 +68,10 @@ public class TestScanModifyingObserver { UTIL.startMiniCluster(1); UTIL.getAdmin() .createTable(TableDescriptorBuilder.newBuilder(NAME) - .addCoprocessor(ScanModifyingObserver.class.getName()) + .setCoprocessor(ScanModifyingObserver.class.getName()) .setValue(ScanModifyingObserver.FAMILY_TO_ADD_KEY, Bytes.toString(FAMILY)) .setValue(ScanModifyingObserver.QUALIFIER_TO_ADD_KEY, Bytes.toString(IMPLICIT_QUAL)) - .addColumnFamily(CFD).build()); + .setColumnFamily(CFD).build()); } @AfterClass diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestValueReplacingCompaction.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestValueReplacingCompaction.java index 0dbdfe18b2..6974c2051a 100644 --- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestValueReplacingCompaction.java +++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestValueReplacingCompaction.java @@ -67,10 +67,10 @@ public class TestValueReplacingCompaction { UTIL.startMiniCluster(1); UTIL.getAdmin() .createTable(TableDescriptorBuilder.newBuilder(NAME) - .addCoprocessor(ValueRewritingObserver.class.getName()) + .setCoprocessor(ValueRewritingObserver.class.getName()) .setValue(ValueRewritingObserver.ORIGINAL_VALUE_KEY, value) .setValue(ValueRewritingObserver.REPLACED_VALUE_KEY, replacedValue) - .addColumnFamily(CFD).build()); + .setColumnFamily(CFD).build()); } @AfterClass diff --git 
a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserver.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserver.java index 639461bc8a..b76861d515 100644 --- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserver.java +++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserver.java @@ -47,8 +47,8 @@ public class TestWriteHeavyIncrementObserver extends WriteHeavyIncrementObserver WriteHeavyIncrementObserverTestBase.setUp(); UTIL.getAdmin() .createTable(TableDescriptorBuilder.newBuilder(NAME) - .addCoprocessor(WriteHeavyIncrementObserver.class.getName()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build()); + .setCoprocessor(WriteHeavyIncrementObserver.class.getName()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build()); TABLE = UTIL.getConnection().getTable(NAME); } diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserverWithMemStoreCompaction.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserverWithMemStoreCompaction.java index ae93d88448..60b032b426 100644 --- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserverWithMemStoreCompaction.java +++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserverWithMemStoreCompaction.java @@ -45,10 +45,10 @@ public class TestWriteHeavyIncrementObserverWithMemStoreCompaction WriteHeavyIncrementObserverTestBase.setUp(); UTIL.getAdmin() .createTable(TableDescriptorBuilder.newBuilder(NAME) - .addCoprocessor(WriteHeavyIncrementObserver.class.getName()) + .setCoprocessor(WriteHeavyIncrementObserver.class.getName()) .setValue(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, 
MemoryCompactionPolicy.EAGER.name()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build()); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build()); TABLE = UTIL.getConnection().getTable(NAME); } diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java index 3c89fa5f26..23c97dc674 100644 --- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java +++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java @@ -64,11 +64,11 @@ public class TestZooKeeperScanPolicyObserver { UTIL.startMiniCluster(3); UTIL.getAdmin() .createTable(TableDescriptorBuilder.newBuilder(NAME) - .addCoprocessor(ZooKeeperScanPolicyObserver.class.getName()) + .setCoprocessor(ZooKeeperScanPolicyObserver.class.getName()) .setValue(ZooKeeperScanPolicyObserver.ZK_ENSEMBLE_KEY, "localhost:" + UTIL.getZkCluster().getClientPort()) .setValue(ZooKeeperScanPolicyObserver.ZK_SESSION_TIMEOUT_KEY, "2000") - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build()).build()); + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build()).build()); TABLE = UTIL.getConnection().getTable(NAME); } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java index 4d0d7e0e97..2fb12c3124 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java @@ -440,7 +440,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase { String tableName = String.format("ittable-%010d", RandomUtils.nextInt()); String 
familyName = "cf-" + Math.abs(RandomUtils.nextInt()); return TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(familyName)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(familyName)) .build(); } } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java index 6c8554a353..3473684a3d 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java @@ -63,7 +63,7 @@ public class AddColumnAction extends Action { LOG.debug("Performing action: Adding " + columnDescriptor + " to " + tableName); TableDescriptor modifiedTable = TableDescriptorBuilder.newBuilder(tableDescriptor) - .addColumnFamily(columnDescriptor).build(); + .setColumnFamily(columnDescriptor).build(); admin.modifyTable(modifiedTable); } } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java index 0bdc07ca72..66cdff4fda 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java @@ -56,7 +56,7 @@ public class TestChangeSplitPolicyAction extends Action { public void setUp() throws Exception { this.admin = TEST_UTIL.getAdmin(); TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); - admin.createTable(builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of("fam")).build()); + admin.createTable(builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of("fam")).build()); } @Test diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java index ee410ca72a..2fa2229b57 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java @@ -212,7 +212,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase { Admin admin = util.getAdmin(); TableDescriptor desc = admin.getDescriptor(t); TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(desc); - builder.addCoprocessor(SlowMeCoproScanOperations.class.getName()); + builder.setCoprocessor(SlowMeCoproScanOperations.class.getName()); HBaseTestingUtility.modifyTableSync(admin, builder.build()); } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java index 850e123101..4c2e3796e2 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java @@ -241,7 +241,7 @@ public class IntegrationTestMTTR { ColumnFamilyDescriptorBuilder colDescriptorBldr = ColumnFamilyDescriptorBuilder.newBuilder(FAMILY); colDescriptorBldr.setMaxVersions(1); - builder.addColumnFamily(colDescriptorBldr.build()); + builder.setColumnFamily(colDescriptorBldr.build()); util.getAdmin().createTable(builder.build()); // Setup the table for LoadTestTool diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java index d52863e99a..4a22699b8b 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java @@ -297,7 +297,7 @@ public class 
TestCellBasedImportExport2 { public void testExportScannerBatching() throws Throwable { TableDescriptor desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(1) .build()) .build(); @@ -328,7 +328,7 @@ public class TestCellBasedImportExport2 { public void testWithDeletes() throws Throwable { TableDescriptor desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) .build()) @@ -362,7 +362,7 @@ public class TestCellBasedImportExport2 { final String IMPORT_TABLE = name.getMethodName() + "import"; desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(IMPORT_TABLE)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) .build()) @@ -397,7 +397,7 @@ public class TestCellBasedImportExport2 { final TableName exportTable = TableName.valueOf(name.getMethodName()); TableDescriptor desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) .build()) @@ -435,7 +435,7 @@ public class TestCellBasedImportExport2 { final String importTable = name.getMethodName() + "import"; desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(importTable)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) 
.setKeepDeletedCells(KeepDeletedCells.TRUE) .build()) @@ -477,7 +477,7 @@ public class TestCellBasedImportExport2 { // Create simple table to export TableDescriptor desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .build()) .build(); @@ -505,7 +505,7 @@ public class TestCellBasedImportExport2 { final String IMPORT_TABLE = name.getMethodName() + "import"; desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(IMPORT_TABLE)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .build()) .build(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index 0dbf738d6d..c7916de37a 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -297,7 +297,7 @@ public class TestImportExport { public void testExportScannerBatching() throws Throwable { TableDescriptor desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(1) .build()) .build(); @@ -328,7 +328,7 @@ public class TestImportExport { public void testWithDeletes() throws Throwable { TableDescriptor desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) 
.build()) @@ -362,7 +362,7 @@ public class TestImportExport { final String IMPORT_TABLE = name.getMethodName() + "import"; desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(IMPORT_TABLE)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) .build()) @@ -397,7 +397,7 @@ public class TestImportExport { final TableName exportTable = TableName.valueOf(name.getMethodName()); TableDescriptor desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) .build()) @@ -435,7 +435,7 @@ public class TestImportExport { final String importTable = name.getMethodName() + "import"; desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(importTable)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) .build()) @@ -477,7 +477,7 @@ public class TestImportExport { // Create simple table to export TableDescriptor desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .build()) .build(); @@ -505,7 +505,7 @@ public class TestImportExport { final String IMPORT_TABLE = name.getMethodName() + "import"; desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(IMPORT_TABLE)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA) .setMaxVersions(5) .build()) .build(); diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplication.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplication.java index 1f080c8204..e1fda4eb0b 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplication.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplication.java @@ -150,7 +150,7 @@ public class TestVerifyReplication extends TestReplicationBase { ColumnFamilyDescriptor fam = ColumnFamilyDescriptorBuilder.newBuilder(familyname) .setMaxVersions(100).setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build(); TableDescriptor table = - TableDescriptorBuilder.newBuilder(tableName).addColumnFamily(fam).build(); + TableDescriptorBuilder.newBuilder(tableName).setColumnFamily(fam).build(); scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (ColumnFamilyDescriptor f : table.getColumnFamilies()) { scopes.put(f.getName(), f.getScope()); diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsWithACL.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsWithACL.java index 30547ba90d..afdff71250 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsWithACL.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsWithACL.java @@ -140,7 +140,7 @@ public class TestRSGroupsWithACL extends SecureTestUtil{ TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(TEST_TABLE); ColumnFamilyDescriptorBuilder cfd = ColumnFamilyDescriptorBuilder.newBuilder(TEST_FAMILY); cfd.setMaxVersions(100); - tableBuilder.addColumnFamily(cfd.build()); + tableBuilder.setColumnFamily(cfd.build()); tableBuilder.setValue(TableDescriptorBuilder.OWNER, USER_OWNER.getShortName()); createTable(TEST_UTIL, tableBuilder.build(), new byte[][] { Bytes.toBytes("s") }); diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/package-info.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/package-info.java index a6b5c4bc20..92eb121945 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/package-info.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/package-info.java @@ -256,7 +256,7 @@ policy implementations, perhaps) ahead of observers. // create a table that references the jar TableDescriptor htd = TableDescriptorBuilder .newBuilder(TableName.valueOf(getClass().getTableName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("test")) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("test")) .setValue(Bytes.toBytes("Coprocessor$1", path.toString()+ ":" + classFullName + ":" + Coprocessor.Priority.USER)) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 1812d8c6f9..0ce6681a4a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -2150,7 +2150,7 @@ public class HMaster extends HRegionServer implements MasterServices { } TableDescriptor newDesc = TableDescriptorBuilder - .newBuilder(old).addColumnFamily(column).build(); + .newBuilder(old).setColumnFamily(column).build(); return modifyTable(tableName, newDesc, nonceGroup, nonce); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java index b35f4fb0f1..171be5fa9e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java @@ -29,8 +29,7 @@ import java.util.Map; import java.util.UUID; import 
java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import java.util.regex.Matcher; - +import java.util.stream.Collectors; import org.apache.commons.collections4.map.AbstractReferenceMap; import org.apache.commons.collections4.map.ReferenceMap; import org.apache.hadoop.conf.Configuration; @@ -38,9 +37,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.RawCellBuilder; import org.apache.hadoop.hbase.RawCellBuilderFactory; import org.apache.hadoop.hbase.ServerName; @@ -82,8 +79,6 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTrack import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest; import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker; import org.apache.hadoop.hbase.security.User; -import org.apache.hbase.thirdparty.com.google.common.collect.Lists; -import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CoprocessorClassLoader; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.WALEdit; @@ -314,59 +309,19 @@ public class RegionCoprocessorHost static List getTableCoprocessorAttrsFromSchema(Configuration conf, TableDescriptor htd) { - List result = Lists.newArrayList(); - for (Map.Entry e: htd.getValues().entrySet()) { - String key = Bytes.toString(e.getKey().get()).trim(); - if (HConstants.CP_HTD_ATTR_KEY_PATTERN.matcher(key).matches()) { - String spec = Bytes.toString(e.getValue().get()).trim(); - // found one - try { - Matcher matcher = HConstants.CP_HTD_ATTR_VALUE_PATTERN.matcher(spec); - if (matcher.matches()) { - // jar file path can be empty if the cp class can be loaded - // from class loader. 
- Path path = matcher.group(1).trim().isEmpty() ? - null : new Path(matcher.group(1).trim()); - String className = matcher.group(2).trim(); - if (className.isEmpty()) { - LOG.error("Malformed table coprocessor specification: key=" + - key + ", spec: " + spec); - continue; - } - String priorityStr = matcher.group(3).trim(); - int priority = priorityStr.isEmpty() ? - Coprocessor.PRIORITY_USER : Integer.parseInt(priorityStr); - String cfgSpec = null; - try { - cfgSpec = matcher.group(4); - } catch (IndexOutOfBoundsException ex) { - // ignore - } - Configuration ourConf; - if (cfgSpec != null && !cfgSpec.trim().equals("|")) { - cfgSpec = cfgSpec.substring(cfgSpec.indexOf('|') + 1); - // do an explicit deep copy of the passed configuration - ourConf = new Configuration(false); - HBaseConfiguration.merge(ourConf, conf); - Matcher m = HConstants.CP_HTD_ATTR_VALUE_PARAM_PATTERN.matcher(cfgSpec); - while (m.find()) { - ourConf.set(m.group(1), m.group(2)); - } - } else { - ourConf = conf; - } - result.add(new TableCoprocessorAttribute(path, className, priority, ourConf)); - } else { - LOG.error("Malformed table coprocessor specification: key=" + key + - ", spec: " + spec); - } - } catch (Exception ioe) { - LOG.error("Malformed table coprocessor specification: key=" + key + - ", spec: " + spec); - } + return htd.getCoprocessorDescriptors().stream().map(cp -> { + Path path = cp.getJarPath().map(p -> new Path(p)).orElse(null); + Configuration ourConf; + if (!cp.getProperties().isEmpty()) { + // do an explicit deep copy of the passed configuration + ourConf = new Configuration(false); + HBaseConfiguration.merge(ourConf, conf); + cp.getProperties().forEach((k, v) -> ourConf.set(k, v)); + } else { + ourConf = conf; } - } - return result; + return new TableCoprocessorAttribute(path, cp.getClassName(), cp.getPriority(), ourConf); + }).collect(Collectors.toList()); } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index cfc0b914a0..90fdca60df 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -1128,7 +1128,7 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor, setScope(HConstants.REPLICATION_SCOPE_LOCAL).build(); TableDescriptor td = TableDescriptorBuilder.newBuilder(AccessControlLists.ACL_TABLE_NAME). - addColumnFamily(cfd).build(); + setColumnFamily(cfd).build(); admin.createTable(td); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java index 44a4f57204..44f736b2e9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java @@ -21,21 +21,17 @@ package org.apache.hadoop.hbase.security.access; import java.io.IOException; import java.util.Collection; import java.util.Optional; -import java.util.regex.Matcher; - import org.apache.commons.io.FilenameUtils; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseInterfaceAudience; -import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.CoprocessorDescriptor; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor; import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.MasterObserver; import 
org.apache.hadoop.hbase.coprocessor.ObserverContext; -import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -81,10 +77,8 @@ public class CoprocessorWhitelistMasterObserver implements MasterCoprocessor, Ma * "file:///usr/hbase/coprocessors" or for all * filesystems "/usr/hbase/coprocessors") * @return if the path was found under the wlPath - * @throws IOException if a failure occurs in getting the path file system */ - private static boolean validatePath(Path coprocPath, Path wlPath, - Configuration conf) throws IOException { + private static boolean validatePath(Path coprocPath, Path wlPath) { // verify if all are allowed if (wlPath.toString().equals("*")) { return(true); @@ -143,58 +137,26 @@ public class CoprocessorWhitelistMasterObserver implements MasterCoprocessor, Ma * @param ctx as passed in from the coprocessor * @param htd as passed in from the coprocessor */ - private void verifyCoprocessors(ObserverContext ctx, + private static void verifyCoprocessors(ObserverContext ctx, TableDescriptor htd) throws IOException { - - Configuration conf = ctx.getEnvironment().getConfiguration(); - Collection paths = - conf.getStringCollection( + ctx.getEnvironment().getConfiguration().getStringCollection( CP_COPROCESSOR_WHITELIST_PATHS_KEY); - - Collection coprocs = htd.getCoprocessors(); - for (int i = 0; i < coprocs.size(); i++) { - - String coprocSpec = Bytes.toString(htd.getValue( - Bytes.toBytes("coprocessor$" + (i + 1)))); - if (coprocSpec == null) { - continue; - } - - // File path is the 1st field of the coprocessor spec - Matcher matcher = - HConstants.CP_HTD_ATTR_VALUE_PATTERN.matcher(coprocSpec); - if (matcher == null || !matcher.matches()) { - continue; - } - - String coprocPathStr = matcher.group(1).trim(); - // Check if coprocessor is being loaded via the classpath (i.e. 
no file path) - if (coprocPathStr.equals("")) { - break; - } - Path coprocPath = new Path(coprocPathStr); - String coprocessorClass = matcher.group(2).trim(); - - boolean foundPathMatch = false; - for (String pathStr : paths) { - Path wlPath = new Path(pathStr); - try { - foundPathMatch = validatePath(coprocPath, wlPath, conf); - if (foundPathMatch == true) { + for (CoprocessorDescriptor cp : htd.getCoprocessorDescriptors()) { + if (cp.getJarPath().isPresent()) { + if (paths.stream().noneMatch(p -> { + Path wlPath = new Path(p); + if (validatePath(new Path(cp.getJarPath().get()), wlPath)) { LOG.debug(String.format("Coprocessor %s found in directory %s", - coprocessorClass, pathStr)); - break; + cp.getClassName(), p)); + return true; } - } catch (IOException e) { - LOG.warn(String.format("Failed to validate white list path %s for coprocessor path %s", - pathStr, coprocPathStr)); + return false; + })) { + throw new IOException(String.format("Loading %s DENIED in %s", + cp.getClassName(), CP_COPROCESSOR_WHITELIST_PATHS_KEY)); } } - if (!foundPathMatch) { - throw new IOException(String.format("Loading %s DENIED in %s", - coprocessorClass, CP_COPROCESSOR_WHITELIST_PATHS_KEY)); - } } } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java index e6b64b479d..e027ac68c3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java @@ -873,7 +873,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool { byte[][] keys = inferBoundaries(map); TableDescriptorBuilder tdBuilder = TableDescriptorBuilder.newBuilder(tableName); familyBuilders.stream().map(ColumnFamilyDescriptorBuilder::build) - .forEachOrdered(tdBuilder::addColumnFamily); + .forEachOrdered(tdBuilder::setColumnFamily); 
admin.createTable(tdBuilder.build(), keys); LOG.info("Table " + tableName + " is available!!"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java index b4b0be0827..a67bca1bd1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java @@ -37,6 +37,8 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; +import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder; +import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -149,27 +151,29 @@ public class FSTableDescriptors implements TableDescriptors { // the META table data goes to File mode BC only. Test how that affect the system. If too much, // we have to rethink about adding back the setCacheDataInL1 for META table CFs. return TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(HConstants.CATALOG_FAMILY) - .setMaxVersions(conf.getInt(HConstants.HBASE_META_VERSIONS, - HConstants.DEFAULT_HBASE_META_VERSIONS)) - .setInMemory(true) - .setBlocksize(conf.getInt(HConstants.HBASE_META_BLOCK_SIZE, - HConstants.DEFAULT_HBASE_META_BLOCK_SIZE)) - .setScope(HConstants.REPLICATION_SCOPE_LOCAL) - // Disable blooms for meta. Needs work. Seems to mess w/ getClosestOrBefore. 
- .setBloomFilterType(BloomType.NONE) - .build()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(HConstants.TABLE_FAMILY) - .setMaxVersions(conf.getInt(HConstants.HBASE_META_VERSIONS, - HConstants.DEFAULT_HBASE_META_VERSIONS)) - .setInMemory(true) - .setBlocksize(8 * 1024) - .setScope(HConstants.REPLICATION_SCOPE_LOCAL) - // Disable blooms for meta. Needs work. Seems to mess w/ getClosestOrBefore. - .setBloomFilterType(BloomType.NONE) - .build()) - .addCoprocessor("org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint", - null, Coprocessor.PRIORITY_SYSTEM, null); + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(HConstants.CATALOG_FAMILY) + .setMaxVersions(conf.getInt(HConstants.HBASE_META_VERSIONS, + HConstants.DEFAULT_HBASE_META_VERSIONS)) + .setInMemory(true) + .setBlocksize(conf.getInt(HConstants.HBASE_META_BLOCK_SIZE, + HConstants.DEFAULT_HBASE_META_BLOCK_SIZE)) + .setScope(HConstants.REPLICATION_SCOPE_LOCAL) + // Disable blooms for meta. Needs work. Seems to mess w/ getClosestOrBefore. + .setBloomFilterType(BloomType.NONE) + .build()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(HConstants.TABLE_FAMILY) + .setMaxVersions(conf.getInt(HConstants.HBASE_META_VERSIONS, + HConstants.DEFAULT_HBASE_META_VERSIONS)) + .setInMemory(true) + .setBlocksize(8 * 1024) + .setScope(HConstants.REPLICATION_SCOPE_LOCAL) + // Disable blooms for meta. Needs work. Seems to mess w/ getClosestOrBefore. 
+ .setBloomFilterType(BloomType.NONE) + .build()) + .setCoprocessor(CoprocessorDescriptorBuilder.newBuilder( + MultiRowMutationEndpoint.class.getName()) + .setPriority(Coprocessor.PRIORITY_SYSTEM) + .build()); } @VisibleForTesting diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index a83e182560..d11cd7e0ad 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -1380,7 +1380,7 @@ public class HBaseFsck extends Configured implements Closeable { if (columns ==null || columns.isEmpty()) return false; TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); for (String columnfamimly : columns) { - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly)); } fstd.createTableDescriptor(builder.build(), true); return true; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java index 75b8ccd1d3..7d3bc4208e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java @@ -402,7 +402,7 @@ public class RegionSplitter { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); for (String cf : columnFamilies) { - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(cf)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(cf)); } try (Connection connection = ConnectionFactory.createConnection(conf)) { Admin admin = connection.getAdmin(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java index c0aa1a0f58..d84dd59a75 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java @@ -76,7 +76,7 @@ public abstract class AcidGuaranteesTestBase { builder.setValue(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, "0.9"); } Stream.of(FAMILIES).map(ColumnFamilyDescriptorBuilder::of) - .forEachOrdered(builder::addColumnFamily); + .forEachOrdered(builder::setColumnFamily); UTIL.getAdmin().createTable(builder.build()); tool.setConf(UTIL.getConfiguration()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java index 44d3e87151..4265d50e15 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java @@ -320,7 +320,7 @@ public class AcidGuaranteesTestTool extends AbstractHBaseTool { if (!admin.tableExists(TABLE_NAME)) { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TABLE_NAME); Stream.of(FAMILIES).map(ColumnFamilyDescriptorBuilder::of) - .forEachOrdered(builder::addColumnFamily); + .forEachOrdered(builder::setColumnFamily); admin.createTable(builder.build()); } ColumnFamilyDescriptor cfd = admin.getDescriptor(TABLE_NAME).getColumnFamilies()[0]; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index b48abc6a8d..3c3cb0a0f7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -1409,7 +1409,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { BloomType type, int 
blockSize, Configuration c) throws IOException { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(htd); for (byte[] family : families) { - builder.addColumnFamily( + builder.setColumnFamily( ColumnFamilyDescriptorBuilder.newBuilder(family).setBloomFilterType(type) .setBlocksize(blockSize).build()); } @@ -3752,7 +3752,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(td); for (ColumnFamilyDescriptor cd : cds) { if (!td.hasColumnFamily(cd.getName())) { - builder.addColumnFamily(cd); + builder.setColumnFamily(cd); } } td = builder.build(); @@ -3858,7 +3858,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { throws IOException { TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName)) - .addColumnFamily(cd) + .setColumnFamily(cd) .build(); HRegionInfo info = new HRegionInfo(TableName.valueOf(tableName), null, null, false); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java index c1735a7b02..2d579542ec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java @@ -143,7 +143,7 @@ public class TestZooKeeper { private void testSanity(final String testName) throws Exception { String tableName = testName + "_" + System.currentTimeMillis(); TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("fam")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("fam")).build(); LOG.info("Creating table " + tableName); Admin admin = TEST_UTIL.getAdmin(); try { @@ -179,7 +179,7 @@ public class TestZooKeeper { Bytes.toBytes("g"), Bytes.toBytes("h"), Bytes.toBytes("i"), Bytes.toBytes("j") }; TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build(); admin.createTable(htd, SPLIT_KEYS); TEST_UTIL.waitUntilNoRegionsInTransition(60000); m.getZooKeeper().close(); @@ -241,7 +241,7 @@ public class TestZooKeeper { byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("1"), Bytes.toBytes("2"), Bytes.toBytes("3"), Bytes.toBytes("4"), Bytes.toBytes("5") }; TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build(); admin.createTable(htd, SPLIT_KEYS); } TEST_UTIL.waitUntilNoRegionsInTransition(60000); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/AbstractTestCIOperationTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/AbstractTestCIOperationTimeout.java index d084741e66..d1091084c0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/AbstractTestCIOperationTimeout.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/AbstractTestCIOperationTimeout.java @@ -40,8 +40,8 @@ public abstract class AbstractTestCIOperationTimeout extends AbstractTestCITimeo public void setUp() throws IOException { tableName = TableName.valueOf(name.getMethodName()); TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName) - .addCoprocessor(SleepAndFailFirstTime.class.getName()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAM_NAM)).build(); + .setCoprocessor(SleepAndFailFirstTime.class.getName()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAM_NAM)).build(); TEST_UTIL.getAdmin().createTable(htd); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/AbstractTestCIRpcTimeout.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/AbstractTestCIRpcTimeout.java index e052202f43..89696cf2ab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/AbstractTestCIRpcTimeout.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/AbstractTestCIRpcTimeout.java @@ -41,8 +41,8 @@ public abstract class AbstractTestCIRpcTimeout extends AbstractTestCITimeout { public void setUp() throws IOException { tableName = TableName.valueOf(name.getMethodName()); TableDescriptor htd = - TableDescriptorBuilder.newBuilder(tableName).addCoprocessor(SleepCoprocessor.class.getName()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAM_NAM)).build(); + TableDescriptorBuilder.newBuilder(tableName).setCoprocessor(SleepCoprocessor.class.getName()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAM_NAM)).build(); TEST_UTIL.getAdmin().createTable(htd); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java index 525fa4c93f..da86418e2e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java @@ -129,7 +129,7 @@ public abstract class TestAsyncAdminBase { byte[]... families) { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); for (byte[] family : families) { - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); } CompletableFuture future = splitKeys == null ? 
admin.createTable(builder.build()) : admin.createTable(builder.build(), splitKeys); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java index b70d8c0fce..6c34c1f74f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java @@ -313,7 +313,7 @@ public class TestAsyncClusterAdminApi extends TestAsyncAdminBase { private void createAndLoadTable(TableName[] tables) { for (TableName table : tables) { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(table); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); admin.createTable(builder.build(), Bytes.toBytes("aaaaa"), Bytes.toBytes("zzzzz"), 16).join(); AsyncTable asyncTable = ASYNC_CONN.getTable(table); List puts = new ArrayList<>(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java index 762dbd1bb4..1a156ec09a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java @@ -47,7 +47,6 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.JVMClusterUtil; import org.apache.hadoop.hbase.util.Threads; import org.junit.ClassRule; -import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; @@ -106,7 +105,7 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase { throws IOException, InterruptedException, ExecutionException { TableDescriptor desc = 
TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build(); admin.createTable(desc, Bytes.toBytes("A"), Bytes.toBytes("Z"), 5).get(); // wait till the table is assigned @@ -262,7 +261,7 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase { .setMobEnabled(true).setMobThreshold(0).build(); TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(columnDescriptor).build(); + .setColumnFamily(columnDescriptor).build(); admin.createTable(tableDescriptor).get(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java index 0dc2b38209..b22caa2a5b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java @@ -111,7 +111,7 @@ public class TestAsyncReplicationAdminApiWithClusters extends TestAsyncAdminBase private void createTableWithDefaultConf(AsyncAdmin admin, TableName tableName) { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); admin.createTable(builder.build()).join(); } @@ -147,7 +147,7 @@ public class TestAsyncReplicationAdminApiWithClusters extends TestAsyncAdminBase createTableWithDefaultConf(admin2, tableName); TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(admin.getDescriptor(tableName).get()); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("newFamily")) + 
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("newFamily")) .build()); admin2.disableTable(tableName).join(); admin2.modifyTable(builder.build()).join(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java index bc3a651c28..2c948dd934 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java @@ -97,7 +97,7 @@ public class TestAsyncTableAdminApi extends TestAsyncAdminBase { final TableName tableName3 = TableName.valueOf(tableName.getNameAsString() + "_3"); TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName3); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); admin.createTable(builder.build(), "a".getBytes(), "z".getBytes(), 3).join(); regionLocations = AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, Optional.of(tableName3)).get(); @@ -105,7 +105,7 @@ public class TestAsyncTableAdminApi extends TestAsyncAdminBase { final TableName tableName4 = TableName.valueOf(tableName.getNameAsString() + "_4"); builder = TableDescriptorBuilder.newBuilder(tableName4); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); try { admin.createTable(builder.build(), "a".getBytes(), "z".getBytes(), 2).join(); fail("Should not be able to create a table with only 2 regions using this API."); @@ -115,7 +115,7 @@ public class TestAsyncTableAdminApi extends TestAsyncAdminBase { final TableName tableName5 = TableName.valueOf(tableName.getNameAsString() + "_5"); builder = TableDescriptorBuilder.newBuilder(tableName5); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); + 
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); admin.createTable(builder.build(), new byte[] { 1 }, new byte[] { 127 }, 16).join(); regionLocations = AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, Optional.of(tableName5)).get(); @@ -191,7 +191,7 @@ public class TestAsyncTableAdminApi extends TestAsyncAdminBase { expectedRegions = 10; final TableName tableName2 = TableName.valueOf(tableName.getNameAsString() + "_2"); TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName2); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); admin.createTable(builder.build(), startKey, endKey, expectedRegions).join(); regions = @@ -244,7 +244,7 @@ public class TestAsyncTableAdminApi extends TestAsyncAdminBase { expectedRegions = 5; final TableName tableName3 = TableName.valueOf(tableName.getNameAsString() + "_3"); builder = TableDescriptorBuilder.newBuilder(tableName3); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); admin.createTable(builder.build(), startKey, endKey, expectedRegions).join(); regions = diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.java index 180f95b17d..63ff3e48ad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.java @@ -70,7 +70,7 @@ public class TestAsyncTableAdminApi2 extends TestAsyncAdminBase { public void testAddColumnFamily() throws Exception { // Create a table with two families TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0)); + 
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0)); admin.createTable(builder.build()).join(); admin.disableTable(tableName).join(); // Verify the table descriptor @@ -85,7 +85,7 @@ public class TestAsyncTableAdminApi2 extends TestAsyncAdminBase { public void testAddSameColumnFamilyTwice() throws Exception { // Create a table with one families TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0)); admin.createTable(builder.build()).join(); admin.disableTable(tableName).join(); // Verify the table descriptor @@ -109,7 +109,7 @@ public class TestAsyncTableAdminApi2 extends TestAsyncAdminBase { TableDescriptorBuilder tdBuilder = TableDescriptorBuilder.newBuilder(tableName); ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(FAMILY_0); int blockSize = cfd.getBlocksize(); - admin.createTable(tdBuilder.addColumnFamily(cfd).build()).join(); + admin.createTable(tdBuilder.setColumnFamily(cfd).build()).join(); admin.disableTable(tableName).join(); // Verify the table descriptor verifyTableDescriptor(tableName, FAMILY_0); @@ -129,7 +129,7 @@ public class TestAsyncTableAdminApi2 extends TestAsyncAdminBase { TableDescriptorBuilder tdBuilder = TableDescriptorBuilder.newBuilder(tableName); ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(FAMILY_0); int blockSize = cfd.getBlocksize(); - admin.createTable(tdBuilder.addColumnFamily(cfd).build()).join(); + admin.createTable(tdBuilder.setColumnFamily(cfd).build()).join(); admin.disableTable(tableName).join(); // Verify the table descriptor verifyTableDescriptor(tableName, FAMILY_0); @@ -150,8 +150,8 @@ public class TestAsyncTableAdminApi2 extends TestAsyncAdminBase { public void testDeleteColumnFamily() throws Exception { // Create a table with two families TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_1)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_1)); admin.createTable(builder.build()).join(); admin.disableTable(tableName).join(); // Verify the table descriptor @@ -166,8 +166,8 @@ public class TestAsyncTableAdminApi2 extends TestAsyncAdminBase { public void testDeleteSameColumnFamilyTwice() throws Exception { // Create a table with two families TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_1)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_1)); admin.createTable(builder.build()).join(); admin.disableTable(tableName).join(); // Verify the table descriptor diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.java index 84009c081e..07c8d774a5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.java @@ -120,7 +120,7 @@ public class TestAsyncTableAdminApi3 extends TestAsyncAdminBase { byte[][] families = { FAMILY, FAMILY_0, FAMILY_1 }; TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); for (byte[] family : families) { - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); } TableDescriptor desc = builder.build(); admin.createTable(desc).join(); diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java index 5c76381b67..3a7614b63d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java @@ -268,7 +268,7 @@ public class TestAsyncTableBatch { public void testPartialSuccess() throws IOException, InterruptedException, ExecutionException { Admin admin = TEST_UTIL.getAdmin(); TableDescriptor htd = TableDescriptorBuilder.newBuilder(admin.getDescriptor(TABLE_NAME)) - .addCoprocessor(ErrorInjectObserver.class.getName()).build(); + .setCoprocessor(ErrorInjectObserver.class.getName()).build(); admin.modifyTable(htd); AsyncTable table = tableGetter.apply(TABLE_NAME); table.putAll(Arrays.asList(SPLIT_KEYS).stream().map(k -> new Put(k).addColumn(FAMILY, CQ, k)) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCISleep.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCISleep.java index 761922acf7..4e5665d871 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCISleep.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCISleep.java @@ -58,10 +58,11 @@ public class TestCISleep extends AbstractTestCITimeout { @Test public void testRpcRetryingCallerSleep() throws Exception { TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAM_NAM)) - .addCoprocessorWithSpec("|" + SleepAndFailFirstTime.class.getName() + "||" + - SleepAndFailFirstTime.SLEEP_TIME_CONF_KEY + "=2000") - .build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAM_NAM)) + .setCoprocessor(CoprocessorDescriptorBuilder.newBuilder(SleepAndFailFirstTime.class.getName()) + .setProperty(SleepAndFailFirstTime.SLEEP_TIME_CONF_KEY, String.valueOf(2000)) + .build()) + .build(); 
TEST_UTIL.getAdmin().createTable(htd); Configuration c = new Configuration(TEST_UTIL.getConfiguration()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java index 6b59fdeaf6..b8ebccd7d5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java @@ -114,9 +114,9 @@ public class TestDropTimeoutRequest { // on handling timeout requests and finally all requests timeout and client throws exception. TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())); - builder.addCoprocessor(SleepLongerAtFirstCoprocessor.class.getName()); + builder.setCoprocessor(SleepLongerAtFirstCoprocessor.class.getName()); ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(FAM_NAM).build(); - builder.addColumnFamily(cfd); + builder.setColumnFamily(cfd); TableDescriptor td = builder.build(); try (Admin admin = TEST_UTIL.getConnection().getAdmin()) { admin.createTable(td); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java index 82cf8023ba..24591c0401 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java @@ -184,7 +184,7 @@ public class TestFromClientSide3 { public void testScanAfterDeletingSpecifiedRow() throws IOException { TableName tableName = TableName.valueOf(name.getMethodName()); TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) .build(); 
TEST_UTIL.getAdmin().createTable(desc); byte[] row = Bytes.toBytes("SpecifiedRow"); @@ -231,7 +231,7 @@ public class TestFromClientSide3 { public void testScanAfterDeletingSpecifiedRowV2() throws IOException { TableName tableName = TableName.valueOf(name.getMethodName()); TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) .build(); TEST_UTIL.getAdmin().createTable(desc); byte[] row = Bytes.toBytes("SpecifiedRow"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMalformedCellFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMalformedCellFromClient.java index 6b57b89a2d..6305fa1412 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMalformedCellFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMalformedCellFromClient.java @@ -83,7 +83,7 @@ public class TestMalformedCellFromClient { @Before public void before() throws Exception { TableDescriptor desc = TableDescriptorBuilder.newBuilder(TABLE_NAME) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) .setValue(HRegion.HBASE_MAX_CELL_SIZE_KEY, String.valueOf(CELL_SIZE)).build(); TEST_UTIL.getConnection().getAdmin().createTable(desc); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java index 046b6f8d14..59920d6eee 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java @@ -317,8 +317,8 @@ public class TestReplicaWithCluster { public void testChangeTable() throws Exception { TableDescriptor td = 
TableDescriptorBuilder.newBuilder(TableName.valueOf("testChangeTable")) .setRegionReplication(NB_SERVERS) - .addCoprocessor(SlowMeCopro.class.getName()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(f)) + .setCoprocessor(SlowMeCopro.class.getName()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(f)) .build(); HTU.getAdmin().createTable(td); Table table = HTU.getConnection().getTable(td.getTableName()); @@ -334,7 +334,7 @@ public class TestReplicaWithCluster { // Add a CF, it should work. TableDescriptor bHdt = HTU.getAdmin().getDescriptor(td.getTableName()); td = TableDescriptorBuilder.newBuilder(td) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(row)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(row)) .build(); HTU.getAdmin().disableTable(td.getTableName()); HTU.getAdmin().modifyTable(td); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java index c5be7cd1bf..27522f56a9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java @@ -63,8 +63,8 @@ public class TestResultFromCoprocessor { public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(3); TableDescriptor desc = TableDescriptorBuilder.newBuilder(TABLE_NAME) - .addCoprocessor(MyObserver.class.getName()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) + .setCoprocessor(MyObserver.class.getName()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) .build(); TEST_UTIL.getAdmin().createTable(desc); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestServerLoadDurability.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestServerLoadDurability.java index 38c3d3a879..267e9e8c01 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestServerLoadDurability.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestServerLoadDurability.java @@ -117,7 +117,7 @@ public class TestServerLoadDurability { private void createTableWithDefaultConf(TableName tableName) throws IOException { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); admin.createTable(builder.build()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoreRegionCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoreRegionCoprocessor.java index 8e6ebf349b..ceb6b5c9e1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoreRegionCoprocessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoreRegionCoprocessor.java @@ -67,7 +67,7 @@ public class TestCoreRegionCoprocessor { String methodName = this.name.getMethodName(); TableName tn = TableName.valueOf(methodName); ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(methodName)).build(); - TableDescriptor td = TableDescriptorBuilder.newBuilder(tn).addColumnFamily(cfd).build(); + TableDescriptor td = TableDescriptorBuilder.newBuilder(tn).setColumnFamily(cfd).build(); RegionInfo ri = RegionInfoBuilder.newBuilder(tn).build(); this.rss = new MockRegionServerServices(HTU.getConfiguration()); this.region = HRegion.openHRegion(ri, td, null, HTU.getConfiguration(), this.rss, null); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestPassCustomCellViaRegionObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestPassCustomCellViaRegionObserver.java index 282d5dc893..fcfd4f6631 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestPassCustomCellViaRegionObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestPassCustomCellViaRegionObserver.java @@ -112,8 +112,8 @@ public class TestPassCustomCellViaRegionObserver { admin.deleteTable(name); } table = UTIL.createTable(TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) - .addCoprocessor(RegionObserverImpl.class.getName()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) + .setCoprocessor(RegionObserverImpl.class.getName()) .build(), null); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java index 3ee70202a0..c8cb805ac1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java @@ -485,14 +485,14 @@ public class TestWALObserver { private TableDescriptor getBasic3FamilyHTableDescriptor(TableName tableName) { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); Arrays.stream(TEST_FAMILY).map(ColumnFamilyDescriptorBuilder::of) - .forEachOrdered(builder::addColumnFamily); + .forEachOrdered(builder::setColumnFamily); return builder.build(); } private TableDescriptor createBasic3FamilyHTD(String tableName) { return TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("a")) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("b")) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("c")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("a")) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("b")) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("c")).build(); } } diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java index cee7a4a3e4..4aa97d3356 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; +import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; @@ -126,9 +127,14 @@ public class TestAssignmentManagerMetrics { // alter table with a non-existing coprocessor - String spec = "hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2"; TableDescriptor htd = TableDescriptorBuilder.newBuilder(TABLENAME) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).addCoprocessorWithSpec(spec) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)) + .setCoprocessor(CoprocessorDescriptorBuilder.newBuilder("com.foo.FooRegionObserver") + .setJarPath("hdfs:///foo.jar") + .setPriority(1001) + .setProperty("arg1", "1") + .setProperty("arg2", "2") + .build()) .build(); try { TEST_UTIL.getAdmin().modifyTable(htd); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java index b665f327a0..b8a53b640c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java @@ -173,7 +173,7 @@ public class TestCatalogJanitor { */ private TableDescriptor 
createTableDescriptorForCurrentMethod() { return TableDescriptorBuilder.newBuilder(TableName.valueOf(this.name.getMethodName())). - addColumnFamily(new HColumnDescriptor(MockMasterServices.DEFAULT_COLUMN_FAMILY_NAME)). + setColumnFamily(new HColumnDescriptor(MockMasterServices.DEFAULT_COLUMN_FAMILY_NAME)). build(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java index 346abbabf4..fb75001cb3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java @@ -342,7 +342,7 @@ public class MockMasterServices extends MockNoopMasterServices { @Override public TableDescriptor get(TableName tablename) throws IOException { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tablename); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(DEFAULT_COLUMN_FAMILY_NAME)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(DEFAULT_COLUMN_FAMILY_NAME)); return builder.build(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java index f5e4634320..afcf446342 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java @@ -179,7 +179,7 @@ public class TestRogueRSAssignment { private List createTable(final TableName tableName) throws Exception { TableDescriptorBuilder tdBuilder = TableDescriptorBuilder.newBuilder(tableName); - tdBuilder.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build()); + 
tdBuilder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build()); byte[][] rows = new byte[initialRegionCount - 1][]; for (int i = 0; i < rows.length; ++i) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java index ece1ee124e..0b40ae0abc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java @@ -126,7 +126,7 @@ public class TestFavoredStochasticBalancerPickers extends BalancerTestBase { ColumnFamilyDescriptorBuilder.newBuilder(HConstants.CATALOG_FAMILY).build(); TableDescriptor desc = TableDescriptorBuilder .newBuilder(tableName) - .addColumnFamily(columnFamilyDescriptor) + .setColumnFamily(columnFamilyDescriptor) .build(); admin.createTable(desc, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), REGIONS); TEST_UTIL.waitUntilAllRegionsAssigned(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java index 94efcc7ed4..9d76ede16f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java @@ -31,7 +31,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; @@ -290,7 
+289,7 @@ public class TestSnapshotFromMaster { // snapshot, the call after snapshot will be a no-op and checks will fail UTIL.deleteTable(TABLE_NAME); TableDescriptor td = TableDescriptorBuilder.newBuilder(TABLE_NAME) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAM)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAM)) .setCompactionEnabled(false) .build(); UTIL.getAdmin().createTable(td); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java index c8bb97de33..785e85f3ce 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java @@ -145,7 +145,7 @@ public class MasterProcedureTestingUtility { public static TableDescriptor createHTD(final TableName tableName, final String... 
family) { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); for (int i = 0; i < family.length; ++i) { - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family[i])); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family[i])); } return builder.build(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterObserverPostCalls.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterObserverPostCalls.java index 65033a3bbf..e6357103a8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterObserverPostCalls.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterObserverPostCalls.java @@ -148,7 +148,7 @@ public class TestMasterObserverPostCalls { admin.createNamespace(NamespaceDescriptor.create(ns).build()); admin.createTable(TableDescriptorBuilder.newBuilder(tn1) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f1")).build()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f1")).build()) .build()); HMaster master = UTIL.getMiniHBaseCluster().getMaster(); @@ -239,7 +239,7 @@ public class TestMasterObserverPostCalls { public void testPostCreateTable() throws IOException { final Admin admin = UTIL.getAdmin(); final TableName tn = TableName.valueOf("postcreatetable"); - final TableDescriptor td = TableDescriptorBuilder.newBuilder(tn).addColumnFamily( + final TableDescriptor td = TableDescriptorBuilder.newBuilder(tn).setColumnFamily( ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f1")).build()).build(); HMaster master = UTIL.getMiniHBaseCluster().getMaster(); @@ -269,7 +269,7 @@ public class TestMasterObserverPostCalls { public void testPostModifyTable() throws IOException { final Admin admin = UTIL.getAdmin(); final TableName tn = TableName.valueOf("postmodifytable"); - final TableDescriptor td = 
TableDescriptorBuilder.newBuilder(tn).addColumnFamily( + final TableDescriptor td = TableDescriptorBuilder.newBuilder(tn).setColumnFamily( ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f1")).build()).build(); HMaster master = UTIL.getMiniHBaseCluster().getMaster(); @@ -289,7 +289,7 @@ public class TestMasterObserverPostCalls { preCount = observer.postHookCalls.get(); try { admin.modifyTable(TableDescriptorBuilder.newBuilder(TableName.valueOf("missing")) - .addColumnFamily(td.getColumnFamily(Bytes.toBytes("f1"))).build()); + .setColumnFamily(td.getColumnFamily(Bytes.toBytes("f1"))).build()); fail("Modifying a missing table should fail"); } catch (IOException e) { // Pass @@ -303,7 +303,7 @@ public class TestMasterObserverPostCalls { public void testPostDisableTable() throws IOException { final Admin admin = UTIL.getAdmin(); final TableName tn = TableName.valueOf("postdisabletable"); - final TableDescriptor td = TableDescriptorBuilder.newBuilder(tn).addColumnFamily( + final TableDescriptor td = TableDescriptorBuilder.newBuilder(tn).setColumnFamily( ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f1")).build()).build(); HMaster master = UTIL.getMiniHBaseCluster().getMaster(); @@ -336,7 +336,7 @@ public class TestMasterObserverPostCalls { public void testPostDeleteTable() throws IOException { final Admin admin = UTIL.getAdmin(); final TableName tn = TableName.valueOf("postdeletetable"); - final TableDescriptor td = TableDescriptorBuilder.newBuilder(tn).addColumnFamily( + final TableDescriptor td = TableDescriptorBuilder.newBuilder(tn).setColumnFamily( ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f1")).build()).build(); HMaster master = UTIL.getMiniHBaseCluster().getMaster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedurePriority.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedurePriority.java index 05d8976a0f..9f5741c929 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedurePriority.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedurePriority.java @@ -115,7 +115,7 @@ public class TestProcedurePriority { for (int i = 0; i < TABLE_COUNT; i++) { futures.add(UTIL.getAdmin().createTableAsync( TableDescriptorBuilder.newBuilder(TableName.valueOf(TABLE_NAME_PREFIX + i)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(CF)).build(), + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(CF)).build(), null)); } for (Future future : futures) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java index f26998bbd7..5c73a6fa0c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java @@ -170,7 +170,7 @@ public class TestCacheOnWriteInSchema { ColumnFamilyDescriptorBuilder.newBuilder(family).setBloomFilterType(BloomType.ROWCOL)) .build(); TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(table)).addColumnFamily(hcd).build(); + TableDescriptorBuilder.newBuilder(TableName.valueOf(table)).setColumnFamily(hcd).build(); // Create a store based on the schema String id = TestCacheOnWriteInSchema.class.getName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java index 225c7237a3..4263de5695 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java @@ -94,7 +94,7 @@ public 
class TestCompactionArchiveConcurrentClose { TableName tableName = TableName.valueOf(name.getMethodName()); TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(fam)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam)).build(); RegionInfo info = RegionInfoBuilder.newBuilder(tableName).build(); HRegion region = initHRegion(htd, info); RegionServerServices rss = mock(RegionServerServices.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java index 4c6cf6ad03..1199c6fc86 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java @@ -102,7 +102,7 @@ public class TestCompactionArchiveIOException { TableName tableName = TableName.valueOf(name.getMethodName()); TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(fam)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam)).build(); RegionInfo info = RegionInfoBuilder.newBuilder(tableName).build(); HRegion region = initHRegion(htd, info); RegionServerServices rss = mock(RegionServerServices.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionLifeCycleTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionLifeCycleTracker.java index 6a6e907687..e680e86164 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionLifeCycleTracker.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionLifeCycleTracker.java @@ -148,9 +148,9 @@ public class TestCompactionLifeCycleTracker { public void setUp() 
throws IOException { UTIL.getAdmin() .createTable(TableDescriptorBuilder.newBuilder(NAME) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(CF1)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(CF2)) - .addCoprocessor(CompactionObserver.class.getName()).build()); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(CF1)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(CF2)) + .setCoprocessor(CompactionObserver.class.getName()).build()); try (Table table = UTIL.getConnection().getTable(NAME)) { for (int i = 0; i < 100; i++) { byte[] row = Bytes.toBytes(i); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java index b7e01647d1..6da5ec0a74 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java @@ -953,7 +953,7 @@ public class TestDefaultMemStore { // parameterized tests add [#] suffix get rid of [ and ]. 
TableDescriptor desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(name.getMethodName().replaceAll("[\\[\\]]", "_"))) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("foo")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("foo")).build(); RegionInfo hri = RegionInfoBuilder.newBuilder(desc.getTableName()) .setStartKey(Bytes.toBytes("row_0200")).setEndKey(Bytes.toBytes("row_0300")).build(); HRegion r = HRegion.createHRegion(hri, testDir, conf, desc, wFactory.getWAL(hri)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushLifeCycleTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushLifeCycleTracker.java index 278afe029e..b2fbe2d9e3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushLifeCycleTracker.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushLifeCycleTracker.java @@ -182,8 +182,8 @@ public class TestFlushLifeCycleTracker { public void setUp() throws IOException { UTIL.getAdmin() .createTable(TableDescriptorBuilder.newBuilder(NAME) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(CF)) - .addCoprocessor(FlushObserver.class.getName()).build()); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(CF)) + .setCoprocessor(FlushObserver.class.getName()).build()); region = UTIL.getHBaseCluster().getRegions(NAME).get(0); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java index 770a60a808..d66feeef25 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java @@ -157,7 +157,7 @@ public class TestHRegionReplayEvents { method = name.getMethodName(); TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(method)); for (byte[] family : families) { - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); } htd = builder.build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java index 94798905e7..b310f58675 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java @@ -203,7 +203,7 @@ public class TestHStore { private void initHRegion(String methodName, Configuration conf, TableDescriptorBuilder builder, ColumnFamilyDescriptor hcd, MyStoreHook hook, boolean switchToPread) throws IOException { - TableDescriptor htd = builder.addColumnFamily(hcd).build(); + TableDescriptor htd = builder.setColumnFamily(hcd).build(); Path basedir = new Path(DIR + methodName); Path tableDir = FSUtils.getTableDir(basedir, htd.getTableName()); final Path logdir = new Path(basedir, AbstractFSWALProvider.getWALDirectoryName(methodName)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java index 814c86fa70..cc9e3850ff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java @@ -201,7 +201,7 @@ public class TestMobStoreCompaction { TableDescriptor td = TableDescriptorBuilder .newBuilder(region.getTableDescriptor()) .removeColumnFamily(cfName) - .addColumnFamily(cfd) + .setColumnFamily(cfd) .build(); region.setTableDescriptor(td); return region; diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java index 6af72ca754..50dffd50bf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java @@ -553,7 +553,7 @@ public class TestRegionServerMetrics { TableDescriptor td = TableDescriptorBuilder .newBuilder(region.getTableDescriptor()) .removeColumnFamily(cfName) - .addColumnFamily(cfd) + .setColumnFamily(cfd) .build(); ((HRegion)region).setTableDescriptor(td); return region; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java index a3b5f227a0..5ad8e21fca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java @@ -130,8 +130,8 @@ public class TestRegionServerReadRequestMetrics { private static Table createTable() throws IOException { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TABLE_NAME); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(CF1)); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF2).setTimeToLive(TTL) + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(CF1)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF2).setTimeToLive(TTL) .build()); admin.createTable(builder.build()); return TEST_UTIL.getConnection().getTable(TABLE_NAME); @@ -422,8 +422,8 @@ public class TestRegionServerReadRequestMetrics { public void testReadRequestsWithCoprocessor() throws Exception { TableName tableName = 
TableName.valueOf("testReadRequestsWithCoprocessor"); TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(CF1)); - builder.addCoprocessor(ScanRegionCoprocessor.class.getName()); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(CF1)); + builder.setCoprocessor(ScanRegionCoprocessor.class.getName()); admin.createTable(builder.build()); try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java index 5596fc9d67..3e5ab9b99d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java @@ -88,7 +88,7 @@ public class TestSplitWalDataLoss { Admin admin = testUtil.getAdmin(); admin.createNamespace(namespace); admin.createTable(TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build()); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build()); testUtil.waitTableAvailable(tableName); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java index 5b0a60f985..7ce2418d30 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java @@ -85,7 +85,7 @@ public class TestStoreFileRefresherChore { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName).setRegionReplication(regionReplication); Arrays.stream(families).map(family -> ColumnFamilyDescriptorBuilder.newBuilder(family) - 
.setMaxVersions(Integer.MAX_VALUE).build()).forEachOrdered(builder::addColumnFamily); + .setMaxVersions(Integer.MAX_VALUE).build()).forEachOrdered(builder::setColumnFamily); return builder.build(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java index 6c8e45966c..0af297066f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java @@ -73,7 +73,7 @@ public class TestSwitchToStreamRead { VALUE_PREFIX = sb.append("-").toString(); REGION = UTIL.createLocalHRegion( TableDescriptorBuilder.newBuilder(TABLE_NAME) - .addColumnFamily( + .setColumnFamily( ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setBlocksize(1024).build()) .build(), null, null); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java index c7a2a7c4ed..599260be9d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java @@ -104,7 +104,7 @@ public class TestWALMonotonicallyIncreasingSeqId { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); Arrays.stream(families).map( f -> ColumnFamilyDescriptorBuilder.newBuilder(f).setMaxVersions(Integer.MAX_VALUE).build()) - .forEachOrdered(builder::addColumnFamily); + .forEachOrdered(builder::setColumnFamily); return builder.build(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java index 7ea879e165..e5a4f0ce15 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java @@ -93,7 +93,7 @@ public class TestFIFOCompactionPolicy { FIFOCompactionPolicy.class.getName()) .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, DisabledRegionSplitPolicy.class.getName()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1).build()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1).build()) .build(); admin.createTable(desc); Table table = TEST_UTIL.getConnection().getTable(tableName); @@ -155,7 +155,7 @@ public class TestFIFOCompactionPolicy { FIFOCompactionPolicy.class.getName()) .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, DisabledRegionSplitPolicy.class.getName()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build(); TEST_UTIL.getAdmin().createTable(desc); } @@ -169,7 +169,7 @@ public class TestFIFOCompactionPolicy { FIFOCompactionPolicy.class.getName()) .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, DisabledRegionSplitPolicy.class.getName()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1) .setMinVersions(1).build()) .build(); TEST_UTIL.getAdmin().createTable(desc); @@ -187,7 +187,7 @@ public class TestFIFOCompactionPolicy { .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, DisabledRegionSplitPolicy.class.getName()) .setValue(HStore.BLOCKING_STOREFILES_KEY, "10") - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1).build()) + 
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1).build()) .build(); TEST_UTIL.getAdmin().createTable(desc); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java index a7cdfa5b38..5319d302de 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java @@ -203,7 +203,7 @@ public class TestCompactionWithThroughputController { try { TEST_UTIL.getAdmin() .createTable(TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).setCompactionEnabled(false) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).setCompactionEnabled(false) .build()); TEST_UTIL.waitTableAvailable(tableName); HRegionServer regionServer = TEST_UTIL.getRSForFirstRegionInTable(tableName); @@ -260,7 +260,7 @@ public class TestCompactionWithThroughputController { try { TEST_UTIL.getAdmin() .createTable(TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).setCompactionEnabled(false) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).setCompactionEnabled(false) .build()); TEST_UTIL.waitTableAvailable(tableName); HStore store = getStoreWithName(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java index 83533d9969..1c39646667 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java @@ -173,7 +173,7 @@ public class TestFlushWithThroughputController { hbtu.startMiniCluster(1); Connection conn = ConnectionFactory.createConnection(conf); hbtu.getAdmin().createTable(TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).setCompactionEnabled(false) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).setCompactionEnabled(false) .build()); hbtu.waitTableAvailable(tableName); HRegionServer regionServer = hbtu.getRSForFirstRegionInTable(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java index 1cdb6e5246..5336963dba 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java @@ -252,9 +252,9 @@ public abstract class AbstractTestFSWAL { AbstractFSWAL wal = newWAL(FS, CommonFSUtils.getWALRootDir(conf1), DIR.toString(), HConstants.HREGION_OLDLOGDIR_NAME, conf1, null, true, null, null); TableDescriptor t1 = TableDescriptorBuilder.newBuilder(TableName.valueOf("t1")) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); TableDescriptor t2 = TableDescriptorBuilder.newBuilder(TableName.valueOf("t2")) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); RegionInfo hri1 = RegionInfoBuilder.newBuilder(t1.getTableName()).build(); RegionInfo hri2 = RegionInfoBuilder.newBuilder(t2.getTableName()).build(); // add edits and roll the wal @@ -361,7 +361,7 @@ public abstract class AbstractTestFSWAL { final RegionInfo hri = 
RegionInfoBuilder.newBuilder(tableName).build(); final byte[] rowName = tableName.getName(); final TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("f")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f")).build(); HRegion r = HBaseTestingUtility.createRegionAndWAL(hri, TEST_UTIL.getDefaultRootDirPath(), TEST_UTIL.getConfiguration(), htd); HBaseTestingUtility.closeRegionAndWAL(r); @@ -449,7 +449,7 @@ public abstract class AbstractTestFSWAL { CONF, null, true, null, null); wal.close(); TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("table")) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); RegionInfo ri = RegionInfoBuilder.newBuilder(td.getTableName()).build(); MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(); NavigableMap scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java index c6059b1e12..610af61fec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java @@ -305,7 +305,7 @@ public abstract class AbstractTestLogRolling { protected Table createTestTable(String tableName) throws IOException { // Create the test table and open it TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(getName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build(); admin.createTable(desc); return TEST_UTIL.getConnection().getTable(desc.getTableName()); } diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java index f5fabbc457..4effa6de11 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java @@ -291,7 +291,7 @@ public class TestDurability { private HRegion createHRegion(WALFactory wals, Durability durability) throws IOException { TableName tableName = TableName.valueOf(name.getMethodName().replaceAll("[^A-Za-z0-9-_]", "_")); TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build(); RegionInfo info = RegionInfoBuilder.newBuilder(tableName).build(); Path path = new Path(DIR, tableName.getNameAsString()); if (FS.exists(path)) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java index 93ea2b807b..7baaa6c517 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java @@ -111,7 +111,7 @@ public class TestFSHLog extends AbstractTestFSWAL { syncRunnerIndexField.set(ringBufferEventHandler, Integer.MAX_VALUE - 1); TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(this.name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); NavigableMap scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (byte[] fam : htd.getColumnFamilyNames()) { scopes.put(fam, 0); @@ -160,7 +160,7 @@ public class TestFSHLog extends AbstractTestFSWAL { // open a new 
region which uses this WAL TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(this.name.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(b)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(b)).build(); RegionInfo hri = RegionInfoBuilder.newBuilder(htd.getTableName()).build(); ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null); final HRegion region = TEST_UTIL.createLocalHRegion(hri, htd, log); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java index e2b480919e..3eed1372a1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java @@ -145,7 +145,7 @@ public class TestLogRollAbort { // Create the test table and open it TableName tableName = TableName.valueOf(this.getClass().getSimpleName()); TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build(); admin.createTable(desc); Table table = TEST_UTIL.getConnection().getTable(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java index 55f2726033..f3cf2bfe64 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java @@ -136,7 +136,7 @@ public class TestLogRolling extends AbstractTestLogRolling { // Create the test table and open it TableDescriptor desc = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(getName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build(); admin.createTable(desc); Table table = TEST_UTIL.getConnection().getTable(desc.getTableName()); @@ -244,7 +244,7 @@ public class TestLogRolling extends AbstractTestLogRolling { // Create the test table and open it TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(getName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build(); admin.createTable(desc); Table table = TEST_UTIL.getConnection().getTable(desc.getTableName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java index 75913d2dbf..de0b94f51f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java @@ -136,11 +136,11 @@ public class TestMasterReplication { CoprocessorHost.USER_REGION_COPROCESSOR_CONF_KEY, CoprocessorCounter.class.getName()); table = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(famName) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(famName) .setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(famName1) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(famName1) .setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(noRepfamName)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(noRepfamName)).build(); } /** diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java index 6d2b578aae..37da48241f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java @@ -92,18 +92,18 @@ public class TestNamespaceReplication extends TestReplicationBase { admin2.createNamespace(NamespaceDescriptor.create(ns2).build()); TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tabAName); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder + builder.setColumnFamily(ColumnFamilyDescriptorBuilder .newBuilder(f1Name).setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build()); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder + builder.setColumnFamily(ColumnFamilyDescriptorBuilder .newBuilder(f2Name).setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build()); TableDescriptor tabA = builder.build(); admin1.createTable(tabA); admin2.createTable(tabA); builder = TableDescriptorBuilder.newBuilder(tabBName); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder + builder.setColumnFamily(ColumnFamilyDescriptorBuilder .newBuilder(f1Name).setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build()); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder + builder.setColumnFamily(ColumnFamilyDescriptorBuilder .newBuilder(f2Name).setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build()); TableDescriptor tabB = builder.build(); admin1.createTable(tabB); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java index 3a7a5752b4..397d6e20a9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java @@ -232,9 +232,9 @@ public class TestReplicationBase { hbaseAdmin.addReplicationPeer("2", rpc); TableDescriptor table = TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(famName).setMaxVersions(100) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(famName).setMaxVersions(100) .setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build()) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(noRepfamName)).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(noRepfamName)).build(); scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (ColumnFamilyDescriptor f : table.getColumnFamilies()) { scopes.put(f.getName(), f.getScope()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java index f5d2a2d5e4..8d24f5ef5d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java @@ -348,7 +348,7 @@ public class TestReplicationSmallTests extends TestReplicationBase { // Create Tables for (int i = 0; i < numOfTables; i++) { hadmin.createTable(TableDescriptorBuilder.newBuilder(TableName.valueOf(tName + i)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(colFam)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(colFam)) .setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build()) .build()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java index 407ebf9be5..3655352bf5 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java @@ -666,7 +666,7 @@ public class SecureTestUtil { byte[][] families) throws Exception { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); for (byte[] family : families) { - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); } createTable(testUtil, testUtil.getAdmin(), builder.build()); return testUtil.getConnection().getTable(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOnNewVersionBehaviorTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOnNewVersionBehaviorTable.java index 4093ace36f..6490530801 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOnNewVersionBehaviorTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOnNewVersionBehaviorTable.java @@ -42,7 +42,7 @@ public class TestVisibilityLabelsOnNewVersionBehaviorTable TableName tableName = TableName.valueOf(testName.getMethodName()); TEST_UTIL.getAdmin() .createTable(TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily( + .setColumnFamily( ColumnFamilyDescriptorBuilder.newBuilder(fam).setNewVersionBehavior(true).build()) .build()); return TEST_UTIL.getConnection().getTable(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java index ce8e2ebaad..b0cabed46d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java @@ -64,7 +64,7 @@ public class TestVisibilityLabelsWithDeletes extends VisibilityLabelsWithDeletes protected Table createTable(byte[] fam) throws IOException { TableName tableName = TableName.valueOf(testName.getMethodName()); TEST_UTIL.getAdmin().createTable(TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(fam)).build()); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam)).build()); return TEST_UTIL.getConnection().getTable(tableName); } @@ -84,7 +84,7 @@ public class TestVisibilityLabelsWithDeletes extends VisibilityLabelsWithDeletes builder.setMaxVersions(maxVersions); } TEST_UTIL.getAdmin().createTable( - TableDescriptorBuilder.newBuilder(tableName).addColumnFamily(builder.build()).build()); + TableDescriptorBuilder.newBuilder(tableName).setColumnFamily(builder.build()).build()); } @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java index 1a33f13b2b..75b6c235dc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java @@ -66,7 +66,7 @@ public class MobSnapshotTestingUtils { = TableDescriptorBuilder.newBuilder(tableName) .setRegionReplication(regionReplication); for (byte[] family : families) { - builder.addColumnFamily(ColumnFamilyDescriptorBuilder + builder.setColumnFamily(ColumnFamilyDescriptorBuilder .newBuilder(family) .setMobEnabled(true) .setMobThreshold(0L) @@ -96,7 +96,7 @@ public class MobSnapshotTestingUtils { // tests have hard coded counts of what to expect in block cache, etc., // and blooms being // on is interfering. 
- builder.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family) + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family) .setBloomFilterType(BloomType.NONE) .setMobEnabled(true) .setMobThreshold(0L) @@ -152,7 +152,7 @@ public class MobSnapshotTestingUtils { @Override public TableDescriptor createHtd(final String tableName) { return TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName)) - .addColumnFamily(ColumnFamilyDescriptorBuilder + .setColumnFamily(ColumnFamilyDescriptorBuilder .newBuilder(Bytes.toBytes(TEST_FAMILY)) .setMobEnabled(true) .setMobThreshold(0L) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java index 5aca35272d..e798839879 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java @@ -694,7 +694,7 @@ public final class SnapshotTestingUtils { public TableDescriptor createHtd(final String tableName) { return TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName)) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)) .build(); } @@ -772,7 +772,7 @@ public final class SnapshotTestingUtils { .newBuilder(tableName) .setRegionReplication(regionReplication); for (byte[] family : families) { - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); } byte[][] splitKeys = getSplitKeys(nRegions); util.createTable(builder.build(), splitKeys); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java index 9da82a4936..85235b6420 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java @@ -236,7 +236,7 @@ public class TestLoadIncrementalHFiles { private TableDescriptor buildHTD(TableName tableName, BloomType bloomType) { return TableDescriptorBuilder.newBuilder(tableName) - .addColumnFamily( + .setColumnFamily( ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setBloomFilterType(bloomType).build()) .build(); } @@ -462,7 +462,7 @@ public class TestLoadIncrementalHFiles { // set real family name to upper case in purpose to simulate the case that // family name in HFiles is invalid TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(TABLE)) - .addColumnFamily(ColumnFamilyDescriptorBuilder + .setColumnFamily(ColumnFamilyDescriptorBuilder .of(Bytes.toBytes(new String(FAMILY).toUpperCase(Locale.ROOT)))) .build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java index 48a6d23c0c..a4b99a11b7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java @@ -144,7 +144,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { private TableDescriptor createTableDesc(TableName name, int cfs) { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(name); IntStream.range(0, cfs).mapToObj(i -> ColumnFamilyDescriptorBuilder.of(family(i))) - .forEachOrdered(builder::addColumnFamily); + .forEachOrdered(builder::setColumnFamily); return builder.build(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java 
index 0ddc0c3055..11c7bcd005 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java @@ -238,7 +238,7 @@ public class TestFSTableDescriptors { // Update the table infos for (int i = 0; i < count; i++) { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(name + i)); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of("" + i)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of("" + i)); htds.updateTableDescriptor(builder.build()); } // Wait a while so mod time we write is for sure different. @@ -275,7 +275,7 @@ public class TestFSTableDescriptors { // Update the table infos for (int i = 0; i < count; i++) { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(name + i)); - builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of("" + i)); + builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of("" + i)); htds.updateTableDescriptor(builder.build()); } for (int i = 0; i < count; i++) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java index c3615a2fe2..2548a174fa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java @@ -186,10 +186,10 @@ public class TestFSHLogProvider { LOG.info(currentTest.getMethodName()); TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); TableDescriptor htd2 = TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName() + "2")) - 
.addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); NavigableMap scopes1 = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (byte[] fam : htd.getColumnFamilyNames()) { scopes1.put(fam, 0); @@ -266,10 +266,10 @@ public class TestFSHLogProvider { LOG.debug(currentTest.getMethodName()); TableDescriptor table1 = TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName() + "1")) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); TableDescriptor table2 = TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName() + "2")) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); NavigableMap scopes1 = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (byte[] fam : table1.getColumnFamilyNames()) { scopes1.put(fam, 0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java index 66d2c94330..b1fe67b779 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java @@ -494,7 +494,7 @@ public class TestWALFactory { int colCount = 10; TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName())) - .addColumnFamily(ColumnFamilyDescriptorBuilder.of("column")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("column")).build(); NavigableMap scopes = new TreeMap(Bytes.BYTES_COMPARATOR); for (byte[] fam : htd.getColumnFamilyNames()) { scopes.put(fam, 0); @@ -555,7 +555,7 @@ public class TestWALFactory { int colCount = 10; TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName())) - 
.addColumnFamily(ColumnFamilyDescriptorBuilder.of("column")).build(); + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("column")).build(); NavigableMap scopes = new TreeMap(Bytes.BYTES_COMPARATOR); for (byte[] fam : htd.getColumnFamilyNames()) { scopes.put(fam, 0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java index 7afbb0b906..9a8bddfc92 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java @@ -403,7 +403,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool { TableDescriptorBuilder.newBuilder(TableName.valueOf(TABLE_NAME + ":" + regionNum)); IntStream.range(0, numFamilies) .mapToObj(i -> ColumnFamilyDescriptorBuilder.of(FAMILY_PREFIX + i)) - .forEachOrdered(builder::addColumnFamily); + .forEachOrdered(builder::setColumnFamily); return builder.build(); } diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb b/hbase-shell/src/main/ruby/hbase/admin.rb index f52438088a..078477ff03 100644 --- a/hbase-shell/src/main/ruby/hbase/admin.rb +++ b/hbase-shell/src/main/ruby/hbase/admin.rb @@ -706,6 +706,7 @@ module Hbase next unless k =~ /coprocessor/i v = String.new(value) v.strip! + # TODO: We should not require user to config the coprocessor with our inner format. htd.addCoprocessorWithSpec(v) valid_coproc_keys << key end