diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 0ee29e5..a166b91 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -295,6 +295,11 @@
<scope>test</scope>
</dependency>
<dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-core</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ChangeSplitPolicyAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ChangeSplitPolicyAction.java
index 9af5728..b4cf6a3 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ChangeSplitPolicyAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ChangeSplitPolicyAction.java
@@ -18,9 +18,10 @@
package org.apache.hadoop.hbase.chaos.actions;
import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
import org.apache.hadoop.hbase.regionserver.IncreasingToUpperBoundRegionSplitPolicy;
@@ -49,10 +50,11 @@ public class ChangeSplitPolicyAction extends Action {
Admin admin = util.getAdmin();
LOG.info("Performing action: Change split policy of table " + tableName);
- HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
+ TableDescriptor tableDescriptor = admin.getDescriptor(tableName);
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
String chosenPolicy = possiblePolicies[random.nextInt(possiblePolicies.length)];
- tableDescriptor.setRegionSplitPolicyClassName(chosenPolicy);
+ builder.setRegionSplitPolicyClassName(chosenPolicy);
LOG.info("Changing " + tableName + " split policy to " + chosenPolicy);
- admin.modifyTable(tableName, tableDescriptor);
+ admin.modifyTable(builder.build());
}
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index 503d4c1..e45baf1 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -34,8 +34,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.apache.hadoop.hbase.ClusterStatus;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
import org.apache.hadoop.hbase.NamespaceExistException;
@@ -50,12 +48,15 @@ import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingMetaAction;
import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingTableAction;
import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.RetriesExhaustedException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.ipc.FatalConnectionException;
@@ -232,15 +233,17 @@ public class IntegrationTestMTTR {
}
// Create the table. If this fails then fail everything.
- HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
+ TableDescriptor tableDescriptor = util.getAdmin().getDescriptor(tableName);
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
// Make the max file size huge so that splits don't happen during the test.
- tableDescriptor.setMaxFileSize(Long.MAX_VALUE);
+ builder.setMaxFileSize(Long.MAX_VALUE);
- HColumnDescriptor descriptor = new HColumnDescriptor(FAMILY);
- descriptor.setMaxVersions(1);
- tableDescriptor.addFamily(descriptor);
- util.getAdmin().createTable(tableDescriptor);
+ ColumnFamilyDescriptorBuilder colDescriptorBldr =
+ ColumnFamilyDescriptorBuilder.newBuilder(FAMILY);
+ colDescriptorBldr.setMaxVersions(1);
+ builder.addColumnFamily(colDescriptorBldr.build());
+ util.getAdmin().createTable(builder.build());
// Setup the table for LoadTestTool
int ret = loadTool.run(new String[]{"-tn", loadTableName.getNameAsString(), "-init_only"});
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java
new file mode 100644
index 0000000..091da9a
--- /dev/null
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.chaos.actions;
+
+import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
+@Category({MediumTests.class})
+public class TestChangeSplitPolicyAction extends Action {
+ private final static IntegrationTestingUtility TEST_UTIL = new IntegrationTestingUtility();
+ private static ChangeSplitPolicyAction action;
+ private Admin admin;
+ private TableName tableName = TableName.valueOf("ChangeSplitPolicyAction");
+
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ TEST_UTIL.startMiniCluster(2);
+ }
+
+ @AfterClass
+ public static void tearDownAfterClass() throws Exception {
+ TEST_UTIL.shutdownMiniCluster();
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ this.admin = TEST_UTIL.getAdmin();
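+ // Create the target table with a single column family for the action to modify.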
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
+ admin.createTable(builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of("fam")).build());
+ }
+
+ @Test
+ public void testChangeSplitPolicyAction() throws Exception {
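+ // Mock the chaos ActionContext so the action runs against the mini cluster started above.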
+ ActionContext ctx = Mockito.mock(ActionContext.class);
+ Mockito.when(ctx.getHBaseIntegrationTestingUtility()).thenReturn(TEST_UTIL);
+ Mockito.when(ctx.getHBaseCluster()).thenReturn(TEST_UTIL.getHBaseCluster());
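+ // Performing the action picks a random split policy and applies it via Admin.modifyTable.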
+ action = new ChangeSplitPolicyAction(tableName);
+ action.init(ctx);
+ action.perform();
+ }
+}