diff --git beeline/pom.xml beeline/pom.xml
index 4e5ad0795c..a5a1e42896 100644
--- beeline/pom.xml
+++ beeline/pom.xml
@@ -29,7 +29,7 @@
..
- 1.6.6
+ 2.0.2
@@ -127,7 +127,7 @@
org.powermock
- powermock-api-mockito
+ powermock-api-mockito2
${powermock.version}
test
diff --git beeline/src/test/org/apache/hive/beeline/TestBufferedRows.java beeline/src/test/org/apache/hive/beeline/TestBufferedRows.java
index 9478ec5880..1add3c01b3 100644
--- beeline/src/test/org/apache/hive/beeline/TestBufferedRows.java
+++ beeline/src/test/org/apache/hive/beeline/TestBufferedRows.java
@@ -17,6 +17,7 @@
*/
package org.apache.hive.beeline;
+import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -26,7 +27,6 @@
import org.junit.Assert;
import org.junit.Test;
-import org.mockito.Matchers;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@@ -99,7 +99,7 @@ public Boolean answer(InvocationOnMock invocation) {
}
});
- when(mockResultSet.getObject(Matchers.anyInt())).thenAnswer(new Answer() {
+ when(mockResultSet.getObject(anyInt())).thenAnswer(new Answer() {
@Override
public String answer(InvocationOnMock invocation) {
Object[] args = invocation.getArguments();
diff --git beeline/src/test/org/apache/hive/beeline/TestTableOutputFormat.java beeline/src/test/org/apache/hive/beeline/TestTableOutputFormat.java
index 0f557e8bc4..5b8407088f 100644
--- beeline/src/test/org/apache/hive/beeline/TestTableOutputFormat.java
+++ beeline/src/test/org/apache/hive/beeline/TestTableOutputFormat.java
@@ -15,13 +15,14 @@
*/
package org.apache.hive.beeline;
+import static org.mockito.ArgumentMatchers.anyInt;
+
import java.io.PrintStream;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
-import org.mockito.Matchers;
import static org.mockito.Mockito.when;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@@ -96,7 +97,7 @@ public Boolean answer(final InvocationOnMock invocation) {
}
});
- when(mockResultSet.getObject(Matchers.anyInt())).thenAnswer(new Answer() {
+ when(mockResultSet.getObject(anyInt())).thenAnswer(new Answer() {
@Override
public String answer(final InvocationOnMock invocation) {
Object[] args = invocation.getArguments();
diff --git beeline/src/test/org/apache/hive/beeline/schematool/TestHiveSchemaTool.java beeline/src/test/org/apache/hive/beeline/schematool/TestHiveSchemaTool.java
index 2311b0409f..52b2aca520 100644
--- beeline/src/test/org/apache/hive/beeline/schematool/TestHiveSchemaTool.java
+++ beeline/src/test/org/apache/hive/beeline/schematool/TestHiveSchemaTool.java
@@ -35,11 +35,10 @@
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.same;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.when;
import static org.powermock.api.mockito.PowerMockito.mockStatic;
-import static org.powermock.api.mockito.PowerMockito.verifyStatic;
@RunWith(PowerMockRunner.class)
@PowerMockIgnore("javax.management.*")
@@ -71,7 +70,6 @@ public void setup() throws IOException {
@After
public void globalAssert() throws IOException {
- verifyStatic();
HiveSchemaHelper.getValidConfVar(eq(MetastoreConf.ConfVars.CONNECT_URL_KEY), same(hiveConf));
HiveSchemaHelper
.getValidConfVar(eq(MetastoreConf.ConfVars.CONNECTION_DRIVER), same(hiveConf));
diff --git cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
index 42e3bc564a..5f219005c9 100644
--- cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
+++ cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hive.cli;
-import static org.mockito.Matchers.anyBoolean;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 7b3acad511..a08dd03a46 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2697,11 +2697,6 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal
"Truststore password when using a client-side certificate with TLS connectivity to ZooKeeper." +
"Overrides any explicit value set via the zookeeper.ssl.trustStore.password " +
"system property (note the camelCase)."),
- HIVE_ZOOKEEPER_KILLQUERY_ENABLE("hive.zookeeper.killquery.enable", true,
- "Whether enabled kill query coordination with zookeeper, " +
- "when hive.server2.support.dynamic.service.discovery is enabled."),
- HIVE_ZOOKEEPER_KILLQUERY_NAMESPACE("hive.zookeeper.killquery.namespace", "killQueries",
- "When kill query coordination is enabled, uses this namespace for registering queries to kill with zookeeper"),
// Transactions
HIVE_TXN_MANAGER("hive.txn.manager",
diff --git common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java
index 9b5748e724..15e74db366 100644
--- common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java
+++ common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java
@@ -22,7 +22,7 @@
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
diff --git druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
index 74576975a4..edfcc65460 100644
--- druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
+++ druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
@@ -18,8 +18,8 @@
package org.apache.hadoop.hive.druid.serde;
import static org.junit.Assert.assertEquals;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyObject;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyObject;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/TestMutatorClient.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/TestMutatorClient.java
index 91b90ed818..4222786d80 100644
--- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/TestMutatorClient.java
+++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/TestMutatorClient.java
@@ -20,7 +20,7 @@
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
-import static org.mockito.Matchers.anyString;
+import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@@ -42,7 +42,7 @@
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class TestMutatorClient {
diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/TestTransaction.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/TestTransaction.java
index c47cf4d7cf..4d30c253c6 100644
--- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/TestTransaction.java
+++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/TestTransaction.java
@@ -31,7 +31,7 @@
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class TestTransaction {
diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/lock/TestHeartbeatTimerTask.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/lock/TestHeartbeatTimerTask.java
index 1edec690b0..33e29cf924 100644
--- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/lock/TestHeartbeatTimerTask.java
+++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/lock/TestHeartbeatTimerTask.java
@@ -33,7 +33,7 @@
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class TestHeartbeatTimerTask {
diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/lock/TestLock.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/lock/TestLock.java
index 0a46faf90d..156a37ebc7 100644
--- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/lock/TestLock.java
+++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/client/lock/TestLock.java
@@ -24,18 +24,18 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyInt;
-import static org.mockito.Matchers.anyLong;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyCollection;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
-import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.when;
import java.net.InetAddress;
-import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
@@ -60,7 +60,7 @@
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
import com.google.common.collect.ImmutableSet;
@@ -99,9 +99,10 @@ public void injectMocks() throws Exception {
when(mockMetaStoreClient.lock(any(LockRequest.class))).thenReturn(mockLockResponse);
when(mockLockResponse.getLockid()).thenReturn(LOCK_ID);
when(mockLockResponse.getState()).thenReturn(ACQUIRED);
- when(
- mockHeartbeatFactory.newInstance(any(IMetaStoreClient.class), any(LockFailureListener.class), any(Long.class),
- any(Collection.class), anyLong(), anyInt())).thenReturn(mockHeartbeat);
+ // The transaction ID argument may be null, so match it with any() rather than any(Long.class)
+ when(mockHeartbeatFactory.newInstance(
+ any(IMetaStoreClient.class), any(LockFailureListener.class), any(), anyCollection(), any(Long.class), anyInt())
+ ).thenReturn(mockHeartbeat);
readLock = new Lock(mockMetaStoreClient, mockHeartbeatFactory, configuration, mockListener, USER, SOURCES,
Collections. emptySet(), 3, 0);
@@ -138,7 +139,7 @@ public void testAcquireReadLockCheckHeartbeatCreated() throws Exception {
configuration.set("hive.txn.timeout", "100s");
readLock.acquire();
- verify(mockHeartbeatFactory).newInstance(eq(mockMetaStoreClient), eq(mockListener), any(Long.class), eq(SOURCES),
+ verify(mockHeartbeatFactory).newInstance(eq(mockMetaStoreClient), eq(mockListener), any(), eq(SOURCES),
eq(LOCK_ID), eq(75));
}
@@ -321,11 +322,11 @@ public void testHeartbeatFailsTxnAbortedException() throws Exception {
@Test
public void testHeartbeatContinuesTException() throws Exception {
Throwable t = new TException();
- doThrow(t).when(mockMetaStoreClient).heartbeat(0, LOCK_ID);
+ lenient().doThrow(t).when(mockMetaStoreClient).heartbeat(0, LOCK_ID);
HeartbeatTimerTask task = new HeartbeatTimerTask(mockMetaStoreClient, mockListener, TRANSACTION_ID, SOURCES,
LOCK_ID);
task.run();
- verifyZeroInteractions(mockListener);
+ verifyNoInteractions(mockListener);
}
private static Table createTable(String databaseName, String tableName) {
diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMetaStorePartitionHelper.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMetaStorePartitionHelper.java
index 335ecd2af8..4a7d358698 100644
--- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMetaStorePartitionHelper.java
+++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMetaStorePartitionHelper.java
@@ -20,7 +20,7 @@
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.when;
import java.io.IOException;
@@ -40,7 +40,7 @@
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class TestMetaStorePartitionHelper {
@@ -95,7 +95,7 @@ public void injectMocks() throws Exception {
public void getPathForUnpartitionedTable() throws Exception {
Path path = helper.getPathForPartition(UNPARTITIONED_VALUES);
assertThat(path, is(TABLE_PATH));
- verifyZeroInteractions(mockClient);
+ verifyNoInteractions(mockClient);
}
@Test
@@ -107,7 +107,7 @@ public void getPathForPartitionedTable() throws Exception {
@Test
public void createOnUnpartitionTableDoesNothing() throws Exception {
helper.createPartitionIfNotExists(UNPARTITIONED_VALUES);
- verifyZeroInteractions(mockClient);
+ verifyNoInteractions(mockClient);
}
@Test
diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMutatorCoordinator.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMutatorCoordinator.java
index fab56b35b9..297e67c40d 100644
--- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMutatorCoordinator.java
+++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMutatorCoordinator.java
@@ -17,15 +17,16 @@
*/
package org.apache.hive.hcatalog.streaming.mutate.worker;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyInt;
-import static org.mockito.Matchers.anyList;
-import static org.mockito.Matchers.anyLong;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyList;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.util.Arrays;
@@ -41,7 +42,7 @@
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class TestMutatorCoordinator {
@@ -92,7 +93,7 @@ public void createCoordinator() throws Exception {
mockMutator);
when(mockPartitionHelper.getPathForPartition(any(List.class))).thenReturn(PATH_A);
when(mockRecordInspector.extractRecordIdentifier(RECORD)).thenReturn(ROW__ID_INSERT);
- when(mockSequenceValidator.isInSequence(any(RecordIdentifier.class))).thenReturn(true);
+ lenient().when(mockSequenceValidator.isInSequence(any(RecordIdentifier.class))).thenReturn(true);
when(mockGroupingValidator.isInSequence(any(List.class), anyInt())).thenReturn(true);
coordinator = new MutatorCoordinator(configuration, mockMutatorFactory, mockPartitionHelper, mockGroupingValidator,
@@ -247,7 +248,7 @@ public void closeNoRecords() throws Exception {
coordinator.close();
// No mutator created
- verifyZeroInteractions(mockMutator);
+ verifyNoMoreInteractions(mockMutator);
}
@Test
diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMutatorImpl.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMutatorImpl.java
index d2c89e53ad..5219532d3e 100644
--- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMutatorImpl.java
+++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/mutate/worker/TestMutatorImpl.java
@@ -19,8 +19,8 @@
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@@ -40,7 +40,7 @@
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class TestMutatorImpl {
diff --git hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/ConcurrentJobRequestsTestBase.java hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/ConcurrentJobRequestsTestBase.java
index 05beccbcce..45ad767cf4 100644
--- hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/ConcurrentJobRequestsTestBase.java
+++ hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/ConcurrentJobRequestsTestBase.java
@@ -21,7 +21,6 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
@@ -37,6 +36,11 @@
import org.mockito.Mockito;
import org.mockito.stubbing.Answer;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+
/*
* Base class for mocking job operations with concurrent requests.
*/
@@ -88,8 +92,8 @@ public JobRunnable ConcurrentJobsStatus(final int threadCount, AppConfig appConf
StatusDelegator delegator = new StatusDelegator(appConfig);
final StatusDelegator mockDelegator = Mockito.spy(delegator);
- Mockito.doAnswer(answer).when(mockDelegator).getJobStatus(Mockito.any(String.class),
- Mockito.any(String.class));
+ doAnswer(answer).when(mockDelegator).getJobStatus(any(String.class),
+ any(String.class));
JobRunnable statusJobRunnable = new JobRunnable() {
@Override
@@ -116,9 +120,9 @@ public JobRunnable ConcurrentListJobs(final int threadCount, AppConfig config,
ListDelegator delegator = new ListDelegator(config);
final ListDelegator mockDelegator = Mockito.spy(delegator);
- Mockito.doAnswer(answer).when(mockDelegator).listJobs(Mockito.any(String.class),
- Mockito.any(boolean.class), Mockito.any(String.class),
- Mockito.any(int.class), Mockito.any(boolean.class));
+ doAnswer(answer).when(mockDelegator).listJobs(any(String.class),
+ any(boolean.class), any(String.class),
+ any(int.class), any(boolean.class));
JobRunnable listJobRunnable = new JobRunnable() {
@Override
@@ -149,18 +153,19 @@ public JobRunnable SubmitConcurrentJobs(final int threadCount, AppConfig config,
TempletonControllerJob mockCtrl = Mockito.mock(TempletonControllerJob.class);
- Mockito.doReturn(jobIdResponse).when(mockCtrl).getSubmittedId();
+ doReturn(jobIdResponse).when(mockCtrl).getSubmittedId();
- Mockito.doReturn(mockCtrl).when(mockDelegator).getTempletonController();
+ doReturn(mockCtrl).when(mockDelegator).getTempletonController();
- Mockito.doAnswer(responseAnswer).when(mockDelegator).runTempletonControllerJob(
- Mockito.any(TempletonControllerJob.class), Mockito.any(List.class));
+ doAnswer(responseAnswer).when(mockDelegator).runTempletonControllerJob(
+ any(TempletonControllerJob.class), any(List.class));
- Mockito.doAnswer(timeoutResponseAnswer).when(mockDelegator).killJob(
- Mockito.any(String.class), Mockito.any(String.class));
+ doAnswer(timeoutResponseAnswer).when(mockDelegator).killJob(
+ any(String.class), any(String.class));
- Mockito.doNothing().when(mockDelegator).registerJob(Mockito.any(String.class),
- Mockito.any(String.class), Mockito.any(String.class), Mockito.any(Map.class));
+ // The user-args Map argument can be null, so match it with any() instead of any(Map.class)
+ doNothing().when(mockDelegator).registerJob(any(String.class),
+ any(String.class), any(String.class), any());
JobRunnable submitJobRunnable = new JobRunnable() {
@Override
diff --git hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestConcurrentJobRequestsThreads.java hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestConcurrentJobRequestsThreads.java
index 12a211af3f..4865d1f0d0 100644
--- hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestConcurrentJobRequestsThreads.java
+++ hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestConcurrentJobRequestsThreads.java
@@ -18,10 +18,7 @@
*/
package org.apache.hive.hcatalog.templeton;
-import java.io.IOException;
import java.util.ArrayList;
-import java.util.concurrent.TimeoutException;
-import org.eclipse.jetty.http.HttpStatus;
import org.junit.BeforeClass;
import org.junit.Rule;
diff --git hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestConcurrentJobRequestsThreadsAndTimeout.java hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestConcurrentJobRequestsThreadsAndTimeout.java
index 19d87b6126..d351a98a4c 100644
--- hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestConcurrentJobRequestsThreadsAndTimeout.java
+++ hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestConcurrentJobRequestsThreadsAndTimeout.java
@@ -21,7 +21,6 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.concurrent.TimeoutException;
-import org.eclipse.jetty.http.HttpStatus;
import org.junit.BeforeClass;
import org.junit.Rule;
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java
index 4a64927bad..692d40d289 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java
@@ -104,12 +104,12 @@ public void testAcidTablesReplLoadBootstrapIncr() throws Throwable {
ScheduledQueryExecutionService.startScheduledQueryExecutorService(primary.hiveConf)) {
int next = 0;
ReplDumpWork.injectNextDumpDirForTest(String.valueOf(next));
- primary.run("create scheduled query s1_t1 every 10 minutes as repl dump " + primaryDbName);
- primary.run("alter scheduled query s1_t1 execute");
+ primary.run("create scheduled query s1 every 10 minutes as repl dump " + primaryDbName);
+ primary.run("alter scheduled query s1 execute");
Thread.sleep(6000);
- replica.run("create scheduled query s2_t1 every 10 minutes as repl load " + primaryDbName + " INTO "
+ replica.run("create scheduled query s2 every 10 minutes as repl load " + primaryDbName + " INTO "
+ replicatedDbName);
- replica.run("alter scheduled query s2_t1 execute");
+ replica.run("alter scheduled query s2 execute");
Thread.sleep(20000);
replica.run("use " + replicatedDbName)
.run("show tables like 't1'")
@@ -123,9 +123,9 @@ public void testAcidTablesReplLoadBootstrapIncr() throws Throwable {
.run("insert into t1 values(4)");
next++;
ReplDumpWork.injectNextDumpDirForTest(String.valueOf(next));
- primary.run("alter scheduled query s1_t1 execute");
+ primary.run("alter scheduled query s1 execute");
Thread.sleep(20000);
- replica.run("alter scheduled query s2_t1 execute");
+ replica.run("alter scheduled query s2 execute");
Thread.sleep(20000);
replica.run("use " + replicatedDbName)
.run("show tables like 't1'")
@@ -139,9 +139,9 @@ public void testAcidTablesReplLoadBootstrapIncr() throws Throwable {
.run("insert into t1 values(6)");
next++;
ReplDumpWork.injectNextDumpDirForTest(String.valueOf(next));
- primary.run("alter scheduled query s1_t1 execute");
+ primary.run("alter scheduled query s1 execute");
Thread.sleep(30000);
- replica.run("alter scheduled query s2_t1 execute");
+ replica.run("alter scheduled query s2 execute");
Thread.sleep(30000);
replica.run("use " + replicatedDbName)
.run("show tables like 't1'")
@@ -152,8 +152,8 @@ public void testAcidTablesReplLoadBootstrapIncr() throws Throwable {
} finally {
- primary.run("drop scheduled query s1_t1");
- replica.run("drop scheduled query s2_t1");
+ primary.run("drop scheduled query s1");
+ replica.run("drop scheduled query s2");
}
}
@@ -163,46 +163,46 @@ public void testExternalTablesReplLoadBootstrapIncr() throws Throwable {
String withClause = " WITH('" + HiveConf.ConfVars.REPL_EXTERNAL_TABLE_BASE_DIR.varname
+ "'='/replica_external_base')";
primary.run("use " + primaryDbName)
- .run("create external table t2 (id int)")
- .run("insert into t2 values(1)")
- .run("insert into t2 values(2)");
+ .run("create external table t1 (id int)")
+ .run("insert into t1 values(1)")
+ .run("insert into t1 values(2)");
try (ScheduledQueryExecutionService schqS =
ScheduledQueryExecutionService.startScheduledQueryExecutorService(primary.hiveConf)) {
int next = 0;
ReplDumpWork.injectNextDumpDirForTest(String.valueOf(next));
- primary.run("create scheduled query s1_t2 every 10 minutes as repl dump " + primaryDbName + withClause);
- primary.run("alter scheduled query s1_t2 execute");
+ primary.run("create scheduled query s1 every 10 minutes as repl dump " + primaryDbName + withClause);
+ primary.run("alter scheduled query s1 execute");
Thread.sleep(80000);
- replica.run("create scheduled query s2_t2 every 10 minutes as repl load " + primaryDbName + " INTO "
+ replica.run("create scheduled query s2 every 10 minutes as repl load " + primaryDbName + " INTO "
+ replicatedDbName);
- replica.run("alter scheduled query s2_t2 execute");
+ replica.run("alter scheduled query s2 execute");
Thread.sleep(80000);
replica.run("use " + replicatedDbName)
- .run("show tables like 't2'")
- .verifyResult("t2")
- .run("select id from t2 order by id")
+ .run("show tables like 't1'")
+ .verifyResult("t1")
+ .run("select id from t1 order by id")
.verifyResults(new String[]{"1", "2"});
// First incremental, after bootstrap
primary.run("use " + primaryDbName)
- .run("insert into t2 values(3)")
- .run("insert into t2 values(4)");
+ .run("insert into t1 values(3)")
+ .run("insert into t1 values(4)");
next++;
ReplDumpWork.injectNextDumpDirForTest(String.valueOf(next));
- primary.run("alter scheduled query s1_t2 execute");
+ primary.run("alter scheduled query s1 execute");
Thread.sleep(80000);
- replica.run("alter scheduled query s2_t2 execute");
+ replica.run("alter scheduled query s2 execute");
Thread.sleep(80000);
replica.run("use " + replicatedDbName)
- .run("show tables like 't2'")
- .verifyResult("t2")
- .run("select id from t2 order by id")
+ .run("show tables like 't1'")
+ .verifyResult("t1")
+ .run("select id from t1 order by id")
.verifyResults(new String[]{"1", "2", "3", "4"});
} finally {
- primary.run("drop scheduled query s1_t2");
- replica.run("drop scheduled query s2_t2");
+ primary.run("drop scheduled query s1");
+ replica.run("drop scheduled query s2");
}
}
}
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index 37bb6aded3..0db6eb74e8 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -23,7 +23,7 @@
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
index cf120ea1f2..36ac85b730 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
@@ -19,7 +19,7 @@
package org.apache.hadoop.hive.ql.security.authorization.plugin;
import static org.junit.Assert.assertEquals;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
import java.util.ArrayList;
import java.util.Arrays;
@@ -71,8 +71,11 @@
* HiveAuthorizer.filterListCmdObjects, and stores the list argument in
* filterArguments
*/
- protected static class MockedHiveAuthorizerFactory implements HiveAuthorizerFactory {
- protected abstract class AuthorizerWithFilterCmdImpl implements HiveAuthorizer {
+ public static class MockedHiveAuthorizerFactory implements HiveAuthorizerFactory {
+ /**
+ * Abstract base implementation of the HiveAuthorizer interface for Hive authorization plugins.
+ */
+ public abstract class AuthorizerWithFilterCmdImpl implements HiveAuthorizer {
@Override
public List filterListCmdObjects(List listObjs,
HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
index 45b22f9514..3973ec9270 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
@@ -40,7 +40,6 @@
import java.util.UUID;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.ddl.process.kill.KillQueriesOperation;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
@@ -785,8 +784,6 @@ public void run() {
con2.close();
assertNotNull("tExecute", tExecuteHolder.throwable);
- assertEquals(HiveStatement.QUERY_CANCELLED_MESSAGE + " "+ KillQueriesOperation.KILL_QUERY_MESSAGE,
- tExecuteHolder.throwable.getMessage());
assertNull("tCancel", tKillHolder.throwable);
}
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
index 1aab03d08f..68a515ccbe 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.llap.FieldDesc;
import org.apache.hadoop.hive.llap.LlapBaseInputFormat;
import org.apache.hadoop.hive.llap.Row;
-import org.apache.hadoop.hive.ql.ddl.process.kill.KillQueriesOperation;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
@@ -413,8 +412,6 @@ public void testKillQueryById() throws Exception {
testKillQueryInternal(System.getProperty("user.name"), System.getProperty("user.name"), false,
tExecuteHolder, tKillHolder);
assertNotNull("tExecute", tExecuteHolder.throwable);
- assertEquals(HiveStatement.QUERY_CANCELLED_MESSAGE + " "+ KillQueriesOperation.KILL_QUERY_MESSAGE,
- tExecuteHolder.throwable.getMessage());
assertNull("tCancel", tKillHolder.throwable);
}
@@ -434,8 +431,6 @@ public void testKillQueryByTagAdmin() throws Exception {
ExceptionHolder tKillHolder = new ExceptionHolder();
testKillQueryInternal("user1", System.getProperty("user.name"), true, tExecuteHolder, tKillHolder);
assertNotNull("tExecute", tExecuteHolder.throwable);
- assertEquals(HiveStatement.QUERY_CANCELLED_MESSAGE + " "+ KillQueriesOperation.KILL_QUERY_MESSAGE,
- tExecuteHolder.throwable.getMessage());
assertNull("tCancel", tKillHolder.throwable);
}
@@ -445,8 +440,6 @@ public void testKillQueryByTagOwner() throws Exception {
ExceptionHolder tKillHolder = new ExceptionHolder();
testKillQueryInternal("user1", "user1", true, tExecuteHolder, tKillHolder);
assertNotNull("tExecute", tExecuteHolder.throwable);
- assertEquals(HiveStatement.QUERY_CANCELLED_MESSAGE + " "+ KillQueriesOperation.KILL_QUERY_MESSAGE,
- tExecuteHolder.throwable.getMessage());
assertNull("tCancel", tKillHolder.throwable);
}
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithServiceDiscovery.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithServiceDiscovery.java
deleted file mode 100644
index 1621e7e52c..0000000000
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithServiceDiscovery.java
+++ /dev/null
@@ -1,326 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.jdbc;
-
-import org.apache.curator.test.TestingServer;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ddl.process.kill.KillQueriesOperation;
-import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.registry.impl.ZkRegistryBase;
-import org.apache.hive.jdbc.miniHS2.MiniHS2;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.net.URL;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-/**
- * Test JDBC driver when two HS2 instance is running with service discovery enabled.
- */
-public class TestJdbcWithServiceDiscovery {
-
- private static final Logger LOG = LoggerFactory.getLogger(TestJdbcWithServiceDiscovery.class);
- private static final String TABLE_NAME = "testJdbcMinihs2Tbl";
- private static final String DB_NAME = "testJdbcMinihs2";
- private static final String REMOTE_ERROR_MESSAGE = "Unable to kill query locally or on remote servers.";
-
- private static TestingServer zkServer;
- private static MiniHS2 miniHS2server1;
- private static MiniHS2 miniHS2server2;
- private static String miniHS2directUrl1;
- private static String miniHS2directUrl2;
- private static Path kvDataFilePath;
-
- @BeforeClass
- public static void setup() throws Exception {
- MiniHS2.cleanupLocalDir();
- zkServer = new TestingServer();
-
- // Create one MiniHS2 with Tez and one with Local FS only
- HiveConf hiveConf1 = getTezConf();
- HiveConf hiveConf2 = new HiveConf();
-
- setSDConfigs(hiveConf1);
- setSDConfigs(hiveConf2);
-
- miniHS2server1 = new MiniHS2.Builder().withConf(hiveConf1).withMiniTez().build();
- miniHS2server2 = new MiniHS2.Builder().withConf(hiveConf2).cleanupLocalDirOnStartup(false).build();
-
- Class.forName(MiniHS2.getJdbcDriverName());
- String instanceId1 = UUID.randomUUID().toString();
- miniHS2server1.start(getConfOverlay(instanceId1));
- miniHS2directUrl1 =
- "jdbc:hive2://" + miniHS2server1.getHost() + ":" + miniHS2server1.getBinaryPort() + "/" + DB_NAME;
- String instanceId2 = UUID.randomUUID().toString();
- miniHS2server2.start(getConfOverlay(instanceId2));
- miniHS2directUrl2 =
- "jdbc:hive2://" + miniHS2server2.getHost() + ":" + miniHS2server2.getBinaryPort() + "/" + DB_NAME;
-
- String dataFileDir = hiveConf1.get("test.data.files").replace('\\', '/').replace("c:", "");
- kvDataFilePath = new Path(dataFileDir, "kv1.txt");
-
- setupDb();
- }
-
- /**
- * SleepMsUDF.
- */
- public static class SleepMsUDF extends UDF {
- public Integer evaluate(int value, int ms) {
- try {
- Thread.sleep(ms);
- } catch (InterruptedException e) {
- // No-op
- }
- return value;
- }
- }
-
- public static void setupDb() throws Exception {
- Connection conDefault = DriverManager
- .getConnection("jdbc:hive2://" + miniHS2server1.getHost() + ":" + miniHS2server1.getBinaryPort() + "/default",
- System.getProperty("user.name"), "bar");
- Statement stmt = conDefault.createStatement();
- String tblName = DB_NAME + "." + TABLE_NAME;
- stmt.execute("drop database if exists " + DB_NAME + " cascade");
- stmt.execute("create database " + DB_NAME);
- stmt.execute("use " + DB_NAME);
- stmt.execute("create table " + tblName + " (int_col int, value string) ");
- stmt.execute("load data local inpath '" + kvDataFilePath.toString() + "' into table " + tblName);
- stmt.execute("grant select on table " + tblName + " to role public");
-
- stmt.close();
- conDefault.close();
- }
-
- @AfterClass
- public static void afterTest() throws Exception {
- if ((miniHS2server1 != null) && miniHS2server1.isStarted()) {
- try {
- miniHS2server1.stop();
- } catch (Exception e) {
- LOG.warn("Error why shutting down Hs2", e);
- }
- }
- if ((miniHS2server2 != null) && miniHS2server2.isStarted()) {
- try {
- miniHS2server2.stop();
- } catch (Exception e) {
- LOG.warn("Error why shutting down Hs2", e);
- }
- }
- if (zkServer != null) {
- zkServer.close();
- zkServer = null;
- }
- MiniHS2.cleanupLocalDir();
- }
-
- private static HiveConf getTezConf() throws Exception {
- String confDir = "../../data/conf/tez/";
- HiveConf.setHiveSiteLocation(new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml"));
- System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation());
- HiveConf defaultConf = new HiveConf();
- defaultConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
- defaultConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
- defaultConf.addResource(new URL("file://" + new File(confDir).toURI().getPath() + "/tez-site.xml"));
- return defaultConf;
- }
-
- private static void setSDConfigs(HiveConf conf) {
- conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
- conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY, true);
- conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, zkServer.getConnectString());
- conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_ENABLE, false);
- conf.setTimeVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CONNECTION_TIMEOUT, 2, TimeUnit.SECONDS);
- conf.setTimeVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CONNECTION_BASESLEEPTIME, 100, TimeUnit.MILLISECONDS);
- conf.setIntVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CONNECTION_MAX_RETRIES, 1);
- conf.setBoolVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_KILLQUERY_ENABLE, true);
- }
-
- private static Map getConfOverlay(final String instanceId) {
- Map confOverlay = new HashMap<>();
- confOverlay.put("hive.server2.zookeeper.publish.configs", "true");
- confOverlay.put(ZkRegistryBase.UNIQUE_IDENTIFIER, instanceId);
- return confOverlay;
- }
-
- private static class ExceptionHolder {
- Throwable throwable;
- }
-
- private void executeQueryAndKill(Connection con1, Connection con2, ExceptionHolder tExecuteHolder,
- ExceptionHolder tKillHolder) throws SQLException, InterruptedException {
- final HiveStatement stmt = (HiveStatement) con1.createStatement();
- final Statement stmt2 = con2.createStatement();
- final StringBuffer stmtQueryId = new StringBuffer();
-
- // Thread executing the query
- Thread tExecute = new Thread(() -> {
- try {
- LOG.info("Executing waiting query.");
- // The test table has 500 rows, so total query time should be ~ 500*500ms
- stmt.executeAsync(
- "select sleepMsUDF(t1.int_col, 10), t1.int_col, t2.int_col " + "from " + TABLE_NAME + " t1 join "
- + TABLE_NAME + " t2 on t1.int_col = t2.int_col");
- stmtQueryId.append(stmt.getQueryId());
- stmt.getUpdateCount();
- } catch (SQLException e) {
- tExecuteHolder.throwable = e;
- }
- });
-
- tExecute.start();
-
- // wait for other thread to create the stmt handle
- int count = 0;
- while (count < 10) {
- try {
- Thread.sleep(2000);
- String queryId;
- if (stmtQueryId.length() != 0) {
- queryId = stmtQueryId.toString();
- } else {
- count++;
- continue;
- }
-
- LOG.info("Killing query: " + queryId);
- stmt2.execute("kill query '" + queryId + "'");
- stmt2.close();
- break;
- } catch (SQLException e) {
- LOG.warn("Exception when kill query", e);
- tKillHolder.throwable = e;
- break;
- }
- }
-
- tExecute.join();
- try {
- stmt.close();
- con1.close();
- con2.close();
- } catch (Exception e) {
- LOG.warn("Exception when close stmt and con", e);
- }
- }
-
- @Test
- public void testKillQueryWithSameServer() throws Exception {
- Connection con1 = DriverManager.getConnection(miniHS2directUrl1, System.getProperty("user.name"), "bar");
- Connection con2 = DriverManager.getConnection(miniHS2directUrl1, System.getProperty("user.name"), "bar");
-
- Statement stmt = con1.createStatement();
- stmt.execute("create temporary function sleepMsUDF as '" + SleepMsUDF.class.getName() + "'");
- stmt.close();
-
- ExceptionHolder tExecuteHolder = new ExceptionHolder();
- ExceptionHolder tKillHolder = new ExceptionHolder();
-
- executeQueryAndKill(con1, con2, tExecuteHolder, tKillHolder);
-
- assertNotNull("tExecute", tExecuteHolder.throwable);
- assertEquals("Query was cancelled. User invoked KILL QUERY", tExecuteHolder.throwable.getMessage());
- assertNull("tCancel", tKillHolder.throwable);
- }
-
- @Test
- public void testKillQueryWithDifferentServer() throws Exception {
- Connection con1 = DriverManager.getConnection(miniHS2directUrl1, System.getProperty("user.name"), "bar");
- Connection con2 = DriverManager.getConnection(miniHS2directUrl2, System.getProperty("user.name"), "bar");
-
- Statement stmt = con1.createStatement();
- stmt.execute("create temporary function sleepMsUDF as '" + SleepMsUDF.class.getName() + "'");
- stmt.close();
-
- ExceptionHolder tExecuteHolder = new ExceptionHolder();
- ExceptionHolder tKillHolder = new ExceptionHolder();
-
- executeQueryAndKill(con1, con2, tExecuteHolder, tKillHolder);
-
- assertNotNull("tExecute", tExecuteHolder.throwable);
- assertEquals(HiveStatement.QUERY_CANCELLED_MESSAGE + " " + KillQueriesOperation.KILL_QUERY_MESSAGE,
- tExecuteHolder.throwable.getMessage());
- assertNull("tCancel", tKillHolder.throwable);
- }
-
- @Test
- public void testKillQueryWithDifferentServerZKTurnedOff() throws Exception {
- Connection con1 = DriverManager.getConnection(miniHS2directUrl1, System.getProperty("user.name"), "bar");
- Connection con2 = DriverManager.getConnection(miniHS2directUrl2, System.getProperty("user.name"), "bar");
-
- Statement stmt = con1.createStatement();
- stmt.execute("create temporary function sleepMsUDF as '" + SleepMsUDF.class.getName() + "'");
- stmt.close();
-
- stmt = con2.createStatement();
- stmt.execute("set hive.zookeeper.killquery.enable = false");
- stmt.close();
-
- ExceptionHolder tExecuteHolder = new ExceptionHolder();
- ExceptionHolder tKillHolder = new ExceptionHolder();
-
- executeQueryAndKill(con1, con2, tExecuteHolder, tKillHolder);
-
- assertNull("tExecute", tExecuteHolder.throwable);
- assertNull("tCancel", tKillHolder.throwable);
- }
-
- @Test
- public void testKillQueryWithRandomId() throws Exception {
- Connection con1 = DriverManager.getConnection(miniHS2directUrl1, System.getProperty("user.name"), "bar");
- ExceptionHolder tKillHolder = new ExceptionHolder();
-
- Statement stmt = con1.createStatement();
- String queryId = "randomId123";
- try {
- LOG.info("Killing query: " + queryId);
- stmt.execute("kill query '" + queryId + "'");
- stmt.close();
- } catch (SQLException e) {
- LOG.warn("Exception when kill query", e);
- tKillHolder.throwable = e;
- }
- try {
- con1.close();
- } catch (Exception e) {
- LOG.warn("Exception when close stmt and con", e);
- }
-
- assertNotNull("tCancel", tKillHolder.throwable);
- assertTrue(tKillHolder.throwable.getMessage(), tKillHolder.throwable.getMessage().contains(REMOTE_ERROR_MESSAGE));
- }
-}
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
index 4b1a101124..977fe43320 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
@@ -18,37 +18,33 @@
package org.apache.hive.jdbc.authorization;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.verify;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.HashMap;
-
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
-import org.mockito.Matchers;
import org.mockito.Mockito;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.Statement;
+import java.util.HashMap;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyList;
+import static org.mockito.Mockito.verify;
/**
* Test context information that gets passed to authorization api
*/
@@ -103,8 +99,7 @@ public void testAuthzContextContentsCmdProcessorCmd() throws Exception {
verifyContextContents("dfs -ls /", "-ls /");
}
- private void verifyContextContents(final String cmd, String ctxCmd) throws Exception,
- HiveAuthzPluginException, HiveAccessControlException {
+ private void verifyContextContents(final String cmd, String ctxCmd) throws Exception {
Connection hs2Conn = getConnection("user1");
Statement stmt = hs2Conn.createStatement();
@@ -116,8 +111,7 @@ private void verifyContextContents(final String cmd, String ctxCmd) throws Excep
.forClass(HiveAuthzContext.class);
verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class),
- Matchers.anyListOf(HivePrivilegeObject.class),
- Matchers.anyListOf(HivePrivilegeObject.class), contextCapturer.capture());
+ anyList(), any(), contextCapturer.capture());
HiveAuthzContext context = contextCapturer.getValue();
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestHiveSessionImpl.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestHiveSessionImpl.java
index 1ee3a501eb..a5784d4188 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestHiveSessionImpl.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestHiveSessionImpl.java
@@ -18,6 +18,9 @@
package org.apache.hive.service.cli.session;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.same;
+
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationHandle;
@@ -27,7 +30,6 @@
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
-import static org.mockito.Matchers.*;
import java.util.Arrays;
import java.util.HashMap;
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestMiniHS2StateWithNoZookeeper.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestMiniHS2StateWithNoZookeeper.java
index 0df3058359..99e681e5b2 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestMiniHS2StateWithNoZookeeper.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestMiniHS2StateWithNoZookeeper.java
@@ -50,14 +50,11 @@
private static HiveConf hiveConf = null;
@BeforeClass
- public static void beforeTest() throws Exception {
- MiniHS2.cleanupLocalDir();
+ public static void beforeTest() throws Exception {
hiveConf = new HiveConf();
hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY, true);
hiveConf.setIntVar(ConfVars.HIVE_ZOOKEEPER_CONNECTION_MAX_RETRIES, 0);
hiveConf.setTimeVar(ConfVars.HIVE_ZOOKEEPER_CONNECTION_BASESLEEPTIME, 0, TimeUnit.MILLISECONDS);
- // Disable killquery, this way only HS2 start will fail, not the SessionManager service
- hiveConf.setBoolVar(ConfVars.HIVE_ZOOKEEPER_KILLQUERY_ENABLE, false);
miniHS2 = new MiniHS2(hiveConf);
Map confOverlay = new HashMap();
try {
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
index 901286775d..c073ace081 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
@@ -20,7 +20,7 @@
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import java.io.IOException;
@@ -45,7 +45,6 @@
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hive.jdbc.HttpBasicAuthInterceptor;
import org.apache.hive.service.auth.HiveAuthConstants;
-import org.apache.hive.service.auth.HiveAuthConstants.AuthTypes;
import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.hive.service.rpc.thrift.TExecuteStatementReq;
import org.apache.hive.service.rpc.thrift.TOpenSessionReq;
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/server/TestKillQueryZookeeperManager.java itests/hive-unit/src/test/java/org/apache/hive/service/server/TestKillQueryZookeeperManager.java
deleted file mode 100644
index d9997a9c49..0000000000
--- itests/hive-unit/src/test/java/org/apache/hive/service/server/TestKillQueryZookeeperManager.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.server;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.RetryOneTime;
-import org.apache.curator.test.TestingServer;
-import org.apache.curator.utils.CloseableUtils;
-import org.apache.zookeeper.KeeperException;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Tests for {@link KillQueryZookeeperManager}.
- */
-public class TestKillQueryZookeeperManager {
-
- private static final Logger LOG = LoggerFactory.getLogger(TestKillQueryZookeeperManager.class);
- private static final String BARRIER_ROOT_PATH = "/killqueries";
- private static final String QUERYID = "QUERY1";
- private static final String SERVER1 = "localhost:1234";
- private static final String SERVER2 = "localhost:1235";
- private static final String USER = "user";
- private static final int TIMEOUT = 1000;
-
- TestingServer server;
-
- @Before
- public void setupZookeeper() throws Exception {
- server = new TestingServer();
- }
-
- @After
- public void shutdown() {
- if (server != null) {
- CloseableUtils.closeQuietly(server);
- }
- }
-
- private CuratorFramework getClient() {
- return CuratorFrameworkFactory.builder().connectString(server.getConnectString()).sessionTimeoutMs(TIMEOUT * 100)
- .connectionTimeoutMs(TIMEOUT).retryPolicy(new RetryOneTime(1)).build();
- }
-
- @Test
- public void testBarrierServerCrash() throws Exception {
- try (CuratorFramework client = getClient()) {
- client.start();
- client.create().creatingParentContainersIfNeeded().forPath(BARRIER_ROOT_PATH);
- final KillQueryZookeeperManager.KillQueryZookeeperBarrier barrier =
- new KillQueryZookeeperManager.KillQueryZookeeperBarrier(client, BARRIER_ROOT_PATH);
- barrier.setBarrier(QUERYID, SERVER1, USER, true);
-
- final ExecutorService service = Executors.newSingleThreadExecutor();
- Future