diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index 901a4ede9a..eeb81dad25 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -1542,8 +1542,8 @@ public void testFailureRangerReplication() throws Throwable {
}
/*
-Can't test complete replication as mini ranger is not supported
-Testing just the configs and no impact on existing replication
+ Can't test complete replication as mini ranger is not supported
+ Testing just the configs and no impact on existing replication
*/
@Test
public void testFailureUnsupportedAuthorizerReplication() throws Throwable {
diff --git a/ql/pom.xml b/ql/pom.xml
index d6dc7cedf8..9bf7b902b9 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -821,8 +821,12 @@
      <artifactId>jersey-multipart</artifactId>
      <version>${jersey.version}</version>
+    <dependency>
+      <groupId>com.google.code.findbugs</groupId>
+      <artifactId>findbugs-annotations</artifactId>
+      <version>3.0.1</version>
+    </dependency>
-
sources
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java
index f9d3de7531..5b3ea90956 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java
@@ -30,6 +30,8 @@
import org.apache.hadoop.hive.ql.exec.repl.ranger.NoOpRangerRestClient;
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerRestClientImpl;
import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
+import org.apache.hadoop.hive.ql.parse.repl.dump.log.RangerDumpLogger;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -52,6 +54,8 @@
private transient RangerRestClient rangerRestClient;
+ private transient ReplLogger replLogger;
+
public RangerDumpTask() {
super();
}
@@ -83,6 +87,8 @@ public int execute() {
+ "Please pass a valid config hive.repl.authorization.provider.service.endpoint");
}
String rangerHiveServiceName = conf.getVar(REPL_RANGER_SERVICE_NAME);
+ replLogger = new RangerDumpLogger(work.getDbName(), work.getCurrentDumpPath().toString());
+ replLogger.startLog();
RangerExportPolicyList rangerExportPolicyList = rangerRestClient.exportRangerPolicies(rangerEndpoint,
work.getDbName(), rangerHiveServiceName);
List<RangerPolicy> rangerPolicies = rangerExportPolicyList.getPolicies();
@@ -101,6 +107,7 @@ public int execute() {
exportCount = rangerExportPolicyList.getListSize();
}
}
+ replLogger.endLog(exportCount);
LOG.debug("Ranger policy export filePath:" + filePath);
LOG.info("Number of ranger policies exported {}", exportCount);
return 0;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java
index 5497d28eff..a695cd23c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java
@@ -30,6 +30,8 @@
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerPolicy;
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerExportPolicyList;
import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
+import org.apache.hadoop.hive.ql.parse.repl.load.log.RangerLoadLogger;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -53,6 +55,8 @@
private transient RangerRestClient rangerRestClient;
+ private transient ReplLogger replLogger;
+
public RangerLoadTask() {
super();
}
@@ -87,6 +91,10 @@ public int execute() {
LOG.info("Importing Ranger Metadata from {} ", work.getCurrentDumpPath());
rangerExportPolicyList = rangerRestClient.readRangerPoliciesFromJsonFile(new Path(work.getCurrentDumpPath(),
ReplUtils.HIVE_RANGER_POLICIES_FILE_NAME), conf);
+ int expectedPolicyCount = rangerExportPolicyList == null ? 0 : rangerExportPolicyList.getListSize();
+ replLogger = new RangerLoadLogger(work.getSourceDbName(), work.getTargetDbName(),
+ work.getCurrentDumpPath().toString(), expectedPolicyCount);
+ replLogger.startLog();
if (rangerExportPolicyList != null && !CollectionUtils.isEmpty(rangerExportPolicyList.getPolicies())) {
rangerPolicies = rangerExportPolicyList.getPolicies();
}
@@ -108,6 +116,7 @@ public int execute() {
conf.getVar(REPL_RANGER_SERVICE_NAME));
LOG.info("Number of ranger policies imported {}", rangerExportPolicyList.getListSize());
importCount = rangerExportPolicyList.getListSize();
+ replLogger.endLog(importCount);
LOG.info("Ranger policy import finished {} ", importCount);
}
return 0;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/NoOpRangerRestClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/NoOpRangerRestClient.java
index 4e3fa61c42..b0fdff434f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/NoOpRangerRestClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/NoOpRangerRestClient.java
@@ -61,7 +61,7 @@ public Path saveRangerPoliciesToFile(RangerExportPolicyList rangerExportPolicyLi
@Override
public RangerExportPolicyList readRangerPoliciesFromJsonFile(Path filePath, HiveConf conf) throws SemanticException {
- return null;
+ return new RangerExportPolicyList();
}
@Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java
index 645b2c67e8..449b882480 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java
@@ -19,13 +19,19 @@
import org.apache.hadoop.hive.metastore.TableType;
-public abstract class ReplLogger {
+/**
+ * ReplLogger.
+ *
+ * Logger class for Repl Events.
+ **/
+public abstract class ReplLogger<T> {
public ReplLogger() {
}
public abstract void startLog();
- public abstract void endLog(String lastReplId);
+
+ public abstract void endLog(T lastReplId);
public void tableLog(String tableName, TableType tableType) {
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java
index 6429866d34..e44115317b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java
@@ -23,6 +23,11 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+/**
+ * ReplState.
+ *
+ * Logger class for Repl Events.
+ **/
public abstract class ReplState {
@JsonIgnoreProperties
private static final Logger REPL_LOG = LoggerFactory.getLogger("ReplState");
@@ -41,9 +46,13 @@
TABLE_DUMP,
FUNCTION_DUMP,
EVENT_DUMP,
+ RANGER_DUMP_START,
+ RANGER_DUMP_END,
TABLE_LOAD,
FUNCTION_LOAD,
EVENT_LOAD,
+ RANGER_LOAD_START,
+ RANGER_LOAD_END,
END
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/BootstrapDumpLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/BootstrapDumpLogger.java
index 198b94fd9b..e0b262950c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/BootstrapDumpLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/BootstrapDumpLogger.java
@@ -25,7 +25,7 @@
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
-public class BootstrapDumpLogger extends ReplLogger {
+public class BootstrapDumpLogger extends ReplLogger<String> {
private String dbName;
private String dumpDir;
private long estimatedNumTables;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/IncrementalDumpLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/IncrementalDumpLogger.java
index f5c0837510..4d4278828c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/IncrementalDumpLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/IncrementalDumpLogger.java
@@ -23,7 +23,7 @@
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
-public class IncrementalDumpLogger extends ReplLogger {
+public class IncrementalDumpLogger extends ReplLogger<String> {
private String dbName;
private String dumpDir;
private long estimatedNumEvents;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/RangerDumpLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/RangerDumpLogger.java
new file mode 100644
index 0000000000..1f48645a3f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/RangerDumpLogger.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.dump.log;
+
+import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
+import org.apache.hadoop.hive.ql.parse.repl.dump.log.state.RangerDumpBegin;
+import org.apache.hadoop.hive.ql.parse.repl.dump.log.state.RangerDumpEnd;
+
+/**
+ * RangerDumpLogger.
+ *
+ * Repllogger for Ranger Dump.
+ **/
+public class RangerDumpLogger extends ReplLogger<Long> {
+ private String dbName;
+ private String dumpDir;
+
+ public RangerDumpLogger(String dbName, String dumpDir) {
+ this.dbName = dbName;
+ this.dumpDir = dumpDir;
+ }
+
+ @Override
+ public void startLog() {
+ new RangerDumpBegin(dbName).log(LogTag.RANGER_DUMP_START);
+ }
+
+ @Override
+ public void endLog(Long count) {
+ new RangerDumpEnd(dbName, count, dumpDir).log(LogTag.RANGER_DUMP_END);
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpBegin.java
new file mode 100644
index 0000000000..114de86837
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpBegin.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.dump.log.state;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.codehaus.jackson.annotate.JsonProperty;
+
+/**
+ * RangerDumpBegin.
+ *
+ * ReplState to define Ranger Dump Start.
+ **/
+public class RangerDumpBegin extends ReplState {
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String dbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long dumpStartTime;
+
+ public RangerDumpBegin(String dbName) {
+ this.dbName = dbName;
+ this.dumpStartTime = System.currentTimeMillis() / 1000;
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpEnd.java
new file mode 100644
index 0000000000..8e4dbc7bb8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpEnd.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.dump.log.state;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.codehaus.jackson.annotate.JsonProperty;
+
+/**
+ * RangerDumpEnd.
+ *
+ * ReplState to define Ranger Dump End.
+ **/
+public class RangerDumpEnd extends ReplState {
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String dbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long actualNumPolicies;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long dumpEndTime;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String dumpDir;
+
+ public RangerDumpEnd(String dbName,
+ long actualNumPolicies,
+ String dumpDir) {
+ this.dbName = dbName;
+ this.actualNumPolicies = actualNumPolicies;
+ this.dumpEndTime = System.currentTimeMillis() / 1000;
+ this.dumpDir = dumpDir;
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/BootstrapLoadLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/BootstrapLoadLogger.java
index cf35826f83..f96d74f587 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/BootstrapLoadLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/BootstrapLoadLogger.java
@@ -22,7 +22,7 @@
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
-public class BootstrapLoadLogger extends ReplLogger {
+public class BootstrapLoadLogger extends ReplLogger<String> {
private String dbName;
private String dumpDir;
private long numTables;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java
index 77db6edb2f..21bf7f778d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java
@@ -23,7 +23,7 @@
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
-public class IncrementalLoadLogger extends ReplLogger {
+public class IncrementalLoadLogger extends ReplLogger<String> {
private String dbName;
private String dumpDir;
private long numEvents;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/RangerLoadLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/RangerLoadLogger.java
new file mode 100644
index 0000000000..1eee3af6b8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/RangerLoadLogger.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.load.log;
+
+import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
+import org.apache.hadoop.hive.ql.parse.repl.load.log.state.RangerLoadBegin;
+import org.apache.hadoop.hive.ql.parse.repl.load.log.state.RangerLoadEnd;
+
+/**
+ * RangerLoadLogger.
+ *
+ * Repllogger for Ranger Load.
+ **/
+public class RangerLoadLogger extends ReplLogger<Long> {
+ private String sourceDbName;
+ private String targetDbName;
+ private String dumpDir;
+ private long estimatedNumPolicies;
+
+ public RangerLoadLogger(String sourceDbName, String targetDbName, String dumpDir, long estimatedNumPolicies) {
+ this.sourceDbName = sourceDbName;
+ this.targetDbName = targetDbName;
+ this.estimatedNumPolicies = estimatedNumPolicies;
+ this.dumpDir = dumpDir;
+ }
+
+ @Override
+ public void startLog() {
+ new RangerLoadBegin(sourceDbName, targetDbName, estimatedNumPolicies).log(LogTag.RANGER_LOAD_START);
+ }
+
+ @Override
+ public void endLog(Long count) {
+ new RangerLoadEnd(sourceDbName, targetDbName, count, dumpDir).log(LogTag.RANGER_LOAD_END);
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadBegin.java
new file mode 100644
index 0000000000..16f6d96963
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadBegin.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.load.log.state;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.codehaus.jackson.annotate.JsonProperty;
+
+/**
+ * RangerLoadBegin.
+ *
+ * ReplState to define Ranger Load Begin.
+ **/
+public class RangerLoadBegin extends ReplState {
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String sourceDbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String targetDbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long estimatedNumPolicies;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long loadStartTime;
+
+ public RangerLoadBegin(String sourceDbName, String targetDbName, long estimatedNumPolicies) {
+ this.sourceDbName = sourceDbName;
+ this.targetDbName = targetDbName;
+ this.estimatedNumPolicies = estimatedNumPolicies;
+ this.loadStartTime = System.currentTimeMillis() / 1000;
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadEnd.java
new file mode 100644
index 0000000000..3317f081fc
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadEnd.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.load.log.state;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.codehaus.jackson.annotate.JsonProperty;
+
+/**
+ * RangerLoadEnd.
+ *
+ * ReplState to define Ranger Load End.
+ **/
+public class RangerLoadEnd extends ReplState {
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String sourceDbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String targetDbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long actualNumPolicies;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long loadEndTime;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String dumpDir;
+
+ public RangerLoadEnd(String sourceDbName,
+ String targetDbName,
+ long actualNumPolicies,
+ String dumpDir) {
+ this.sourceDbName = sourceDbName;
+ this.targetDbName = targetDbName;
+ this.actualNumPolicies = actualNumPolicies;
+ this.loadEndTime = System.currentTimeMillis() / 1000;
+ this.dumpDir = dumpDir;
+ }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java
index 89cec533d8..8ef09876af 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java
@@ -25,13 +25,17 @@
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerRestClientImpl;
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerPolicy;
import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+import org.powermock.reflect.Whitebox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -43,10 +47,10 @@
/**
* Unit test class for testing Ranger Dump.
*/
-@RunWith(MockitoJUnitRunner.class)
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({LoggerFactory.class})
public class TestRangerDumpTask {
- protected static final Logger LOG = LoggerFactory.getLogger(TestRangerDumpTask.class);
private RangerDumpTask task;
@Mock
@@ -79,8 +83,9 @@ public void testSuccessValidAuthProviderEndpoint() throws Exception {
Mockito.when(mockClient.exportRangerPolicies(Mockito.anyString(), Mockito.anyString(), Mockito.anyString()))
.thenReturn(rangerPolicyList);
Mockito.when(conf.getVar(REPL_AUTHORIZATION_PROVIDER_SERVICE_ENDPOINT)).thenReturn("rangerEndpoint");
- Mockito.when(conf.getVar(REPL_RANGER_SERVICE_NAME)).thenReturn("cm_hive");
+ Mockito.when(conf.getVar(REPL_RANGER_SERVICE_NAME)).thenReturn("hive");
Mockito.when(work.getDbName()).thenReturn("testdb");
+ Mockito.when(work.getCurrentDumpPath()).thenReturn(new Path("/tmp"));
int status = task.execute();
Assert.assertEquals(0, status);
}
@@ -102,7 +107,7 @@ public void testSuccessNonEmptyRangerPolicies() throws Exception {
Mockito.when(mockClient.exportRangerPolicies(Mockito.anyString(), Mockito.anyString(), Mockito.anyString()))
.thenReturn(rangerPolicyList);
Mockito.when(conf.getVar(REPL_AUTHORIZATION_PROVIDER_SERVICE_ENDPOINT)).thenReturn("rangerEndpoint");
- Mockito.when(conf.getVar(REPL_RANGER_SERVICE_NAME)).thenReturn("cm_hive");
+ Mockito.when(conf.getVar(REPL_RANGER_SERVICE_NAME)).thenReturn("hive");
Mockito.when(work.getDbName()).thenReturn("testdb");
Path rangerDumpPath = new Path("/tmp");
Mockito.when(work.getCurrentDumpPath()).thenReturn(rangerDumpPath);
@@ -112,4 +117,33 @@ public void testSuccessNonEmptyRangerPolicies() throws Exception {
int status = task.execute();
Assert.assertEquals(0, status);
}
+
+ @Test
+ public void testSuccessRangerDumpMetrics() throws Exception {
+ Logger logger = Mockito.mock(Logger.class);
+ Whitebox.setInternalState(ReplState.class, logger);
+ RangerExportPolicyList rangerPolicyList = new RangerExportPolicyList();
+ rangerPolicyList.setPolicies(new ArrayList<RangerPolicy>());
+ Mockito.when(mockClient.exportRangerPolicies(Mockito.anyString(), Mockito.anyString(), Mockito.anyString()))
+ .thenReturn(rangerPolicyList);
+ Mockito.when(conf.getVar(REPL_AUTHORIZATION_PROVIDER_SERVICE_ENDPOINT)).thenReturn("rangerEndpoint");
+ Mockito.when(conf.getVar(REPL_RANGER_SERVICE_NAME)).thenReturn("hive");
+ Mockito.when(work.getDbName()).thenReturn("testdb");
+ Mockito.when(work.getCurrentDumpPath()).thenReturn(new Path("/tmp"));
+ int status = task.execute();
+ Assert.assertEquals(0, status);
+ ArgumentCaptor<String> replStateCaptor = ArgumentCaptor.forClass(String.class);
+ ArgumentCaptor