diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java index 1a0d8e1..df13346 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java @@ -18,14 +18,14 @@ package org.apache.hadoop.hive.common.jsonexplain; -import java.io.PrintStream; +import org.codehaus.jackson.JsonNode; -import org.json.JSONObject; +import java.io.PrintStream; /** * JsonParser is the interface for classes that print a JSONObject * into outputStream. */ public interface JsonParser { - public void print(JSONObject inputObject, PrintStream outputStream) throws Exception; + public void print(JsonNode inputObject, PrintStream outputStream) throws Exception; } diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java index 718791c..d3d219c 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java @@ -18,18 +18,12 @@ package org.apache.hadoop.hive.common.jsonexplain.tez; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; -import java.util.Set; import org.apache.hadoop.hive.common.jsonexplain.tez.Vertex.VertexType; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ArrayNode; public final class Op { public final String name; @@ -40,7 +34,7 @@ public final List children; public final Map attrs; // the jsonObject for this operator - public final JSONObject opObject; + public final JsonNode opObject; // the vertex that this operator belongs to public final Vertex vertex; // the vertex that this operator output to @@ -53,8 +47,7 @@ }; public Op(String name, String id, String outputVertexName, List children, - Map attrs, JSONObject opObject, Vertex vertex, TezJsonParser tezJsonParser) - throws JSONException { + Map attrs, JsonNode opObject, Vertex vertex, TezJsonParser tezJsonParser) { super(); this.name = name; this.operatorId = id; @@ -86,12 +79,14 @@ private OpType deriveOpType(String operatorId) { private void inlineJoinOp() throws Exception { // inline map join operator if (this.type == OpType.MAPJOIN) { - JSONObject joinObj = opObject.getJSONObject(this.name); + JsonNode joinObj = opObject.get(this.name); // get the map for posToVertex - JSONObject verticeObj = joinObj.getJSONObject("input vertices:"); + JsonNode verticeObj = joinObj.get("input vertices:"); Map posToVertex = new LinkedHashMap<>(); - for (String pos : JSONObject.getNames(verticeObj)) { - String vertexName = verticeObj.getString(pos); + Iterator verticeIter = verticeObj.getFieldNames(); + while (verticeIter.hasNext()) { + String pos = verticeIter.next(); + String vertexName = verticeObj.get(pos).getTextValue(); // update the connection Connection c = null; for (Connection connection : vertex.parentConnections) { @@ -108,7 +103,7 @@ private void inlineJoinOp() throws Exception { // update the attrs this.attrs.remove("input vertices:"); // update the keys to use operator name - JSONObject keys = joinObj.getJSONObject("keys:"); + JsonNode keys = joinObj.get("keys:"); // find out the vertex for the big table Set parentVertexes = new 
HashSet<>(); for (Connection connection : vertex.parentConnections) { @@ -116,8 +111,10 @@ private void inlineJoinOp() throws Exception { } parentVertexes.removeAll(posToVertex.values()); Map posToOpId = new LinkedHashMap<>(); - if (keys.length() != 0) { - for (String key : JSONObject.getNames(keys)) { + if (keys.size() != 0) { + Iterator keysIter = keys.getFieldNames(); + while (keysIter.hasNext()) { + String key = keysIter.next(); // first search from the posToVertex if (posToVertex.containsKey(key)) { Vertex vertex = posToVertex.get(key); @@ -170,15 +167,15 @@ else if (parentVertexes.size() == 1) { } this.attrs.remove("keys:"); StringBuffer sb = new StringBuffer(); - JSONArray conditionMap = joinObj.getJSONArray("condition map:"); - for (int index = 0; index < conditionMap.length(); index++) { - JSONObject cond = conditionMap.getJSONObject(index); - String k = (String) cond.keys().next(); - JSONObject condObject = new JSONObject((String)cond.get(k)); - String type = condObject.getString("type"); - String left = condObject.getString("left"); - String right = condObject.getString("right"); - if (keys.length() != 0) { + ArrayNode conditionMap = (ArrayNode) joinObj.get("condition map:"); + for (int index = 0; index < conditionMap.size(); index++) { + JsonNode cond = conditionMap.get(index); + String k = cond.getFieldNames().next(); + JsonNode condObject = cond.get(k); + String type = condObject.get("type").getTextValue(); + String left = condObject.get("left").getTextValue(); + String right = condObject.get("right").getTextValue(); + if (keys.size() != 0) { sb.append(posToOpId.get(left) + "." + keys.get(left) + "=" + posToOpId.get(right) + "." + keys.get(right) + "(" + type + "),"); } else { @@ -235,11 +232,13 @@ else if (parentVertexes.size() == 1) { posToOpId.put(v.tag, v.rootOps.get(0).operatorId); } } - JSONObject joinObj = opObject.getJSONObject(this.name); + JsonNode joinObj = opObject.get(this.name); // update the keys to use operator name - JSONObject keys = joinObj.getJSONObject("keys:"); - if (keys.length() != 0) { - for (String key : JSONObject.getNames(keys)) { + JsonNode keys = joinObj.get("keys:"); + if (keys.size() != 0) { + Iterator keysIter = keys.getFieldNames(); + while (keysIter.hasNext()) { + String key = keysIter.next(); if (!posToOpId.containsKey(key)) { throw new Exception( "Can not find the source operator on one of the branches of merge join."); @@ -255,15 +254,15 @@ else if (parentVertexes.size() == 1) { // update the attrs this.attrs.remove("keys:"); StringBuffer sb = new StringBuffer(); - JSONArray conditionMap = joinObj.getJSONArray("condition map:"); - for (int index = 0; index < conditionMap.length(); index++) { - JSONObject cond = conditionMap.getJSONObject(index); - String k = (String) cond.keys().next(); - JSONObject condObject = new JSONObject((String)cond.get(k)); - String type = condObject.getString("type"); - String left = condObject.getString("left"); - String right = condObject.getString("right"); - if (keys.length() != 0) { + ArrayNode conditionMap = (ArrayNode) joinObj.get("condition map:"); + for (int index = 0; index < conditionMap.size(); index++) { + JsonNode cond = conditionMap.get(index); + String k = cond.getFieldNames().next(); + JsonNode condObject = cond.get(k); + String type = condObject.get("type").getTextValue(); + String left = condObject.get("left").getTextValue(); + String right = condObject.get("right").getTextValue(); + if (keys.size() != 0) { sb.append(posToOpId.get(left) + "." + keys.get(left) + "=" + posToOpId.get(right) + "." 
+ keys.get(right) + "(" + type + "),"); } else { diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java index 63937f8..fb85ee6 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java @@ -18,18 +18,14 @@ package org.apache.hadoop.hive.common.jsonexplain.tez; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; +import java.util.*; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.jsonexplain.tez.Vertex.VertexType; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.map.ObjectMapper; +import org.codehaus.jackson.node.ArrayNode; +import org.codehaus.jackson.node.ObjectNode; public final class Stage { //external name is used to show at the console @@ -48,6 +45,7 @@ // some stage may contain only a single operator, e.g., create table operator, // fetch operator. Op op; + ObjectMapper objectMapper = new ObjectMapper(); public Stage(String name, TezJsonParser tezJsonParser) { super(); @@ -56,9 +54,9 @@ public Stage(String name, TezJsonParser tezJsonParser) { parser = tezJsonParser; } - public void addDependency(JSONObject object, Map stages) throws JSONException { + public void addDependency(JsonNode object, Map stages) { if (object.has("DEPENDENT STAGES")) { - String names = object.getString("DEPENDENT STAGES"); + String names = object.get("DEPENDENT STAGES").getTextValue(); for (String name : names.split(",")) { Stage parent = stages.get(name.trim()); this.parentStages.add(parent); @@ -66,7 +64,7 @@ public void addDependency(JSONObject object, Map stages) throws J } } if (object.has("CONDITIONAL CHILD TASKS")) { - String names = object.getString("CONDITIONAL CHILD TASKS"); + String names = object.get("CONDITIONAL CHILD TASKS").getTextValue(); this.externalName = this.internalName + "(CONDITIONAL CHILD TASKS: " + names + ")"; for (String name : names.split(",")) { Stage child = stages.get(name.trim()); @@ -84,31 +82,36 @@ public void addDependency(JSONObject object, Map stages) throws J * vertices and edges Else we need to directly extract operators * and/or attributes. 
*/ - public void extractVertex(JSONObject object) throws Exception { + public void extractVertex(JsonNode object) throws Exception { if (object.has("Tez")) { this.tezStageDependency = new TreeMap<>(); - JSONObject tez = (JSONObject) object.get("Tez"); - JSONObject vertices = tez.getJSONObject("Vertices:"); + JsonNode tez = object.get("Tez"); + JsonNode vertices = tez.get("Vertices:"); if (tez.has("Edges:")) { - JSONObject edges = tez.getJSONObject("Edges:"); + JsonNode edges = tez.get("Edges:"); // iterate for the first time to get all the vertices - for (String to : JSONObject.getNames(edges)) { - vertexs.put(to, new Vertex(to, vertices.getJSONObject(to), parser)); + Iterator fieldNamesIter; + fieldNamesIter = edges.getFieldNames(); + while (fieldNamesIter.hasNext()) { + String to = fieldNamesIter.next(); + vertexs.put(to, new Vertex(to, vertices.get(to), parser)); } // iterate for the second time to get all the vertex dependency - for (String to : JSONObject.getNames(edges)) { - Object o = edges.get(to); + fieldNamesIter = edges.getFieldNames(); + while (fieldNamesIter.hasNext()) { + String to = fieldNamesIter.next(); + JsonNode o = edges.get(to); Vertex v = vertexs.get(to); // 1 to 1 mapping - if (o instanceof JSONObject) { - JSONObject obj = (JSONObject) o; - String parent = obj.getString("parent"); + if (!o.isArray()) { + JsonNode obj = (JsonNode) o; + String parent = obj.get("parent").getTextValue(); Vertex parentVertex = vertexs.get(parent); if (parentVertex == null) { - parentVertex = new Vertex(parent, vertices.getJSONObject(parent), parser); + parentVertex = new Vertex(parent, vertices.get(parent), parser); vertexs.put(parent, parentVertex); } - String type = obj.getString("type"); + String type = obj.get("type").getTextValue(); // for union vertex, we reverse the dependency relationship if (!"CONTAINS".equals(type)) { v.addDependency(new Connection(type, parentVertex)); @@ -121,17 +124,17 @@ public void extractVertex(JSONObject object) throws Exception { this.tezStageDependency.put(v, Arrays.asList(new Connection(type, parentVertex))); } else { // 1 to many mapping - JSONArray from = (JSONArray) o; + ArrayNode from = (ArrayNode) o; List list = new ArrayList<>(); - for (int index = 0; index < from.length(); index++) { - JSONObject obj = from.getJSONObject(index); - String parent = obj.getString("parent"); + for (int index = 0; index < from.size(); index++) { + JsonNode obj = from.get(index); + String parent = obj.get("parent").getTextValue(); Vertex parentVertex = vertexs.get(parent); if (parentVertex == null) { - parentVertex = new Vertex(parent, vertices.getJSONObject(parent), parser); + parentVertex = new Vertex(parent, vertices.get(parent), parser); vertexs.put(parent, parentVertex); } - String type = obj.getString("type"); + String type = obj.get("type").getTextValue(); if (!"CONTAINS".equals(type)) { v.addDependency(new Connection(type, parentVertex)); parentVertex.setType(type); @@ -146,8 +149,10 @@ public void extractVertex(JSONObject object) throws Exception { } } } else { - for (String vertexName : JSONObject.getNames(vertices)) { - vertexs.put(vertexName, new Vertex(vertexName, vertices.getJSONObject(vertexName), parser)); + Iterator fieldNamesIter = vertices.getFieldNames(); + while (fieldNamesIter.hasNext()) { + String vertexName = fieldNamesIter.next(); + vertexs.put(vertexName, new Vertex(vertexName, vertices.get(vertexName), parser)); } } // The opTree in vertex is extracted @@ -158,15 +163,14 @@ public void extractVertex(JSONObject object) throws Exception { } } 
} else { - String[] names = JSONObject.getNames(object); - if (names != null) { - for (String name : names) { - if (name.contains("Operator")) { - this.op = extractOp(name, object.getJSONObject(name)); - } else { - if (!object.get(name).toString().isEmpty()) { - attrs.put(name, object.get(name).toString()); - } + Iterator namesIter = object.getFieldNames(); + while (namesIter.hasNext()) { + String name = namesIter.next(); + if (name.contains("Operator")) { + this.op = extractOp(name, object.get(name)); + } else { + if (!object.get(name).toString().isEmpty()) { + attrs.put(name, object.get(name).toString()); } } } @@ -181,27 +185,30 @@ public void extractVertex(JSONObject object) throws Exception { * This method address the create table operator, fetch operator, * etc */ - Op extractOp(String opName, JSONObject opObj) throws Exception { + Op extractOp(String opName, JsonNode opObj) throws Exception { Map attrs = new TreeMap<>(); Vertex v = null; - if (opObj.length() > 0) { - String[] names = JSONObject.getNames(opObj); - for (String name : names) { - Object o = opObj.get(name); - if (isPrintable(o) && !o.toString().isEmpty()) { - attrs.put(name, o.toString()); - } else if (o instanceof JSONObject) { - JSONObject attrObj = (JSONObject) o; - if (attrObj.length() > 0) { + if (opObj.size() > 0) { + Iterator namesIter = opObj.getFieldNames(); + while (namesIter.hasNext()) { + String name = namesIter.next(); + JsonNode o = opObj.get(name); + if (o.isValueNode() && !o.toString().isEmpty()) { + attrs.put(name, o.asText()); + } else if (!o.isArray()) { + JsonNode attrObj = (JsonNode) o; + if (attrObj.size() > 0) { if (name.equals("Processor Tree:")) { - JSONObject object = new JSONObject(new LinkedHashMap<>()); + ObjectNode object = objectMapper.createObjectNode(); object.put(name, attrObj); v = new Vertex(null, object, parser); v.extractOpTree(); } else { - for (String attrName : JSONObject.getNames(attrObj)) { + Iterator attrNamesIter = attrObj.getFieldNames(); + while (attrNamesIter.hasNext()) { + String attrName = attrNamesIter.next(); if (!attrObj.get(attrName).toString().isEmpty()) { - attrs.put(attrName, attrObj.get(attrName).toString()); + attrs.put(attrName, attrObj.get(attrName).asText()); } } } @@ -218,18 +225,6 @@ Op extractOp(String opName, JSONObject opObj) throws Exception { return op; } - private boolean isPrintable(Object val) { - if (val instanceof Boolean || val instanceof String || val instanceof Integer - || val instanceof Long || val instanceof Byte || val instanceof Float - || val instanceof Double || val instanceof Path) { - return true; - } - if (val != null && val.getClass().isPrimitive()) { - return true; - } - return false; - } - public void print(Printer printer, int indentFlag) throws Exception { // print stagename if (parser.printSet.contains(this)) { diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java index ea86048..5b1ae15 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java @@ -19,16 +19,12 @@ package org.apache.hadoop.hive.common.jsonexplain.tez; import java.io.PrintStream; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; -import java.util.Set; import 
org.apache.hadoop.hive.common.jsonexplain.JsonParser; -import org.json.JSONObject; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.node.ObjectNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,25 +42,32 @@ public TezJsonParser() { LOG = LoggerFactory.getLogger(this.getClass().getName()); } - public void extractStagesAndPlans(JSONObject inputObject) throws Exception { + public void extractStagesAndPlans(JsonNode inputObject) throws Exception { // extract stages - JSONObject dependency = inputObject.getJSONObject("STAGE DEPENDENCIES"); - if (dependency != null && dependency.length() > 0) { + JsonNode dependency = inputObject.get("STAGE DEPENDENCIES"); + if (dependency != null && dependency.size() > 0) { // iterate for the first time to get all the names of stages. - for (String stageName : JSONObject.getNames(dependency)) { + Iterator iter; + iter = dependency.getFieldNames(); + while (iter.hasNext()) { + String stageName = iter.next(); this.stages.put(stageName, new Stage(stageName, this)); } // iterate for the second time to get all the dependency. - for (String stageName : JSONObject.getNames(dependency)) { - JSONObject dependentStageNames = dependency.getJSONObject(stageName); + iter = dependency.getFieldNames(); + while (iter.hasNext()) { + String stageName = iter.next(); + JsonNode dependentStageNames = dependency.get(stageName); this.stages.get(stageName).addDependency(dependentStageNames, this.stages); } } // extract stage plans - JSONObject stagePlans = inputObject.getJSONObject("STAGE PLANS"); - if (stagePlans != null && stagePlans.length() > 0) { - for (String stageName : JSONObject.getNames(stagePlans)) { - JSONObject stagePlan = stagePlans.getJSONObject(stageName); + JsonNode stagePlans = inputObject.get("STAGE PLANS"); + if (stagePlans != null && stagePlans.size() > 0) { + Iterator stagePlansIter = stagePlans.getFieldNames(); + while (stagePlansIter.hasNext()) { + String stageName = stagePlansIter.next(); + JsonNode stagePlan = stagePlans.get(stageName); this.stages.get(stageName).extractVertex(stagePlan); } } @@ -99,13 +102,13 @@ public static String prefixString(int indentFlag, String tail) { } @Override - public void print(JSONObject inputObject, PrintStream outputStream) throws Exception { + public void print(JsonNode inputObject, PrintStream outputStream) throws Exception { LOG.info("JsonParser is parsing:" + inputObject.toString()); this.extractStagesAndPlans(inputObject); Printer printer = new Printer(); // print out the cbo info if (inputObject.has("cboInfo")) { - printer.println(inputObject.getString("cboInfo")); + printer.println(inputObject.get("cboInfo").getTextValue()); printer.println(); } // print out the vertex dependency in root stage diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java index 3d559bd..611b464 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java @@ -19,21 +19,14 @@ package org.apache.hadoop.hive.common.jsonexplain.tez; import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; +import java.util.*; import org.apache.hadoop.hive.common.jsonexplain.tez.Op.OpType; -import org.apache.hadoop.util.hash.Hash; +import org.codehaus.jackson.JsonNode; 
import org.codehaus.jackson.JsonParseException; import org.codehaus.jackson.map.JsonMappingException; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; +import org.codehaus.jackson.node.ArrayNode; +import org.codehaus.jackson.node.ObjectNode; public final class Vertex implements Comparable{ public final String name; @@ -44,7 +37,7 @@ // vertex's children vertex. public final List children = new ArrayList<>(); // the jsonObject for this vertex - public final JSONObject vertexObject; + public final JsonNode vertexObject; // whether this vertex is dummy (which does not really exists but is created), // e.g., a dummy vertex for a mergejoin branch public boolean dummy; @@ -72,7 +65,7 @@ }; public EdgeType edgeType; - public Vertex(String name, JSONObject vertexObject, TezJsonParser tezJsonParser) { + public Vertex(String name, JsonNode vertexObject, TezJsonParser tezJsonParser) { super(); this.name = name; if (this.name != null) { @@ -93,12 +86,11 @@ public Vertex(String name, JSONObject vertexObject, TezJsonParser tezJsonParser) parser = tezJsonParser; } - public void addDependency(Connection connection) throws JSONException { + public void addDependency(Connection connection) { this.parentConnections.add(connection); } /** - * @throws JSONException * @throws JsonParseException * @throws JsonMappingException * @throws IOException @@ -106,41 +98,45 @@ public void addDependency(Connection connection) throws JSONException { * We assume that there is a single top-level Map Operator Tree or a * Reduce Operator Tree in a vertex */ - public void extractOpTree() throws JSONException, JsonParseException, JsonMappingException, + public void extractOpTree() throws JsonParseException, JsonMappingException, IOException, Exception { - if (vertexObject.length() != 0) { - for (String key : JSONObject.getNames(vertexObject)) { + if (vertexObject.size() != 0) { + Iterator fieldNamesIter = vertexObject.getFieldNames(); + while (fieldNamesIter.hasNext()) { + String key = fieldNamesIter.next(); if (key.equals("Map Operator Tree:")) { - extractOp(vertexObject.getJSONArray(key).getJSONObject(0)); + extractOp(vertexObject.get(key).get(0)); } else if (key.equals("Reduce Operator Tree:") || key.equals("Processor Tree:")) { - extractOp(vertexObject.getJSONObject(key)); + extractOp(vertexObject.get(key)); } else if (key.equals("Join:")) { // this is the case when we have a map-side SMB join // one input of the join is treated as a dummy vertex - JSONArray array = vertexObject.getJSONArray(key); - for (int index = 0; index < array.length(); index++) { - JSONObject mpOpTree = array.getJSONObject(index); + ArrayNode array = (ArrayNode) vertexObject.get(key); + for (int index = 0; index < array.size(); index++) { + JsonNode mpOpTree = array.get(index); Vertex v = new Vertex(null, mpOpTree, parser); v.extractOpTree(); v.dummy = true; mergeJoinDummyVertexs.add(v); } } else if (key.equals("Merge File Operator")) { - JSONObject opTree = vertexObject.getJSONObject(key); + JsonNode opTree = vertexObject.get(key); if (opTree.has("Map Operator Tree:")) { - extractOp(opTree.getJSONArray("Map Operator Tree:").getJSONObject(0)); + extractOp(opTree.get("Map Operator Tree:").get(0)); } else { throw new Exception("Merge File Operator does not have a Map Operator Tree"); } } else if (key.equals("Execution mode:")) { - executionMode = " " + vertexObject.getString(key); + executionMode = " " + vertexObject.get(key).getTextValue(); } else if (key.equals("tagToInput:")) { - JSONObject tagToInput = 
vertexObject.getJSONObject(key); - for (String tag : JSONObject.getNames(tagToInput)) { - this.tagToInput.put(tag, (String) tagToInput.get(tag)); + JsonNode tagToInput = vertexObject.get(key); + Iterator tagIter = tagToInput.getFieldNames(); + while (tagIter.hasNext()) { + String tag = tagIter.next(); + this.tagToInput.put(tag, tagToInput.get(tag).getTextValue()); } } else if (key.equals("tag:")) { - this.tag = vertexObject.getString(key); + this.tag = vertexObject.get(key).getTextValue(); } else { throw new Exception("Unsupported operator tree in vertex " + this.name); } @@ -148,42 +144,35 @@ public void extractOpTree() throws JSONException, JsonParseException, JsonMappin } } - /** - * @param operator - * @param parent - * @return - * @throws JSONException - * @throws JsonParseException - * @throws JsonMappingException - * @throws IOException - * @throws Exception - * assumption: each operator only has one parent but may have many - * children - */ - Op extractOp(JSONObject operator) throws JSONException, JsonParseException, JsonMappingException, + Op extractOp(JsonNode operator) throws JsonParseException, JsonMappingException, IOException, Exception { - String[] names = JSONObject.getNames(operator); - if (names.length != 1) { + Iterator operatorIter = operator.getFieldNames(); + if (!operatorIter.hasNext()) { throw new Exception("Expect only one operator in " + operator.toString()); } else { - String opName = names[0]; - JSONObject attrObj = (JSONObject) operator.get(opName); + String opName = operatorIter.next(); + if (operatorIter.hasNext()) { + throw new Exception("Expect only one operator in " + operator.toString()); + } + JsonNode attrObj = operator.get(opName); Map attrs = new TreeMap<>(); List children = new ArrayList<>(); String id = null; String outputVertexName = null; - for (String attrName : JSONObject.getNames(attrObj)) { + Iterator attrObjIter = attrObj.getFieldNames(); + while (attrObjIter.hasNext()) { + String attrName = attrObjIter.next(); if (attrName.equals("children")) { - Object childrenObj = attrObj.get(attrName); - if (childrenObj instanceof JSONObject) { - if (((JSONObject) childrenObj).length() != 0) { - children.add(extractOp((JSONObject) childrenObj)); + JsonNode childrenObj = attrObj.get(attrName); + if (!childrenObj.isArray()) { + if (childrenObj.size() != 0) { + children.add(extractOp(childrenObj)); } - } else if (childrenObj instanceof JSONArray) { - if (((JSONArray) childrenObj).length() != 0) { - JSONArray array = ((JSONArray) childrenObj); - for (int index = 0; index < array.length(); index++) { - children.add(extractOp(array.getJSONObject(index))); + } else if (childrenObj.isArray()) { + if (childrenObj.size() != 0) { + ArrayNode array = ((ArrayNode) childrenObj); + for (int index = 0; index < array.size(); index++) { + children.add(extractOp(array.get(index))); } } } else { @@ -215,7 +204,7 @@ Op extractOp(JSONObject operator) throws JSONException, JsonParseException, Json } public void print(Printer printer, int indentFlag, String type, Vertex callingVertex) - throws JSONException, Exception { + throws Exception { // print vertexname if (parser.printSet.contains(this) && !hasMultiReduceOp) { if (type != null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java index 74cec3e..00449f0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java @@ -20,91 +20,46 @@ import static 
org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME; -import org.apache.commons.lang3.tuple.ImmutablePair; - import java.io.OutputStream; import java.io.PrintStream; import java.io.Serializable; import java.lang.annotation.Annotation; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Stack; +import java.util.*; import java.util.Map.Entry; -import java.util.Set; -import java.util.TreeMap; +import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.common.jsonexplain.JsonParser; import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.Validator.StringSet; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.DriverContext; -import org.apache.hadoop.hive.ql.exec.spark.SparkTask; -import org.apache.hadoop.hive.ql.exec.tez.TezTask; -import org.apache.hadoop.hive.ql.exec.vector.VectorGroupByOperator; -import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression; -import org.apache.hadoop.hive.ql.plan.MapJoinDesc; -import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc; import org.apache.hadoop.hive.ql.hooks.ReadEntity; -import org.apache.hadoop.hive.ql.io.AcidUtils; -import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; -import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; -import org.apache.hadoop.hive.ql.lib.Dispatcher; -import org.apache.hadoop.hive.ql.lib.GraphWalker; -import org.apache.hadoop.hive.ql.lib.Node; -import org.apache.hadoop.hive.ql.lib.NodeProcessor; -import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; -import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger; -import org.apache.hadoop.hive.ql.optimizer.physical.Vectorizer; -import org.apache.hadoop.hive.ql.optimizer.physical.VectorizerReason; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.VectorizationDetailLevel; -import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.BaseWork; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; import org.apache.hadoop.hive.ql.plan.Explain.Vectorization; -import org.apache.hadoop.hive.ql.plan.AggregationDesc; import org.apache.hadoop.hive.ql.plan.ExplainWork; -import org.apache.hadoop.hive.ql.plan.GroupByDesc; import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.ql.plan.MapredWork; -import org.apache.hadoop.hive.ql.plan.MapWork; -import org.apache.hadoop.hive.ql.plan.ReduceWork; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.SparkWork; -import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.TezWork; -import org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo; 
-import org.apache.hadoop.hive.ql.plan.VectorReduceSinkDesc; -import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.serde2.Deserializer; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hive.common.util.AnnotationUtils; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.map.ObjectMapper; +import org.codehaus.jackson.node.ArrayNode; +import org.codehaus.jackson.node.ObjectNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -118,6 +73,7 @@ private final Set> visitedOps = new HashSet>(); private boolean isLogical = false; protected final Logger LOG; + private ObjectMapper objectMapper = new ObjectMapper(); public ExplainTask() { super(); @@ -132,11 +88,11 @@ public ExplainTask() { * {"input_tables":[{"tablename": "default@test_sambavi_v1", "tabletype": "TABLE"}], * "input partitions":["default@srcpart@ds=2008-04-08/hr=11"]} */ - private static JSONObject getJSONDependencies(ExplainWork work) + private ObjectNode getJSONDependencies(ExplainWork work) throws Exception { assert(work.getDependency()); - JSONObject outJSONObject = new JSONObject(new LinkedHashMap<>()); + ObjectNode outJSONObject = objectMapper.createObjectNode(); List> inputTableInfo = new ArrayList>(); List> inputPartitionInfo = new ArrayList>(); for (ReadEntity input: work.getInputs()) { @@ -164,15 +120,25 @@ private static JSONObject getJSONDependencies(ExplainWork work) } } - outJSONObject.put("input_tables", inputTableInfo); - outJSONObject.put("input_partitions", inputPartitionInfo); + putMaps(outJSONObject, "input_tables", inputTableInfo); + putMaps(outJSONObject, "input_partitions", inputPartitionInfo); return outJSONObject; } - public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws Exception { + private void putMaps(ObjectNode outJSONObject, String name, List> inputTableInfo) { + ArrayNode arrayNode = outJSONObject.putArray(name); + for (Map map : inputTableInfo) { + ObjectNode objectNode = arrayNode.addObject(); + for (Map.Entry entry : map.entrySet()) { + objectNode.put(entry.getKey(), entry.getValue()); + } + } + } + + public ObjectNode getJSONLogicalPlan(PrintStream out, ExplainWork work) throws Exception { isLogical = true; - JSONObject outJSONObject = new JSONObject(new LinkedHashMap<>()); + ObjectNode outJSONObject = objectMapper.createObjectNode(); boolean jsonOutput = work.isFormatted(); if (jsonOutput) { out = null; @@ -182,7 +148,7 @@ public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws E if (out != null) { out.print("LOGICAL PLAN:"); } - JSONObject jsonPlan = outputMap(work.getParseContext().getTopOps(), true, + ObjectNode jsonPlan = outputMap(work.getParseContext().getTopOps(), true, out, work.getExtended(), jsonOutput, 0); if (out != null) { out.println(); @@ -202,14 +168,14 @@ public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws E private static String falseCondNameVectorizationEnabled = HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED.varname + " IS false"; - private ImmutablePair outputPlanVectorization(PrintStream out, boolean jsonOutput) + private ImmutablePair 
outputPlanVectorization(PrintStream out, boolean jsonOutput) throws Exception { if (out != null) { out.println("PLAN VECTORIZATION:"); } - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + ObjectNode json = jsonOutput ? objectMapper.createObjectNode() : null; HiveConf hiveConf = queryState.getConf(); @@ -236,27 +202,33 @@ public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws E if (jsonOutput) { json.put("enabled", isVectorizationEnabled); if (!isVectorizationEnabled) { - json.put("enabledConditionsNotMet", isVectorizationEnabledCondList); + ArrayNode arrayNode = json.putArray("enabledConditionsNotMet"); + for (String string : isVectorizationEnabledCondList) { + arrayNode.add(string); + } } else { - json.put("enabledConditionsMet", isVectorizationEnabledCondList); + ArrayNode arrayNode = json.putArray("enabledConditionsMet"); + for (String string : isVectorizationEnabledCondList) { + arrayNode.add(string); + } } } - return new ImmutablePair(isVectorizationEnabled, jsonOutput ? json : null); + return new ImmutablePair(isVectorizationEnabled, jsonOutput ? json : null); } - public JSONObject getJSONPlan(PrintStream out, ExplainWork work) + public ObjectNode getJSONPlan(PrintStream out, ExplainWork work) throws Exception { return getJSONPlan(out, work.getRootTasks(), work.getFetchTask(), work.isFormatted(), work.getExtended(), work.isAppendTaskType()); } - public JSONObject getJSONPlan(PrintStream out, List> tasks, Task fetchTask, + public ObjectNode getJSONPlan(PrintStream out, List> tasks, Task fetchTask, boolean jsonOutput, boolean isExtended, boolean appendTaskType) throws Exception { // If the user asked for a formatted output, dump the json output // in the output stream - JSONObject outJSONObject = new JSONObject(new LinkedHashMap<>()); + ObjectNode outJSONObject = objectMapper.createObjectNode(); if (jsonOutput) { out = null; @@ -274,7 +246,7 @@ public JSONObject getJSONPlan(PrintStream out, List> tasks, Task fetc boolean suppressOthersForVectorization = false; if (this.work != null && this.work.isVectorization()) { - ImmutablePair planVecPair = outputPlanVectorization(out, jsonOutput); + ImmutablePair planVecPair = outputPlanVectorization(out, jsonOutput); if (this.work.isVectorizationOnly()) { // Suppress the STAGES if vectorization is off. 
@@ -291,7 +263,7 @@ public JSONObject getJSONPlan(PrintStream out, List> tasks, Task fetc } if (!suppressOthersForVectorization) { - JSONObject jsonDependencies = outputDependencies(out, jsonOutput, appendTaskType, ordered); + ObjectNode jsonDependencies = outputDependencies(out, jsonOutput, appendTaskType, ordered); if (out != null) { out.println(); @@ -302,7 +274,7 @@ public JSONObject getJSONPlan(PrintStream out, List> tasks, Task fetc } // Go over all the tasks and dump out the plans - JSONObject jsonPlan = outputStagePlans(out, ordered, + ObjectNode jsonPlan = outputStagePlans(out, ordered, jsonOutput, isExtended); if (jsonOutput) { @@ -325,7 +297,7 @@ public JSONObject getJSONPlan(PrintStream out, List> tasks, Task fetc return list; } - private Object toJson(String header, String message, PrintStream out, ExplainWork work) + private String toJson(String header, String message, PrintStream out, ExplainWork work) throws Exception { if (work.isFormatted()) { return message; @@ -337,10 +309,14 @@ private Object toJson(String header, String message, PrintStream out, ExplainWor return null; } - private Object toJson(String header, List messages, PrintStream out, ExplainWork work) + private ArrayNode toJson(String header, List messages, PrintStream out, ExplainWork work) throws Exception { if (work.isFormatted()) { - return new JSONArray(messages); + ArrayNode arrayNode = objectMapper.createArrayNode(); + for (String message : messages) { + arrayNode.add(message); + } + return arrayNode; } out.print(header); out.println(": "); @@ -360,19 +336,20 @@ public int execute(DriverContext driverContext) { Path resFile = work.getResFile(); OutputStream outS = resFile.getFileSystem(conf).create(resFile); out = new PrintStream(outS); + ObjectMapper objectMapper = new ObjectMapper(); if (work.isLogical()) { - JSONObject jsonLogicalPlan = getJSONLogicalPlan(out, work); + ObjectNode jsonLogicalPlan = getJSONLogicalPlan(out, work); if (work.isFormatted()) { out.print(jsonLogicalPlan); } } else if (work.isAuthorize()) { - JSONObject jsonAuth = collectAuthRelatedEntities(out, work); + ObjectNode jsonAuth = collectAuthRelatedEntities(out, work); if (work.isFormatted()) { out.print(jsonAuth); } } else if (work.getDependency()) { - JSONObject jsonDependencies = getJSONDependencies(work); + ObjectNode jsonDependencies = getJSONDependencies(work); out.print(jsonDependencies); } else { if (work.isUserLevelExplain()) { @@ -380,7 +357,7 @@ public int execute(DriverContext driverContext) { // that we can get a TezJsonParser. 
JsonParser jsonParser = JsonParserFactory.getParser(conf); work.getConfig().setFormatted(true); - JSONObject jsonPlan = getJSONPlan(out, work); + ObjectNode jsonPlan = getJSONPlan(out, work); if (work.getCboInfo() != null) { jsonPlan.put("cboInfo", work.getCboInfo()); } @@ -395,7 +372,7 @@ public int execute(DriverContext driverContext) { jsonPlan = getJSONPlan(out, work); } } else { - JSONObject jsonPlan = getJSONPlan(out, work); + ObjectNode jsonPlan = getJSONPlan(out, work); if (work.isFormatted()) { out.print(jsonPlan); } @@ -416,27 +393,27 @@ public int execute(DriverContext driverContext) { } } - private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work) + private ObjectNode collectAuthRelatedEntities(PrintStream out, ExplainWork work) throws Exception { BaseSemanticAnalyzer analyzer = work.getAnalyzer(); HiveOperation operation = queryState.getHiveOperation(); - JSONObject object = new JSONObject(new LinkedHashMap<>()); - Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work); + ObjectNode object = objectMapper.createObjectNode(); + ArrayNode jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work); if (work.isFormatted()) { object.put("INPUTS", jsonInput); } - Object jsonOutput = toJson("OUTPUTS", toString(analyzer.getOutputs()), out, work); + ArrayNode jsonOutput = toJson("OUTPUTS", toString(analyzer.getOutputs()), out, work); if (work.isFormatted()) { object.put("OUTPUTS", jsonOutput); } String userName = SessionState.get().getAuthenticator().getUserName(); - Object jsonUser = toJson("CURRENT_USER", userName, out, work); + String jsonUser = toJson("CURRENT_USER", userName, out, work); if (work.isFormatted()) { object.put("CURRENT_USER", jsonUser); } - Object jsonOperation = toJson("OPERATION", operation.name(), out, work); + String jsonOperation = toJson("OPERATION", operation.name(), out, work); if (work.isFormatted()) { object.put("OPERATION", jsonOperation); } @@ -463,7 +440,7 @@ public void exception(Exception exception) { } } if (!exceptions.isEmpty()) { - Object jsonFails = toJson("AUTHORIZATION_FAILURES", exceptions, out, work); + ArrayNode jsonFails = toJson("AUTHORIZATION_FAILURES", exceptions, out, work); if (work.isFormatted()) { object.put("AUTHORIZATION_FAILURES", jsonFails); } @@ -480,11 +457,11 @@ private static String indentString(int indent) { return sb.toString(); } - private JSONObject outputMap(Map mp, boolean hasHeader, PrintStream out, + private ObjectNode outputMap(Map mp, boolean hasHeader, PrintStream out, boolean extended, boolean jsonOutput, int indent) throws Exception { TreeMap tree = getBasictypeKeyedMap(mp); - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + ObjectNode json = jsonOutput ? 
objectMapper.createObjectNode() : null; if (out != null && hasHeader && !mp.isEmpty()) { out.println(); } @@ -528,10 +505,17 @@ else if (ent.getValue() instanceof List) { } if (jsonOutput) { for (TezWork.Dependency dep: (List)ent.getValue()) { - JSONObject jsonDep = new JSONObject(new LinkedHashMap<>()); + ObjectNode jsonDep = objectMapper.createObjectNode(); jsonDep.put("parent", dep.getName()); jsonDep.put("type", dep.getType()); - json.accumulate(ent.getKey().toString(), jsonDep); + String key = ent.getKey().toString(); + ArrayNode arrayNode; + if (json.has(key)) { + arrayNode = (ArrayNode) json.get(key); + } else { + arrayNode = json.putArray(key); + } + arrayNode.add(jsonDep); } } } else if (ent.getValue() != null && !((List) ent.getValue()).isEmpty() @@ -557,11 +541,18 @@ else if (ent.getValue() instanceof List) { } if (jsonOutput) { for (SparkWork.Dependency dep: (List) ent.getValue()) { - JSONObject jsonDep = new JSONObject(new LinkedHashMap<>()); + ObjectNode jsonDep = objectMapper.createObjectNode(); jsonDep.put("parent", dep.getName()); jsonDep.put("type", dep.getShuffleType()); jsonDep.put("partitions", dep.getNumPartitions()); - json.accumulate(ent.getKey().toString(), jsonDep); + String key = ent.getKey().toString(); + ArrayNode arrayNode; + if (json.has(key)) { + arrayNode = (ArrayNode) json.get(key); + } else { + arrayNode = json.putArray(key); + } + arrayNode.add(jsonDep); } } } else { @@ -588,7 +579,7 @@ else if (ent.getValue() != null) { if (out != null) { out.println(); } - JSONObject jsonOut = outputPlan(ent.getValue(), out, + JsonNode jsonOut = outputPlan(ent.getValue(), out, extended, jsonOutput, jsonOutput ? 0 : indent + 2); if (jsonOutput) { json.put(ent.getKey().toString(), jsonOut); @@ -630,12 +621,12 @@ else if (ent.getValue() != null) { return ret; } - private JSONArray outputList(List l, PrintStream out, boolean hasHeader, + private ArrayNode outputList(List l, PrintStream out, boolean hasHeader, boolean extended, boolean jsonOutput, int indent) throws Exception { boolean first_el = true; boolean nl = false; - JSONArray outputArray = new JSONArray(); + ArrayNode outputArray = objectMapper.createArrayNode(); for (Object o : l) { if (isPrintable(o)) { @@ -646,7 +637,7 @@ private JSONArray outputList(List l, PrintStream out, boolean hasHeader, } if (jsonOutput) { - outputArray.put(o); + outputArray.addPOJO(o); } nl = true; } @@ -654,10 +645,10 @@ private JSONArray outputList(List l, PrintStream out, boolean hasHeader, if (first_el && (out != null) && hasHeader) { out.println(); } - JSONObject jsonOut = outputPlan(o, out, extended, + JsonNode jsonOut = outputPlan(o, out, extended, jsonOutput, jsonOutput ? 0 : (hasHeader ? 
indent + 2 : indent)); if (jsonOutput) { - outputArray.put(jsonOut); + outputArray.add(jsonOut); } } @@ -685,12 +676,12 @@ private boolean isPrintable(Object val) { return false; } - private JSONObject outputPlan(Object work, + private ObjectNode outputPlan(Object work, PrintStream out, boolean extended, boolean jsonOutput, int indent) throws Exception { return outputPlan(work, out, extended, jsonOutput, indent, ""); } - private JSONObject outputPlan(Object work, PrintStream out, + private ObjectNode outputPlan(Object work, PrintStream out, boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception { // Check if work has an explain annotation Annotation note = AnnotationUtils.getAnnotation(work.getClass(), Explain.class); @@ -780,7 +771,7 @@ private JSONObject outputPlan(Object work, PrintStream out, } } - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + ObjectNode json = jsonOutput ? objectMapper.createObjectNode() : null; // If this is an operator then we need to call the plan generation on the // conf and then the children if (work instanceof Operator) { @@ -788,11 +779,11 @@ private JSONObject outputPlan(Object work, PrintStream out, (Operator) work; if (operator.getConf() != null) { String appender = isLogical ? " (" + operator.getOperatorId() + ")" : ""; - JSONObject jsonOut = outputPlan(operator.getConf(), out, extended, + ObjectNode jsonOut = outputPlan(operator.getConf(), out, extended, jsonOutput, jsonOutput ? 0 : indent, appender); if (this.work != null && this.work.isUserLevelExplain()) { - if (jsonOut != null && jsonOut.length() > 0) { - ((JSONObject) jsonOut.get(JSONObject.getNames(jsonOut)[0])).put("OperatorId:", + if (jsonOut != null && jsonOut.size() > 0) { + ((ObjectNode) jsonOut.get(jsonOut.getFieldNames().next())).put("OperatorId:", operator.getOperatorId()); } } @@ -806,9 +797,16 @@ private JSONObject outputPlan(Object work, PrintStream out, if (operator.getChildOperators() != null) { int cindent = jsonOutput ? 
0 : indent + 2; for (Operator op : operator.getChildOperators()) { - JSONObject jsonOut = outputPlan(op, out, extended, jsonOutput, cindent); + JsonNode jsonOut = outputPlan(op, out, extended, jsonOutput, cindent); if (jsonOutput) { - ((JSONObject)json.get(JSONObject.getNames(json)[0])).accumulate("children", jsonOut); + ObjectNode firstFieldNode = (ObjectNode) json.get(json.getFieldNames().next()); + ArrayNode arrayNode; + if (firstFieldNode.has("children")) { + arrayNode = (ArrayNode) firstFieldNode.get("children"); + } else { + arrayNode = firstFieldNode.putArray("children"); + } + arrayNode.add(jsonOut); } } } @@ -959,7 +957,7 @@ private JSONObject outputPlan(Object work, PrintStream out, out.print(header); } - JSONObject jsonOut = outputMap(mp, !skipHeader && !emptyHeader, out, extended, jsonOutput, ind); + ObjectNode jsonOut = outputMap(mp, !skipHeader && !emptyHeader, out, extended, jsonOutput, ind); if (jsonOutput && !mp.isEmpty()) { json.put(header, jsonOut); } @@ -973,7 +971,7 @@ private JSONObject outputPlan(Object work, PrintStream out, out.print(header); } - JSONArray jsonOut = outputList(l, out, !skipHeader && !emptyHeader, extended, jsonOutput, ind); + ArrayNode jsonOut = outputList(l, out, !skipHeader && !emptyHeader, extended, jsonOutput, ind); if (jsonOutput && !l.isEmpty()) { json.put(header, jsonOut); @@ -987,12 +985,14 @@ private JSONObject outputPlan(Object work, PrintStream out, if (!skipHeader && out != null) { out.println(header); } - JSONObject jsonOut = outputPlan(val, out, extended, jsonOutput, ind); - if (jsonOutput && jsonOut != null && jsonOut.length() != 0) { + ObjectNode jsonOut = outputPlan(val, out, extended, jsonOutput, ind); + if (jsonOutput && jsonOut != null && jsonOut.size() != 0) { if (!skipHeader) { json.put(header, jsonOut); } else { - for(String k: JSONObject.getNames(jsonOut)) { + Iterator iter = jsonOut.getFieldNames(); + while (iter.hasNext()) { + String k = iter.next(); json.put(k, jsonOut.get(k)); } } @@ -1008,7 +1008,7 @@ private JSONObject outputPlan(Object work, PrintStream out, if (jsonOutput) { if (keyJSONObject != null) { - JSONObject ret = new JSONObject(new LinkedHashMap<>()); + ObjectNode ret = objectMapper.createObjectNode(); ret.put(keyJSONObject, json); return ret; } @@ -1035,8 +1035,8 @@ private boolean shouldPrint(Explain exp, Object val) { return true; } - private JSONObject outputPlan(Task task, - PrintStream out, JSONObject parentJSON, boolean extended, + private ObjectNode outputPlan(Task task, + PrintStream out, ObjectNode parentJSON, boolean extended, boolean jsonOutput, int indent) throws Exception { if (out != null) { @@ -1048,7 +1048,7 @@ private JSONObject outputPlan(Task task, // Start by getting the work part of the task and call the output plan for // the work - JSONObject jsonOutputPlan = outputPlan(task.getWork(), out, extended, + JsonNode jsonOutputPlan = outputPlan(task.getWork(), out, extended, jsonOutput, jsonOutput ? 0 : indent + 2); if (out != null) { @@ -1061,12 +1061,12 @@ private JSONObject outputPlan(Task task, return null; } - private JSONObject outputDependencies(Task task, - PrintStream out, JSONObject parentJson, boolean jsonOutput, boolean taskType, int indent) + private ObjectNode outputDependencies(Task task, + PrintStream out, ObjectNode parentJson, boolean jsonOutput, boolean taskType, int indent) throws Exception { boolean first = true; - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + ObjectNode json = jsonOutput ? 
objectMapper.createObjectNode() : null; if (out != null) { out.print(indentString(indent)); out.print(task.getId()); @@ -1152,7 +1152,7 @@ private JSONObject outputDependencies(Task task, } public String outputAST(String treeString, PrintStream out, - boolean jsonOutput, int indent) throws JSONException { + boolean jsonOutput, int indent) { if (out != null) { out.print(indentString(indent)); out.println("ABSTRACT SYNTAX TREE:"); @@ -1163,7 +1163,7 @@ public String outputAST(String treeString, PrintStream out, return jsonOutput ? treeString : null; } - public JSONObject outputDependencies(PrintStream out, boolean jsonOutput, + public ObjectNode outputDependencies(PrintStream out, boolean jsonOutput, boolean appendTaskType, List tasks) throws Exception { @@ -1171,9 +1171,9 @@ public JSONObject outputDependencies(PrintStream out, boolean jsonOutput, out.println("STAGE DEPENDENCIES:"); } - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + ObjectNode json = jsonOutput ? objectMapper.createObjectNode() : null; for (Task task : tasks) { - JSONObject jsonOut = outputDependencies(task, out, json, jsonOutput, appendTaskType, 2); + ObjectNode jsonOut = outputDependencies(task, out, json, jsonOutput, appendTaskType, 2); if (jsonOutput && jsonOut != null) { json.put(task.getId(), jsonOut); } @@ -1182,7 +1182,7 @@ public JSONObject outputDependencies(PrintStream out, boolean jsonOutput, return jsonOutput ? json : null; } - public JSONObject outputStagePlans(PrintStream out, List tasks, + public ObjectNode outputStagePlans(PrintStream out, List tasks, boolean jsonOutput, boolean isExtended) throws Exception { @@ -1190,7 +1190,7 @@ public JSONObject outputStagePlans(PrintStream out, List tasks, out.println("STAGE PLANS:"); } - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + ObjectNode json = jsonOutput ? 
objectMapper.createObjectNode() : null; for (Task task : tasks) { outputPlan(task, out, json, isExtended, jsonOutput, 2); } diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java index 55b922b..3dd71d7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java @@ -21,7 +21,6 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -49,7 +48,9 @@ import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hive.common.util.ShutdownHookManager; -import org.json.JSONObject; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.map.ObjectMapper; +import org.codehaus.jackson.node.ObjectNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -69,6 +70,7 @@ private static TimelineClient timelineClient; private enum EntityTypes { HIVE_QUERY_ID }; private enum EventTypes { QUERY_SUBMITTED, QUERY_COMPLETED }; + private ObjectMapper objectMapper = new ObjectMapper(); private enum OtherInfoTypes { QUERY, STATUS, TEZ, MAPRED, INVOKER_INFO, SESSION_ID, THREAD_NAME, VERSION, @@ -184,7 +186,7 @@ public void run() { ExplainTask explain = (ExplainTask) TaskFactory.get(work, conf); explain.initialize(queryState, plan, null, null); String query = plan.getQueryStr(); - JSONObject explainPlan = explain.getJSONPlan(null, work); + JsonNode explainPlan = explain.getJSONPlan(null, work); String logID = conf.getLogIdVar(hookContext.getSessionId()); List tablesRead = getTablesFromEntitySet(hookContext.getInputs()); List tablesWritten = getTablesFromEntitySet(hookContext.getOutputs()); @@ -256,14 +258,14 @@ protected ExecutionMode getExecutionMode(QueryPlan plan) { return mode; } - TimelineEntity createPreHookEvent(String queryId, String query, JSONObject explainPlan, + TimelineEntity createPreHookEvent(String queryId, String query, JsonNode explainPlan, long startTime, String user, String requestuser, int numMrJobs, int numTezJobs, String opId, String clientIpAddress, String hiveInstanceAddress, String hiveInstanceType, String sessionID, String logID, String threadId, String executionMode, List tablesRead, List tablesWritten, HiveConf conf, ApplicationId llapAppId) throws Exception { - JSONObject queryObj = new JSONObject(new LinkedHashMap<>()); + ObjectNode queryObj = objectMapper.createObjectNode(); queryObj.put("queryText", query); queryObj.put("queryPlan", explainPlan); @@ -278,7 +280,7 @@ TimelineEntity createPreHookEvent(String queryId, String query, JSONObject expla for (Map.Entry setting : conf) { confMap.put(setting.getKey(), setting.getValue()); } - JSONObject confObj = new JSONObject((Map) confMap); + ObjectNode confObj = (ObjectNode) objectMapper.valueToTree(confMap); TimelineEntity atsEntity = new TimelineEntity(); atsEntity.setEntityId(queryId); @@ -345,7 +347,7 @@ TimelineEntity createPostHookEvent(String queryId, long stopTime, String user, S atsEntity.addOtherInfo(OtherInfoTypes.STATUS.name(), success); // Perf times - JSONObject perfObj = new JSONObject(new LinkedHashMap<>()); + ObjectNode perfObj = objectMapper.createObjectNode(); for (String key : perfLogger.getEndTimes().keySet()) { perfObj.put(key, perfLogger.getDuration(key)); }
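
Reviewer note (illustrative sketch, not part of the patch): the recurring substitution in this change is from org.json's JSONObject API to the Jackson 1.x tree API -- JSONObject.getNames() loops become Iterator-based getFieldNames() loops, and getString(key) becomes get(key).getTextValue(). The minimal standalone example below shows that pattern; the class name and the sample JSON are made up for illustration.

import java.util.Iterator;

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;

public class JacksonTreeSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Hypothetical stand-in for one "Edges:" object of an EXPLAIN plan.
    JsonNode edges = mapper.readTree(
        "{\"Reducer 2\": {\"parent\": \"Map 1\", \"type\": \"SIMPLE_EDGE\"}}");

    // org.json:  for (String to : JSONObject.getNames(edges)) { ... }
    // Jackson:   iterate the field names explicitly.
    Iterator<String> fieldNames = edges.getFieldNames();
    while (fieldNames.hasNext()) {
      String to = fieldNames.next();
      JsonNode edge = edges.get(to);
      // org.json edge.getString("parent") -> Jackson edge.get("parent").getTextValue()
      System.out.println(to + " <- " + edge.get("parent").getTextValue()
          + " (" + edge.get("type").getTextValue() + ")");
    }
  }
}

getTextValue() (or asText()) returns the raw string, while toString() on a text node keeps the surrounding quotes, which is presumably why the patch prefers getTextValue() for values that end up concatenated into printed output.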
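
A second recurring pattern, also shown as an illustrative sketch rather than patch content: org.json's JSONObject.accumulate() has no direct Jackson 1.x equivalent, so ExplainTask now creates or reuses an ArrayNode per key. The keys and values below are made up.

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.ObjectNode;

public class AccumulateSketch {
  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode json = mapper.createObjectNode();

    for (String parent : new String[] { "Map 1", "Map 3" }) {
      ObjectNode dep = mapper.createObjectNode();  // one dependency entry
      dep.put("parent", parent);
      dep.put("type", "SIMPLE_EDGE");

      // Equivalent of the old json.accumulate("Reducer 2", dep):
      ArrayNode deps = json.has("Reducer 2")
          ? (ArrayNode) json.get("Reducer 2")
          : json.putArray("Reducer 2");
      deps.add(dep);
    }
    System.out.println(json.toString());
  }
}

One behavioral difference worth checking: accumulate() stores a bare value the first time a key is seen and only switches to an array when the key repeats, whereas the get-or-create ArrayNode replacement always produces an array, so single-dependency entries in the formatted EXPLAIN output appear to change shape slightly.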