diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/JSONArrayAdaptor.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/JSONArrayAdaptor.java
new file mode 100644
index 0000000..068f88b
--- /dev/null
+++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/JSONArrayAdaptor.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain;
+
+import org.codehaus.jackson.node.ArrayNode;
+
+public class JSONArrayAdaptor {
+  ArrayNode arrayNode;
+
+  JSONArrayAdaptor(ArrayNode arrayNode) {
+    this.arrayNode = arrayNode;
+  }
+
+  public void put(Object o) {
+    if (o instanceof String) {
+      arrayNode.add((String) o);
+    } else if (o instanceof JSONObjectAdaptor) {
+      arrayNode.add(((JSONObjectAdaptor) o).objectNode);
+    } else {
+      throw new RuntimeException();
+    }
+  }
+
+  public int length() {
+    return arrayNode.size();
+  }
+
+  public JSONObjectAdaptor getJSONObject(int index) {
+    return new JSONObjectAdaptor(arrayNode.get(index));
+  }
+
+  @Override
+  public String toString() {
+    return arrayNode.toString();
+  }
+}
diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/JSONObjectAdaptor.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/JSONObjectAdaptor.java
new file mode 100644
index 0000000..9c93ee9
--- /dev/null
+++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/JSONObjectAdaptor.java
@@ -0,0 +1,179 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain;
+
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.node.ArrayNode;
+import org.codehaus.jackson.node.ObjectNode;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+public class JSONObjectAdaptor {
+  JsonNode objectNode;
+
+  JSONObjectAdaptor(JsonNode objectNode) {
+    this.objectNode = objectNode;
+  }
+
+  public void put(String fieldName, JSONObjectAdaptor value) {
+    ((ObjectNode) objectNode).put(fieldName, value.objectNode);
+  }
+
+  public void put(String fieldName, boolean value) {
+    ((ObjectNode) objectNode).put(fieldName, value);
+  }
+
+  public void put(String fieldName, long value) {
+    ((ObjectNode) objectNode).put(fieldName, value);
+  }
+
+  public void put(String fieldName, String value) {
+    ((ObjectNode) objectNode).put(fieldName, value);
+  }
+
+  public void put(String fieldName, List value) {
+    ArrayNode arrayNode = ((ObjectNode) objectNode).putArray(fieldName);
+    for (Object object : value) {
+      if (object instanceof String) {
+        arrayNode.add((String)object);
+      } else if (object instanceof Map) {
+        ObjectNode mapNode = arrayNode.addObject();
+        Map map = (Map) object;
+        for (Map.Entry entry : map.entrySet()) {
+          mapNode.put(entry.getKey().toString(), entry.getValue().toString());
+        }
+      }
+    }
+  }
+
+  public void put(String fieldName, Object value) {
+    Class clazz = value.getClass();
+    if (clazz == JSONObjectAdaptor.class) {
+      JSONObjectAdaptor jsonObject = (JSONObjectAdaptor) value;
+      ((ObjectNode) objectNode).put(fieldName, jsonObject.objectNode);
+    } else if (clazz == JSONArrayAdaptor.class) {
+      JSONArrayAdaptor jsonArray = (JSONArrayAdaptor) value;
+      ((ObjectNode) objectNode).put(fieldName, jsonArray.arrayNode);
+    } else {
+      ((ObjectNode) objectNode).put(fieldName, value.toString());
+    }
+  }
+
+  public void accumulate(String fieldName, JSONObjectAdaptor value) {
+    if (objectNode.has(fieldName)) {
+      JsonNode field = objectNode.get(fieldName);
+      if (field.isArray()) {
+        ((ArrayNode) objectNode.get(fieldName)).add(value.objectNode);
+      } else {
+        ArrayNode arrayNode = ((ObjectNode) objectNode).putArray(fieldName);
+        arrayNode.add(field);
+        arrayNode.add(value.objectNode);
+      }
+    } else {
+      ((ObjectNode) objectNode).put(fieldName, value.objectNode);
+    }
+  }
+
+  public static String[] getNames(JSONObjectAdaptor jsonObject) {
+    int size = jsonObject.length();
+    String[] strings = new String[size];
+    Iterator<String> fieldNamesIter = jsonObject.objectNode.getFieldNames();
+    for (int i = 0; i < size; i++) {
+      strings[i] = fieldNamesIter.next();
+    }
+    return strings;
+  }
+
+  public Object get(String fieldName) {
+    JsonNode field = objectNode.get(fieldName);
+    if (field.isValueNode()) {
+      if (field.isBigDecimal()) {
+        return field.getDecimalValue();
+      } else if (field.isBigInteger()) {
+        return field.getBigIntegerValue();
+      } else if (field.isBinary()) {
+        try {
+          return field.getBinaryValue();
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+      } else if (field.isBoolean()) {
+        return field.getBooleanValue();
+      } else if (field.isDouble()) {
+        return field.getDoubleValue();
+      } else if (field.isFloatingPointNumber()) {
+        return field.getDoubleValue();
+      } else if (field.isInt()) {
+        return field.getIntValue();
+      } else if (field.isLong()) {
+        return field.getLongValue();
+      } else if (field.isTextual()) {
+        return field.getTextValue();
+      } else if (field.isNull()) {
+        return null;
+      }
+    } else if (field.isArray()) {
+      return new JSONArrayAdaptor((ArrayNode) field);
+    } else {
+      return new JSONObjectAdaptor(field);
+    }
+    return null;
+  }
+
+  public int length() {
+    return objectNode.size();
+  }
+
+  public JSONArrayAdaptor getJSONArray(String fieldName) {
+    return new JSONArrayAdaptor((ArrayNode) objectNode.get(fieldName));
+  }
+
+
+  public String getString(String fieldName) {
+    JsonNode field = objectNode.get(fieldName);
+    if (field.isTextual()) {
+      return field.getTextValue();
+    }
+    Object object = get(fieldName);
+    if (object == null) {
+      return null;
+    }
+    return object.toString();
+  }
+
+  public JSONObjectAdaptor getJSONObject(String fieldName) {
+    return new JSONObjectAdaptor(objectNode.get(fieldName));
+  }
+
+  public Iterator keys() {
+    return objectNode.getFieldNames();
+  }
+
+  public boolean has(String fieldName) {
+    return objectNode.has(fieldName);
+  }
+
+  @Override
+  public String toString() {
+    return objectNode.toString();
+  }
+}
diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/JSONObjectAdaptorFactory.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/JSONObjectAdaptorFactory.java
new file mode 100644
index 0000000..3e17b88
--- /dev/null
+++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/JSONObjectAdaptorFactory.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain;
+
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.node.ArrayNode;
+
+import java.io.IOException;
+import java.util.List;
+
+public class JSONObjectAdaptorFactory {
+  private ObjectMapper objectMapper = new ObjectMapper();
+
+  public JSONObjectAdaptor node() {
+    return new JSONObjectAdaptor(objectMapper.createObjectNode());
+  }
+
+  public JSONObjectAdaptor node(String string) {
+    try {
+      return new JSONObjectAdaptor(new ObjectMapper().readTree(string));
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  public JSONArrayAdaptor array() {
+    return new JSONArrayAdaptor(objectMapper.createArrayNode());
+  }
+
+  public JSONArrayAdaptor array(List<String> strings) {
+    ArrayNode arrayNode = objectMapper.createArrayNode();
+    for (String string : strings) {
+      arrayNode.add(string);
+    }
+    return new JSONArrayAdaptor(arrayNode);
+  }
+}
diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java
index 1a0d8e1..84ce903 100644
--- common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java
+++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java
@@ -20,12 +20,10 @@
 import java.io.PrintStream;
-import org.json.JSONObject;
-
 /**
  * JsonParser is the interface for classes that print a JSONObject
  * into outputStream.
*/ public interface JsonParser { - public void print(JSONObject inputObject, PrintStream outputStream) throws Exception; + public void print(JSONObjectAdaptor inputObject, PrintStream outputStream) throws Exception; } diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java index 718791c..0b66fe2 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java @@ -18,18 +18,13 @@ package org.apache.hadoop.hive.common.jsonexplain.tez; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; -import java.util.Set; +import org.apache.hadoop.hive.common.jsonexplain.JSONArrayAdaptor; +import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptor; +import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptorFactory; import org.apache.hadoop.hive.common.jsonexplain.tez.Vertex.VertexType; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; public final class Op { public final String name; @@ -39,8 +34,9 @@ public Op parent; public final List children; public final Map attrs; + private final JSONObjectAdaptorFactory factory = new JSONObjectAdaptorFactory(); // the jsonObject for this operator - public final JSONObject opObject; + public final JSONObjectAdaptor opObject; // the vertex that this operator belongs to public final Vertex vertex; // the vertex that this operator output to @@ -53,8 +49,7 @@ }; public Op(String name, String id, String outputVertexName, List children, - Map attrs, JSONObject opObject, Vertex vertex, TezJsonParser tezJsonParser) - throws JSONException { + Map attrs, JSONObjectAdaptor opObject, Vertex vertex, TezJsonParser tezJsonParser) { super(); this.name = name; this.operatorId = id; @@ -86,11 +81,11 @@ private OpType deriveOpType(String operatorId) { private void inlineJoinOp() throws Exception { // inline map join operator if (this.type == OpType.MAPJOIN) { - JSONObject joinObj = opObject.getJSONObject(this.name); + JSONObjectAdaptor joinObj = opObject.getJSONObject(this.name); // get the map for posToVertex - JSONObject verticeObj = joinObj.getJSONObject("input vertices:"); + JSONObjectAdaptor verticeObj = joinObj.getJSONObject("input vertices:"); Map posToVertex = new LinkedHashMap<>(); - for (String pos : JSONObject.getNames(verticeObj)) { + for (String pos : JSONObjectAdaptor.getNames(verticeObj)) { String vertexName = verticeObj.getString(pos); // update the connection Connection c = null; @@ -108,7 +103,7 @@ private void inlineJoinOp() throws Exception { // update the attrs this.attrs.remove("input vertices:"); // update the keys to use operator name - JSONObject keys = joinObj.getJSONObject("keys:"); + JSONObjectAdaptor keys = joinObj.getJSONObject("keys:"); // find out the vertex for the big table Set parentVertexes = new HashSet<>(); for (Connection connection : vertex.parentConnections) { @@ -117,7 +112,7 @@ private void inlineJoinOp() throws Exception { parentVertexes.removeAll(posToVertex.values()); Map posToOpId = new LinkedHashMap<>(); if (keys.length() != 0) { - for (String key : JSONObject.getNames(keys)) { + for (String key : JSONObjectAdaptor.getNames(keys)) { // first search from the posToVertex if (posToVertex.containsKey(key)) { Vertex vertex = posToVertex.get(key); @@ -170,11 +165,11 @@ else if 
(parentVertexes.size() == 1) { } this.attrs.remove("keys:"); StringBuffer sb = new StringBuffer(); - JSONArray conditionMap = joinObj.getJSONArray("condition map:"); + JSONArrayAdaptor conditionMap = joinObj.getJSONArray("condition map:"); for (int index = 0; index < conditionMap.length(); index++) { - JSONObject cond = conditionMap.getJSONObject(index); + JSONObjectAdaptor cond = conditionMap.getJSONObject(index); String k = (String) cond.keys().next(); - JSONObject condObject = new JSONObject((String)cond.get(k)); + JSONObjectAdaptor condObject = factory.node((String)cond.get(k)); String type = condObject.getString("type"); String left = condObject.getString("left"); String right = condObject.getString("right"); @@ -235,11 +230,11 @@ else if (parentVertexes.size() == 1) { posToOpId.put(v.tag, v.rootOps.get(0).operatorId); } } - JSONObject joinObj = opObject.getJSONObject(this.name); + JSONObjectAdaptor joinObj = opObject.getJSONObject(this.name); // update the keys to use operator name - JSONObject keys = joinObj.getJSONObject("keys:"); + JSONObjectAdaptor keys = joinObj.getJSONObject("keys:"); if (keys.length() != 0) { - for (String key : JSONObject.getNames(keys)) { + for (String key : JSONObjectAdaptor.getNames(keys)) { if (!posToOpId.containsKey(key)) { throw new Exception( "Can not find the source operator on one of the branches of merge join."); @@ -255,11 +250,11 @@ else if (parentVertexes.size() == 1) { // update the attrs this.attrs.remove("keys:"); StringBuffer sb = new StringBuffer(); - JSONArray conditionMap = joinObj.getJSONArray("condition map:"); + JSONArrayAdaptor conditionMap = joinObj.getJSONArray("condition map:"); for (int index = 0; index < conditionMap.length(); index++) { - JSONObject cond = conditionMap.getJSONObject(index); + JSONObjectAdaptor cond = conditionMap.getJSONObject(index); String k = (String) cond.keys().next(); - JSONObject condObject = new JSONObject((String)cond.get(k)); + JSONObjectAdaptor condObject = factory.node((String)cond.get(k)); String type = condObject.getString("type"); String left = condObject.getString("left"); String right = condObject.getString("right"); diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java index 63937f8..d6bba4b 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java @@ -26,10 +26,10 @@ import java.util.TreeMap; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.jsonexplain.JSONArrayAdaptor; +import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptor; +import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptorFactory; import org.apache.hadoop.hive.common.jsonexplain.tez.Vertex.VertexType; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; public final class Stage { //external name is used to show at the console @@ -48,6 +48,7 @@ // some stage may contain only a single operator, e.g., create table operator, // fetch operator. 
Op op; + JSONObjectAdaptorFactory factory = new JSONObjectAdaptorFactory(); public Stage(String name, TezJsonParser tezJsonParser) { super(); @@ -56,7 +57,7 @@ public Stage(String name, TezJsonParser tezJsonParser) { parser = tezJsonParser; } - public void addDependency(JSONObject object, Map stages) throws JSONException { + public void addDependency(JSONObjectAdaptor object, Map stages) { if (object.has("DEPENDENT STAGES")) { String names = object.getString("DEPENDENT STAGES"); for (String name : names.split(",")) { @@ -84,24 +85,24 @@ public void addDependency(JSONObject object, Map stages) throws J * vertices and edges Else we need to directly extract operators * and/or attributes. */ - public void extractVertex(JSONObject object) throws Exception { + public void extractVertex(JSONObjectAdaptor object) throws Exception { if (object.has("Tez")) { this.tezStageDependency = new TreeMap<>(); - JSONObject tez = (JSONObject) object.get("Tez"); - JSONObject vertices = tez.getJSONObject("Vertices:"); + JSONObjectAdaptor tez = (JSONObjectAdaptor) object.get("Tez"); + JSONObjectAdaptor vertices = tez.getJSONObject("Vertices:"); if (tez.has("Edges:")) { - JSONObject edges = tez.getJSONObject("Edges:"); + JSONObjectAdaptor edges = tez.getJSONObject("Edges:"); // iterate for the first time to get all the vertices - for (String to : JSONObject.getNames(edges)) { + for (String to : JSONObjectAdaptor.getNames(edges)) { vertexs.put(to, new Vertex(to, vertices.getJSONObject(to), parser)); } // iterate for the second time to get all the vertex dependency - for (String to : JSONObject.getNames(edges)) { + for (String to : JSONObjectAdaptor.getNames(edges)) { Object o = edges.get(to); Vertex v = vertexs.get(to); // 1 to 1 mapping - if (o instanceof JSONObject) { - JSONObject obj = (JSONObject) o; + if (o instanceof JSONObjectAdaptor) { + JSONObjectAdaptor obj = (JSONObjectAdaptor) o; String parent = obj.getString("parent"); Vertex parentVertex = vertexs.get(parent); if (parentVertex == null) { @@ -121,10 +122,10 @@ public void extractVertex(JSONObject object) throws Exception { this.tezStageDependency.put(v, Arrays.asList(new Connection(type, parentVertex))); } else { // 1 to many mapping - JSONArray from = (JSONArray) o; + JSONArrayAdaptor from = (JSONArrayAdaptor) o; List list = new ArrayList<>(); for (int index = 0; index < from.length(); index++) { - JSONObject obj = from.getJSONObject(index); + JSONObjectAdaptor obj = from.getJSONObject(index); String parent = obj.getString("parent"); Vertex parentVertex = vertexs.get(parent); if (parentVertex == null) { @@ -146,7 +147,7 @@ public void extractVertex(JSONObject object) throws Exception { } } } else { - for (String vertexName : JSONObject.getNames(vertices)) { + for (String vertexName : JSONObjectAdaptor.getNames(vertices)) { vertexs.put(vertexName, new Vertex(vertexName, vertices.getJSONObject(vertexName), parser)); } } @@ -158,7 +159,7 @@ public void extractVertex(JSONObject object) throws Exception { } } } else { - String[] names = JSONObject.getNames(object); + String[] names = JSONObjectAdaptor.getNames(object); if (names != null) { for (String name : names) { if (name.contains("Operator")) { @@ -181,25 +182,25 @@ public void extractVertex(JSONObject object) throws Exception { * This method address the create table operator, fetch operator, * etc */ - Op extractOp(String opName, JSONObject opObj) throws Exception { + Op extractOp(String opName, JSONObjectAdaptor opObj) throws Exception { Map attrs = new TreeMap<>(); Vertex v = null; if 
(opObj.length() > 0) { - String[] names = JSONObject.getNames(opObj); + String[] names = JSONObjectAdaptor.getNames(opObj); for (String name : names) { Object o = opObj.get(name); if (isPrintable(o) && !o.toString().isEmpty()) { attrs.put(name, o.toString()); - } else if (o instanceof JSONObject) { - JSONObject attrObj = (JSONObject) o; + } else if (o instanceof JSONObjectAdaptor) { + JSONObjectAdaptor attrObj = (JSONObjectAdaptor) o; if (attrObj.length() > 0) { if (name.equals("Processor Tree:")) { - JSONObject object = new JSONObject(new LinkedHashMap<>()); + JSONObjectAdaptor object = factory.node(); object.put(name, attrObj); v = new Vertex(null, object, parser); v.extractOpTree(); } else { - for (String attrName : JSONObject.getNames(attrObj)) { + for (String attrName : JSONObjectAdaptor.getNames(attrObj)) { if (!attrObj.get(attrName).toString().isEmpty()) { attrs.put(attrName, attrObj.get(attrName).toString()); } diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java index ea86048..c083301 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java @@ -27,8 +27,8 @@ import java.util.Map.Entry; import java.util.Set; +import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptor; import org.apache.hadoop.hive.common.jsonexplain.JsonParser; -import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,25 +46,25 @@ public TezJsonParser() { LOG = LoggerFactory.getLogger(this.getClass().getName()); } - public void extractStagesAndPlans(JSONObject inputObject) throws Exception { + public void extractStagesAndPlans(JSONObjectAdaptor inputObject) throws Exception { // extract stages - JSONObject dependency = inputObject.getJSONObject("STAGE DEPENDENCIES"); + JSONObjectAdaptor dependency = inputObject.getJSONObject("STAGE DEPENDENCIES"); if (dependency != null && dependency.length() > 0) { // iterate for the first time to get all the names of stages. - for (String stageName : JSONObject.getNames(dependency)) { + for (String stageName : JSONObjectAdaptor.getNames(dependency)) { this.stages.put(stageName, new Stage(stageName, this)); } // iterate for the second time to get all the dependency. 
- for (String stageName : JSONObject.getNames(dependency)) { - JSONObject dependentStageNames = dependency.getJSONObject(stageName); + for (String stageName : JSONObjectAdaptor.getNames(dependency)) { + JSONObjectAdaptor dependentStageNames = dependency.getJSONObject(stageName); this.stages.get(stageName).addDependency(dependentStageNames, this.stages); } } // extract stage plans - JSONObject stagePlans = inputObject.getJSONObject("STAGE PLANS"); + JSONObjectAdaptor stagePlans = inputObject.getJSONObject("STAGE PLANS"); if (stagePlans != null && stagePlans.length() > 0) { - for (String stageName : JSONObject.getNames(stagePlans)) { - JSONObject stagePlan = stagePlans.getJSONObject(stageName); + for (String stageName : JSONObjectAdaptor.getNames(stagePlans)) { + JSONObjectAdaptor stagePlan = stagePlans.getJSONObject(stageName); this.stages.get(stageName).extractVertex(stagePlan); } } @@ -99,7 +99,7 @@ public static String prefixString(int indentFlag, String tail) { } @Override - public void print(JSONObject inputObject, PrintStream outputStream) throws Exception { + public void print(JSONObjectAdaptor inputObject, PrintStream outputStream) throws Exception { LOG.info("JsonParser is parsing:" + inputObject.toString()); this.extractStagesAndPlans(inputObject); Printer printer = new Printer(); diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java index 3d559bd..6fe8852 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java @@ -20,20 +20,14 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.TreeMap; +import org.apache.hadoop.hive.common.jsonexplain.JSONArrayAdaptor; +import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptor; import org.apache.hadoop.hive.common.jsonexplain.tez.Op.OpType; -import org.apache.hadoop.util.hash.Hash; -import org.codehaus.jackson.JsonParseException; -import org.codehaus.jackson.map.JsonMappingException; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; public final class Vertex implements Comparable{ public final String name; @@ -44,7 +38,7 @@ // vertex's children vertex. 
public final List children = new ArrayList<>(); // the jsonObject for this vertex - public final JSONObject vertexObject; + public final JSONObjectAdaptor vertexObject; // whether this vertex is dummy (which does not really exists but is created), // e.g., a dummy vertex for a mergejoin branch public boolean dummy; @@ -72,7 +66,7 @@ }; public EdgeType edgeType; - public Vertex(String name, JSONObject vertexObject, TezJsonParser tezJsonParser) { + public Vertex(String name, JSONObjectAdaptor vertexObject, TezJsonParser tezJsonParser) { super(); this.name = name; if (this.name != null) { @@ -93,23 +87,13 @@ public Vertex(String name, JSONObject vertexObject, TezJsonParser tezJsonParser) parser = tezJsonParser; } - public void addDependency(Connection connection) throws JSONException { + public void addDependency(Connection connection) { this.parentConnections.add(connection); } - /** - * @throws JSONException - * @throws JsonParseException - * @throws JsonMappingException - * @throws IOException - * @throws Exception - * We assume that there is a single top-level Map Operator Tree or a - * Reduce Operator Tree in a vertex - */ - public void extractOpTree() throws JSONException, JsonParseException, JsonMappingException, - IOException, Exception { + public void extractOpTree() throws IOException, Exception { if (vertexObject.length() != 0) { - for (String key : JSONObject.getNames(vertexObject)) { + for (String key : JSONObjectAdaptor.getNames(vertexObject)) { if (key.equals("Map Operator Tree:")) { extractOp(vertexObject.getJSONArray(key).getJSONObject(0)); } else if (key.equals("Reduce Operator Tree:") || key.equals("Processor Tree:")) { @@ -117,16 +101,16 @@ public void extractOpTree() throws JSONException, JsonParseException, JsonMappin } else if (key.equals("Join:")) { // this is the case when we have a map-side SMB join // one input of the join is treated as a dummy vertex - JSONArray array = vertexObject.getJSONArray(key); + JSONArrayAdaptor array = vertexObject.getJSONArray(key); for (int index = 0; index < array.length(); index++) { - JSONObject mpOpTree = array.getJSONObject(index); + JSONObjectAdaptor mpOpTree = array.getJSONObject(index); Vertex v = new Vertex(null, mpOpTree, parser); v.extractOpTree(); v.dummy = true; mergeJoinDummyVertexs.add(v); } } else if (key.equals("Merge File Operator")) { - JSONObject opTree = vertexObject.getJSONObject(key); + JSONObjectAdaptor opTree = vertexObject.getJSONObject(key); if (opTree.has("Map Operator Tree:")) { extractOp(opTree.getJSONArray("Map Operator Tree:").getJSONObject(0)); } else { @@ -135,8 +119,8 @@ public void extractOpTree() throws JSONException, JsonParseException, JsonMappin } else if (key.equals("Execution mode:")) { executionMode = " " + vertexObject.getString(key); } else if (key.equals("tagToInput:")) { - JSONObject tagToInput = vertexObject.getJSONObject(key); - for (String tag : JSONObject.getNames(tagToInput)) { + JSONObjectAdaptor tagToInput = vertexObject.getJSONObject(key); + for (String tag : JSONObjectAdaptor.getNames(tagToInput)) { this.tagToInput.put(tag, (String) tagToInput.get(tag)); } } else if (key.equals("tag:")) { @@ -148,40 +132,27 @@ public void extractOpTree() throws JSONException, JsonParseException, JsonMappin } } - /** - * @param operator - * @param parent - * @return - * @throws JSONException - * @throws JsonParseException - * @throws JsonMappingException - * @throws IOException - * @throws Exception - * assumption: each operator only has one parent but may have many - * children - */ - Op 
extractOp(JSONObject operator) throws JSONException, JsonParseException, JsonMappingException, - IOException, Exception { - String[] names = JSONObject.getNames(operator); + Op extractOp(JSONObjectAdaptor operator) throws IOException, Exception { + String[] names = JSONObjectAdaptor.getNames(operator); if (names.length != 1) { throw new Exception("Expect only one operator in " + operator.toString()); } else { String opName = names[0]; - JSONObject attrObj = (JSONObject) operator.get(opName); + JSONObjectAdaptor attrObj = (JSONObjectAdaptor) operator.get(opName); Map attrs = new TreeMap<>(); List children = new ArrayList<>(); String id = null; String outputVertexName = null; - for (String attrName : JSONObject.getNames(attrObj)) { + for (String attrName : JSONObjectAdaptor.getNames(attrObj)) { if (attrName.equals("children")) { Object childrenObj = attrObj.get(attrName); - if (childrenObj instanceof JSONObject) { - if (((JSONObject) childrenObj).length() != 0) { - children.add(extractOp((JSONObject) childrenObj)); + if (childrenObj instanceof JSONObjectAdaptor) { + if (((JSONObjectAdaptor) childrenObj).length() != 0) { + children.add(extractOp((JSONObjectAdaptor) childrenObj)); } - } else if (childrenObj instanceof JSONArray) { - if (((JSONArray) childrenObj).length() != 0) { - JSONArray array = ((JSONArray) childrenObj); + } else if (childrenObj instanceof JSONArrayAdaptor) { + if (((JSONArrayAdaptor) childrenObj).length() != 0) { + JSONArrayAdaptor array = ((JSONArrayAdaptor) childrenObj); for (int index = 0; index < array.length(); index++) { children.add(extractOp(array.getJSONObject(index))); } @@ -214,8 +185,7 @@ Op extractOp(JSONObject operator) throws JSONException, JsonParseException, Json } } - public void print(Printer printer, int indentFlag, String type, Vertex callingVertex) - throws JSONException, Exception { + public void print(Printer printer, int indentFlag, String type, Vertex callingVertex) throws Exception { // print vertexname if (parser.printSet.contains(this) && !hasMultiReduceOp) { if (type != null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java index 74cec3e..b010cdc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java @@ -20,8 +20,6 @@ import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME; -import org.apache.commons.lang3.tuple.ImmutablePair; - import java.io.OutputStream; import java.io.PrintStream; import java.io.Serializable; @@ -37,74 +35,37 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; +import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.common.ObjectPair; -import org.apache.hadoop.hive.common.jsonexplain.JsonParser; -import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory; +import org.apache.hadoop.hive.common.jsonexplain.*; +import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptor; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.Validator.StringSet; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.DriverContext; -import org.apache.hadoop.hive.ql.exec.spark.SparkTask; -import org.apache.hadoop.hive.ql.exec.tez.TezTask; -import 
org.apache.hadoop.hive.ql.exec.vector.VectorGroupByOperator; -import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression; -import org.apache.hadoop.hive.ql.plan.MapJoinDesc; -import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc; import org.apache.hadoop.hive.ql.hooks.ReadEntity; -import org.apache.hadoop.hive.ql.io.AcidUtils; -import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; -import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; -import org.apache.hadoop.hive.ql.lib.Dispatcher; -import org.apache.hadoop.hive.ql.lib.GraphWalker; -import org.apache.hadoop.hive.ql.lib.Node; -import org.apache.hadoop.hive.ql.lib.NodeProcessor; -import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; -import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger; -import org.apache.hadoop.hive.ql.optimizer.physical.Vectorizer; -import org.apache.hadoop.hive.ql.optimizer.physical.VectorizerReason; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.VectorizationDetailLevel; -import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.BaseWork; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; import org.apache.hadoop.hive.ql.plan.Explain.Vectorization; -import org.apache.hadoop.hive.ql.plan.AggregationDesc; import org.apache.hadoop.hive.ql.plan.ExplainWork; -import org.apache.hadoop.hive.ql.plan.GroupByDesc; import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.ql.plan.MapredWork; -import org.apache.hadoop.hive.ql.plan.MapWork; -import org.apache.hadoop.hive.ql.plan.ReduceWork; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.SparkWork; -import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.TezWork; -import org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo; -import org.apache.hadoop.hive.ql.plan.VectorReduceSinkDesc; -import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.serde2.Deserializer; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hive.common.util.AnnotationUtils; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -118,6 +79,7 @@ private final Set> visitedOps = new HashSet>(); private boolean isLogical = false; protected final Logger LOG; + private JSONObjectAdaptorFactory factory = new JSONObjectAdaptorFactory(); public ExplainTask() { super(); @@ -132,11 +94,11 @@ public ExplainTask() { * {"input_tables":[{"tablename": "default@test_sambavi_v1", "tabletype": "TABLE"}], * "input partitions":["default@srcpart@ds=2008-04-08/hr=11"]} */ - private static JSONObject getJSONDependencies(ExplainWork work) + private JSONObjectAdaptor getJSONDependencies(ExplainWork work) throws Exception { assert(work.getDependency()); - JSONObject 
outJSONObject = new JSONObject(new LinkedHashMap<>()); + JSONObjectAdaptor outJSONObject = factory.node(); List> inputTableInfo = new ArrayList>(); List> inputPartitionInfo = new ArrayList>(); for (ReadEntity input: work.getInputs()) { @@ -169,10 +131,10 @@ private static JSONObject getJSONDependencies(ExplainWork work) return outJSONObject; } - public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws Exception { + public JSONObjectAdaptor getJSONLogicalPlan(PrintStream out, ExplainWork work) throws Exception { isLogical = true; - JSONObject outJSONObject = new JSONObject(new LinkedHashMap<>()); + JSONObjectAdaptor outJSONObject = factory.node(); boolean jsonOutput = work.isFormatted(); if (jsonOutput) { out = null; @@ -182,7 +144,7 @@ public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws E if (out != null) { out.print("LOGICAL PLAN:"); } - JSONObject jsonPlan = outputMap(work.getParseContext().getTopOps(), true, + JSONObjectAdaptor jsonPlan = outputMap(work.getParseContext().getTopOps(), true, out, work.getExtended(), jsonOutput, 0); if (out != null) { out.println(); @@ -202,14 +164,14 @@ public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws E private static String falseCondNameVectorizationEnabled = HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED.varname + " IS false"; - private ImmutablePair outputPlanVectorization(PrintStream out, boolean jsonOutput) + private ImmutablePair outputPlanVectorization(PrintStream out, boolean jsonOutput) throws Exception { if (out != null) { out.println("PLAN VECTORIZATION:"); } - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + JSONObjectAdaptor json = jsonOutput ? factory.node() : null; HiveConf hiveConf = queryState.getConf(); @@ -242,21 +204,21 @@ public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws E } } - return new ImmutablePair(isVectorizationEnabled, jsonOutput ? json : null); + return new ImmutablePair(isVectorizationEnabled, jsonOutput ? json : null); } - public JSONObject getJSONPlan(PrintStream out, ExplainWork work) + public JSONObjectAdaptor getJSONPlan(PrintStream out, ExplainWork work) throws Exception { return getJSONPlan(out, work.getRootTasks(), work.getFetchTask(), work.isFormatted(), work.getExtended(), work.isAppendTaskType()); } - public JSONObject getJSONPlan(PrintStream out, List> tasks, Task fetchTask, - boolean jsonOutput, boolean isExtended, boolean appendTaskType) throws Exception { + public JSONObjectAdaptor getJSONPlan(PrintStream out, List> tasks, Task fetchTask, + boolean jsonOutput, boolean isExtended, boolean appendTaskType) throws Exception { // If the user asked for a formatted output, dump the json output // in the output stream - JSONObject outJSONObject = new JSONObject(new LinkedHashMap<>()); + JSONObjectAdaptor outJSONObject = factory.node(); if (jsonOutput) { out = null; @@ -274,7 +236,7 @@ public JSONObject getJSONPlan(PrintStream out, List> tasks, Task fetc boolean suppressOthersForVectorization = false; if (this.work != null && this.work.isVectorization()) { - ImmutablePair planVecPair = outputPlanVectorization(out, jsonOutput); + ImmutablePair planVecPair = outputPlanVectorization(out, jsonOutput); if (this.work.isVectorizationOnly()) { // Suppress the STAGES if vectorization is off. 
@@ -291,7 +253,7 @@ public JSONObject getJSONPlan(PrintStream out, List> tasks, Task fetc } if (!suppressOthersForVectorization) { - JSONObject jsonDependencies = outputDependencies(out, jsonOutput, appendTaskType, ordered); + JSONObjectAdaptor jsonDependencies = outputDependencies(out, jsonOutput, appendTaskType, ordered); if (out != null) { out.println(); @@ -302,7 +264,7 @@ public JSONObject getJSONPlan(PrintStream out, List> tasks, Task fetc } // Go over all the tasks and dump out the plans - JSONObject jsonPlan = outputStagePlans(out, ordered, + JSONObjectAdaptor jsonPlan = outputStagePlans(out, ordered, jsonOutput, isExtended); if (jsonOutput) { @@ -340,7 +302,7 @@ private Object toJson(String header, String message, PrintStream out, ExplainWor private Object toJson(String header, List messages, PrintStream out, ExplainWork work) throws Exception { if (work.isFormatted()) { - return new JSONArray(messages); + return factory.array(messages); } out.print(header); out.println(": "); @@ -362,17 +324,17 @@ public int execute(DriverContext driverContext) { out = new PrintStream(outS); if (work.isLogical()) { - JSONObject jsonLogicalPlan = getJSONLogicalPlan(out, work); + JSONObjectAdaptor jsonLogicalPlan = getJSONLogicalPlan(out, work); if (work.isFormatted()) { out.print(jsonLogicalPlan); } } else if (work.isAuthorize()) { - JSONObject jsonAuth = collectAuthRelatedEntities(out, work); + JSONObjectAdaptor jsonAuth = collectAuthRelatedEntities(out, work); if (work.isFormatted()) { out.print(jsonAuth); } } else if (work.getDependency()) { - JSONObject jsonDependencies = getJSONDependencies(work); + JSONObjectAdaptor jsonDependencies = getJSONDependencies(work); out.print(jsonDependencies); } else { if (work.isUserLevelExplain()) { @@ -380,7 +342,7 @@ public int execute(DriverContext driverContext) { // that we can get a TezJsonParser. JsonParser jsonParser = JsonParserFactory.getParser(conf); work.getConfig().setFormatted(true); - JSONObject jsonPlan = getJSONPlan(out, work); + JSONObjectAdaptor jsonPlan = getJSONPlan(out, work); if (work.getCboInfo() != null) { jsonPlan.put("cboInfo", work.getCboInfo()); } @@ -395,7 +357,7 @@ public int execute(DriverContext driverContext) { jsonPlan = getJSONPlan(out, work); } } else { - JSONObject jsonPlan = getJSONPlan(out, work); + JSONObjectAdaptor jsonPlan = getJSONPlan(out, work); if (work.isFormatted()) { out.print(jsonPlan); } @@ -416,13 +378,13 @@ public int execute(DriverContext driverContext) { } } - private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work) + private JSONObjectAdaptor collectAuthRelatedEntities(PrintStream out, ExplainWork work) throws Exception { BaseSemanticAnalyzer analyzer = work.getAnalyzer(); HiveOperation operation = queryState.getHiveOperation(); - JSONObject object = new JSONObject(new LinkedHashMap<>()); + JSONObjectAdaptor object = factory.node(); Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work); if (work.isFormatted()) { object.put("INPUTS", jsonInput); @@ -480,11 +442,11 @@ private static String indentString(int indent) { return sb.toString(); } - private JSONObject outputMap(Map mp, boolean hasHeader, PrintStream out, - boolean extended, boolean jsonOutput, int indent) throws Exception { + private JSONObjectAdaptor outputMap(Map mp, boolean hasHeader, PrintStream out, + boolean extended, boolean jsonOutput, int indent) throws Exception { TreeMap tree = getBasictypeKeyedMap(mp); - JSONObject json = jsonOutput ? 
new JSONObject(new LinkedHashMap<>()) : null; + JSONObjectAdaptor json = jsonOutput ? factory.node() : null; if (out != null && hasHeader && !mp.isEmpty()) { out.println(); } @@ -528,7 +490,7 @@ else if (ent.getValue() instanceof List) { } if (jsonOutput) { for (TezWork.Dependency dep: (List)ent.getValue()) { - JSONObject jsonDep = new JSONObject(new LinkedHashMap<>()); + JSONObjectAdaptor jsonDep = factory.node(); jsonDep.put("parent", dep.getName()); jsonDep.put("type", dep.getType()); json.accumulate(ent.getKey().toString(), jsonDep); @@ -557,7 +519,7 @@ else if (ent.getValue() instanceof List) { } if (jsonOutput) { for (SparkWork.Dependency dep: (List) ent.getValue()) { - JSONObject jsonDep = new JSONObject(new LinkedHashMap<>()); + JSONObjectAdaptor jsonDep = factory.node(); jsonDep.put("parent", dep.getName()); jsonDep.put("type", dep.getShuffleType()); jsonDep.put("partitions", dep.getNumPartitions()); @@ -588,7 +550,7 @@ else if (ent.getValue() != null) { if (out != null) { out.println(); } - JSONObject jsonOut = outputPlan(ent.getValue(), out, + JSONObjectAdaptor jsonOut = outputPlan(ent.getValue(), out, extended, jsonOutput, jsonOutput ? 0 : indent + 2); if (jsonOutput) { json.put(ent.getKey().toString(), jsonOut); @@ -630,12 +592,12 @@ else if (ent.getValue() != null) { return ret; } - private JSONArray outputList(List l, PrintStream out, boolean hasHeader, - boolean extended, boolean jsonOutput, int indent) throws Exception { + private JSONArrayAdaptor outputList(List l, PrintStream out, boolean hasHeader, + boolean extended, boolean jsonOutput, int indent) throws Exception { boolean first_el = true; boolean nl = false; - JSONArray outputArray = new JSONArray(); + JSONArrayAdaptor outputArray = factory.array(); for (Object o : l) { if (isPrintable(o)) { @@ -654,7 +616,7 @@ private JSONArray outputList(List l, PrintStream out, boolean hasHeader, if (first_el && (out != null) && hasHeader) { out.println(); } - JSONObject jsonOut = outputPlan(o, out, extended, + JSONObjectAdaptor jsonOut = outputPlan(o, out, extended, jsonOutput, jsonOutput ? 0 : (hasHeader ? indent + 2 : indent)); if (jsonOutput) { outputArray.put(jsonOut); @@ -685,13 +647,13 @@ private boolean isPrintable(Object val) { return false; } - private JSONObject outputPlan(Object work, - PrintStream out, boolean extended, boolean jsonOutput, int indent) throws Exception { + private JSONObjectAdaptor outputPlan(Object work, + PrintStream out, boolean extended, boolean jsonOutput, int indent) throws Exception { return outputPlan(work, out, extended, jsonOutput, indent, ""); } - private JSONObject outputPlan(Object work, PrintStream out, - boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception { + private JSONObjectAdaptor outputPlan(Object work, PrintStream out, + boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception { // Check if work has an explain annotation Annotation note = AnnotationUtils.getAnnotation(work.getClass(), Explain.class); @@ -780,7 +742,7 @@ private JSONObject outputPlan(Object work, PrintStream out, } } - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + JSONObjectAdaptor json = jsonOutput ? 
factory.node() : null; // If this is an operator then we need to call the plan generation on the // conf and then the children if (work instanceof Operator) { @@ -788,11 +750,11 @@ private JSONObject outputPlan(Object work, PrintStream out, (Operator) work; if (operator.getConf() != null) { String appender = isLogical ? " (" + operator.getOperatorId() + ")" : ""; - JSONObject jsonOut = outputPlan(operator.getConf(), out, extended, + JSONObjectAdaptor jsonOut = outputPlan(operator.getConf(), out, extended, jsonOutput, jsonOutput ? 0 : indent, appender); if (this.work != null && this.work.isUserLevelExplain()) { if (jsonOut != null && jsonOut.length() > 0) { - ((JSONObject) jsonOut.get(JSONObject.getNames(jsonOut)[0])).put("OperatorId:", + ((JSONObjectAdaptor) jsonOut.get(JSONObjectAdaptor.getNames(jsonOut)[0])).put("OperatorId:", operator.getOperatorId()); } } @@ -806,9 +768,9 @@ private JSONObject outputPlan(Object work, PrintStream out, if (operator.getChildOperators() != null) { int cindent = jsonOutput ? 0 : indent + 2; for (Operator op : operator.getChildOperators()) { - JSONObject jsonOut = outputPlan(op, out, extended, jsonOutput, cindent); + JSONObjectAdaptor jsonOut = outputPlan(op, out, extended, jsonOutput, cindent); if (jsonOutput) { - ((JSONObject)json.get(JSONObject.getNames(json)[0])).accumulate("children", jsonOut); + ((JSONObjectAdaptor)json.get(JSONObjectAdaptor.getNames(json)[0])).accumulate("children", jsonOut); } } } @@ -959,7 +921,7 @@ private JSONObject outputPlan(Object work, PrintStream out, out.print(header); } - JSONObject jsonOut = outputMap(mp, !skipHeader && !emptyHeader, out, extended, jsonOutput, ind); + JSONObjectAdaptor jsonOut = outputMap(mp, !skipHeader && !emptyHeader, out, extended, jsonOutput, ind); if (jsonOutput && !mp.isEmpty()) { json.put(header, jsonOut); } @@ -973,7 +935,7 @@ private JSONObject outputPlan(Object work, PrintStream out, out.print(header); } - JSONArray jsonOut = outputList(l, out, !skipHeader && !emptyHeader, extended, jsonOutput, ind); + JSONArrayAdaptor jsonOut = outputList(l, out, !skipHeader && !emptyHeader, extended, jsonOutput, ind); if (jsonOutput && !l.isEmpty()) { json.put(header, jsonOut); @@ -987,12 +949,12 @@ private JSONObject outputPlan(Object work, PrintStream out, if (!skipHeader && out != null) { out.println(header); } - JSONObject jsonOut = outputPlan(val, out, extended, jsonOutput, ind); + JSONObjectAdaptor jsonOut = outputPlan(val, out, extended, jsonOutput, ind); if (jsonOutput && jsonOut != null && jsonOut.length() != 0) { if (!skipHeader) { json.put(header, jsonOut); } else { - for(String k: JSONObject.getNames(jsonOut)) { + for(String k: JSONObjectAdaptor.getNames(jsonOut)) { json.put(k, jsonOut.get(k)); } } @@ -1008,7 +970,7 @@ private JSONObject outputPlan(Object work, PrintStream out, if (jsonOutput) { if (keyJSONObject != null) { - JSONObject ret = new JSONObject(new LinkedHashMap<>()); + JSONObjectAdaptor ret = factory.node(); ret.put(keyJSONObject, json); return ret; } @@ -1035,9 +997,9 @@ private boolean shouldPrint(Explain exp, Object val) { return true; } - private JSONObject outputPlan(Task task, - PrintStream out, JSONObject parentJSON, boolean extended, - boolean jsonOutput, int indent) throws Exception { + private JSONObjectAdaptor outputPlan(Task task, + PrintStream out, JSONObjectAdaptor parentJSON, boolean extended, + boolean jsonOutput, int indent) throws Exception { if (out != null) { out.print(indentString(indent)); @@ -1048,7 +1010,7 @@ private JSONObject outputPlan(Task task, // Start by 
getting the work part of the task and call the output plan for // the work - JSONObject jsonOutputPlan = outputPlan(task.getWork(), out, extended, + JSONObjectAdaptor jsonOutputPlan = outputPlan(task.getWork(), out, extended, jsonOutput, jsonOutput ? 0 : indent + 2); if (out != null) { @@ -1061,12 +1023,12 @@ private JSONObject outputPlan(Task task, return null; } - private JSONObject outputDependencies(Task task, - PrintStream out, JSONObject parentJson, boolean jsonOutput, boolean taskType, int indent) + private JSONObjectAdaptor outputDependencies(Task task, + PrintStream out, JSONObjectAdaptor parentJson, boolean jsonOutput, boolean taskType, int indent) throws Exception { boolean first = true; - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + JSONObjectAdaptor json = jsonOutput ? factory.node() : null; if (out != null) { out.print(indentString(indent)); out.print(task.getId()); @@ -1152,7 +1114,7 @@ private JSONObject outputDependencies(Task task, } public String outputAST(String treeString, PrintStream out, - boolean jsonOutput, int indent) throws JSONException { + boolean jsonOutput, int indent) { if (out != null) { out.print(indentString(indent)); out.println("ABSTRACT SYNTAX TREE:"); @@ -1163,17 +1125,17 @@ public String outputAST(String treeString, PrintStream out, return jsonOutput ? treeString : null; } - public JSONObject outputDependencies(PrintStream out, boolean jsonOutput, - boolean appendTaskType, List tasks) + public JSONObjectAdaptor outputDependencies(PrintStream out, boolean jsonOutput, + boolean appendTaskType, List tasks) throws Exception { if (out != null) { out.println("STAGE DEPENDENCIES:"); } - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + JSONObjectAdaptor json = jsonOutput ? factory.node() : null; for (Task task : tasks) { - JSONObject jsonOut = outputDependencies(task, out, json, jsonOutput, appendTaskType, 2); + JSONObjectAdaptor jsonOut = outputDependencies(task, out, json, jsonOutput, appendTaskType, 2); if (jsonOutput && jsonOut != null) { json.put(task.getId(), jsonOut); } @@ -1182,15 +1144,15 @@ public JSONObject outputDependencies(PrintStream out, boolean jsonOutput, return jsonOutput ? json : null; } - public JSONObject outputStagePlans(PrintStream out, List tasks, - boolean jsonOutput, boolean isExtended) + public JSONObjectAdaptor outputStagePlans(PrintStream out, List tasks, + boolean jsonOutput, boolean isExtended) throws Exception { if (out != null) { out.println("STAGE PLANS:"); } - JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null; + JSONObjectAdaptor json = jsonOutput ? 
factory.node() : null; for (Task task : tasks) { outputPlan(task, out, json, isExtended, jsonOutput, 2); } diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java index 55b922b..f8b0534 100644 --- ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java @@ -21,7 +21,6 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -32,6 +31,8 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptor; +import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptorFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.llap.registry.impl.LlapRegistryService; import org.apache.hadoop.hive.ql.QueryPlan; @@ -49,7 +50,6 @@ import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hive.common.util.ShutdownHookManager; -import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -69,6 +69,7 @@ private static TimelineClient timelineClient; private enum EntityTypes { HIVE_QUERY_ID }; private enum EventTypes { QUERY_SUBMITTED, QUERY_COMPLETED }; + private JSONObjectAdaptorFactory factory = new JSONObjectAdaptorFactory(); private enum OtherInfoTypes { QUERY, STATUS, TEZ, MAPRED, INVOKER_INFO, SESSION_ID, THREAD_NAME, VERSION, @@ -184,7 +185,7 @@ public void run() { ExplainTask explain = (ExplainTask) TaskFactory.get(work, conf); explain.initialize(queryState, plan, null, null); String query = plan.getQueryStr(); - JSONObject explainPlan = explain.getJSONPlan(null, work); + JSONObjectAdaptor explainPlan = explain.getJSONPlan(null, work); String logID = conf.getLogIdVar(hookContext.getSessionId()); List tablesRead = getTablesFromEntitySet(hookContext.getInputs()); List tablesWritten = getTablesFromEntitySet(hookContext.getOutputs()); @@ -256,14 +257,14 @@ protected ExecutionMode getExecutionMode(QueryPlan plan) { return mode; } - TimelineEntity createPreHookEvent(String queryId, String query, JSONObject explainPlan, + TimelineEntity createPreHookEvent(String queryId, String query, JSONObjectAdaptor explainPlan, long startTime, String user, String requestuser, int numMrJobs, int numTezJobs, String opId, String clientIpAddress, String hiveInstanceAddress, String hiveInstanceType, String sessionID, String logID, String threadId, String executionMode, List tablesRead, List tablesWritten, HiveConf conf, ApplicationId llapAppId) throws Exception { - JSONObject queryObj = new JSONObject(new LinkedHashMap<>()); + JSONObjectAdaptor queryObj = factory.node(); queryObj.put("queryText", query); queryObj.put("queryPlan", explainPlan); @@ -278,7 +279,7 @@ TimelineEntity createPreHookEvent(String queryId, String query, JSONObject expla for (Map.Entry setting : conf) { confMap.put(setting.getKey(), setting.getValue()); } - JSONObject confObj = new JSONObject((Map) confMap); + JSONObjectAdaptor confObj = factory.node(); TimelineEntity atsEntity = new TimelineEntity(); atsEntity.setEntityId(queryId); @@ -345,7 +346,7 @@ TimelineEntity createPostHookEvent(String queryId, long stopTime, String user, S atsEntity.addOtherInfo(OtherInfoTypes.STATUS.name(), success); // Perf times - JSONObject perfObj = new JSONObject(new LinkedHashMap<>()); + 
JSONObjectAdaptor perfObj = factory.node(); for (String key : perfLogger.getEndTimes().keySet()) { perfObj.put(key, perfLogger.getDuration(key)); }
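
For reference, a minimal sketch of how the new adaptor API is intended to be used by callers such as ExplainTask, Op and TezJsonParser in the hunks above. It is illustrative only and not part of the patch: the class name AdaptorUsageSketch and the literal field values are made up, while the factory and adaptor methods are the ones defined in the new files at the top of this diff.

import org.apache.hadoop.hive.common.jsonexplain.JSONArrayAdaptor;
import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptor;
import org.apache.hadoop.hive.common.jsonexplain.JSONObjectAdaptorFactory;

public class AdaptorUsageSketch {
  public static void main(String[] args) {
    JSONObjectAdaptorFactory factory = new JSONObjectAdaptorFactory();

    // ExplainTask replaces "new JSONObject(new LinkedHashMap<>())" with
    // factory.node() and then fills the node with put(), as before.
    JSONObjectAdaptor plan = factory.node();
    plan.put("cboInfo", "illustrative value");

    // accumulate() mimics org.json.JSONObject#accumulate: when a key is
    // accumulated more than once, the values are collected into an array.
    JSONObjectAdaptor dep1 = factory.node();
    dep1.put("parent", "Map 1");
    JSONObjectAdaptor dep2 = factory.node();
    dep2.put("parent", "Map 2");
    plan.accumulate("children", dep1);
    plan.accumulate("children", dep2);

    // Op.inlineJoinOp() parses nested JSON strings with factory.node(String)
    // instead of "new JSONObject(String)".
    JSONObjectAdaptor cond = factory.node("{\"type\":\"Inner\",\"left\":\"a\",\"right\":\"b\"}");
    String joinType = cond.getString("type");

    // The static JSONObjectAdaptor.getNames() replaces JSONObject.getNames().
    for (String name : JSONObjectAdaptor.getNames(plan)) {
      System.out.println(name + " -> " + plan.get(name));
    }

    JSONArrayAdaptor children = plan.getJSONArray("children");
    System.out.println(children.length() + " accumulated children, join type " + joinType);
  }
}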