diff --git LICENSE LICENSE
index db3777d..c607f21 100644
--- LICENSE
+++ LICENSE
@@ -305,32 +305,6 @@ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 POSSIBILITY OF SUCH DAMAGE.
 
-For the org.json library:
-
-Copyright (c) 2002 JSON.org
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-The Software shall be used for Good, not Evil.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-
 For the JLine library:
 
 Copyright (c) 2002-2006, Marc Prud'hommeaux
@@ -494,4 +468,4 @@ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
+POSSIBILITY OF SUCH DAMAGE.
diff --git common/pom.xml common/pom.xml
index 8d4b1ea..1a70875 100644
--- common/pom.xml
+++ common/pom.xml
@@ -88,17 +88,16 @@
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.json</groupId>
-      <artifactId>json</artifactId>
-      <version>${json.version}</version>
-    </dependency>
-    <dependency>
       <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
       <version>${dropwizard.version}</version>
diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java
index 1a0d8e1..ee5b3c7 100644
--- common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java
+++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java
@@ -20,7 +20,7 @@
 
 import java.io.PrintStream;
 
-import org.json.JSONObject;
+import org.json.simple.JSONObject;
 
 /**
  * JsonParser is the interface for classes that print a JSONObject
diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonUtils.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonUtils.java
new file mode 100644
index 0000000..93c0b9e
--- /dev/null
+++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonUtils.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain;
+
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+
+import java.util.Iterator;
+
+public class JsonUtils {
+
+  public static JSONObject accumulate(JSONObject jsonObject, String key, Object value) {
+    if (jsonObject.get(key) == null) {
+      if (value instanceof JSONArray) {
+        JSONArray newValue = new JSONArray();
+        newValue.add(value);
+        jsonObject.put(key, newValue);
+      } else {
+        jsonObject.put(key, value);
+      }
+    } else {
+      Object previous = jsonObject.get(key);
+      if (previous instanceof JSONArray) {
+        ((JSONArray) previous).add(value);
+      } else {
+        JSONArray newValue = new JSONArray();
+        newValue.add(previous);
+        newValue.add(value);
+        jsonObject.put(key, newValue);
+      }
+    }
+    return jsonObject;
+  }
+
+  public static String[] getNames(JSONObject jsonObject) {
+    String[] result = new String[jsonObject.size()];
+    Iterator iterator = jsonObject.keySet().iterator();
+    for (int i = 0; iterator.hasNext(); i++) {
+      Object key = iterator.next();
+      result[i] = key.toString();
+    }
+    return result;
+  }
+}
diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
index cc4947f..37720a2 100644
--- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
+++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
@@ -26,8 +26,8 @@ import java.util.Map;
 
 import com.google.common.annotations.VisibleForTesting;
 
-import org.json.JSONException;
-import org.json.JSONObject;
+import org.apache.hadoop.hive.common.jsonexplain.JsonUtils;
+import org.json.simple.JSONObject;
 
 public class Op {
   String name;
@@ -44,7 +44,7 @@ String outputVertexName;
 
   public Op(String name, String id, String outputVertexName, List<Op> children, List<Attr> attrs,
-      JSONObject opObject, Vertex vertex) throws JSONException {
+      JSONObject opObject, Vertex vertex) {
     super();
     this.name = name;
     this.operatorId = id;
@@ -59,12 +59,12 @@ public Op(String name, String id, String outputVertexName, List<Op> children, Li
   void inlineJoinOp() throws Exception {
     // inline map join operator
     if (this.name.equals("Map Join Operator")) {
-      JSONObject mapjoinObj = opObject.getJSONObject("Map Join Operator");
+      JSONObject mapjoinObj = (JSONObject) opObject.get("Map Join Operator");
       // get the map for posToVertex
-      JSONObject verticeObj = mapjoinObj.getJSONObject("input vertices:");
+      JSONObject verticeObj = (JSONObject) mapjoinObj.get("input vertices:");
       Map<String, String> posToVertex = new HashMap<>();
-      for (String pos : JSONObject.getNames(verticeObj)) {
-        String vertexName = verticeObj.getString(pos);
+      for (String pos : JsonUtils.getNames(verticeObj)) {
+        String vertexName = verticeObj.get(pos).toString();
         posToVertex.put(pos, vertexName);
         // update the connection
         Connection c = null;
@@ -81,10 +81,10 @@ void inlineJoinOp() throws Exception {
       // update the attrs
       removeAttr("input vertices:");
       // update the keys to use vertex name
-      JSONObject keys = mapjoinObj.getJSONObject("keys:");
-      if (keys.length() != 0) {
+      JSONObject keys = (JSONObject) mapjoinObj.get("keys:");
+      if (keys.size() != 0) {
         JSONObject newKeys = new JSONObject();
-        for (String key : JSONObject.getNames(keys)) {
+        for (String key : JsonUtils.getNames(keys)) {
           String vertexName = posToVertex.get(key);
           if (vertexName != null) {
             newKeys.put(vertexName, keys.get(key));
diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java
index 10e0a0c..1dfd89e 100644
--- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java
+++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java
@@ -28,11 +28,11 @@ import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.jsonexplain.JsonUtils;
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.map.JsonMappingException;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
 
 public class Stage {
   String name;
@@ -56,9 +56,9 @@ public Stage(String name) {
     vertexs = new LinkedHashMap<>();
   }
 
-  public void addDependency(JSONObject object, Map<String, Stage> stages) throws JSONException {
-    if (!object.has("ROOT STAGE")) {
-      String names = object.getString("DEPENDENT STAGES");
+  public void addDependency(JSONObject object, Map<String, Stage> stages) {
+    if (!object.keySet().contains("ROOT STAGE")) {
+      String names = object.get("DEPENDENT STAGES").toString();
       for (String name : names.split(",")) {
         Stage parent = stages.get(name.trim());
         this.parentStages.add(parent);
@@ -75,30 +75,30 @@ public void addDependency(JSONObject object, Map<String, Stage> stages) throws J
    * and/or attributes.
    */
   public void extractVertex(JSONObject object) throws Exception {
-    if (object.has("Tez")) {
+    if (object.keySet().contains("Tez")) {
       this.tezStageDependency = new LinkedHashMap<>();
       JSONObject tez = (JSONObject) object.get("Tez");
-      JSONObject vertices = tez.getJSONObject("Vertices:");
-      if (tez.has("Edges:")) {
-        JSONObject edges = tez.getJSONObject("Edges:");
+      JSONObject vertices = (JSONObject) tez.get("Vertices:");
+      if (tez.keySet().contains("Edges:")) {
+        JSONObject edges = (JSONObject) tez.get("Edges:");
         // iterate for the first time to get all the vertices
-        for (String to : JSONObject.getNames(edges)) {
-          vertexs.put(to, new Vertex(to, vertices.getJSONObject(to)));
+        for (String to : JsonUtils.getNames(edges)) {
+          vertexs.put(to, new Vertex(to, (JSONObject) vertices.get(to)));
         }
         // iterate for the second time to get all the vertex dependency
-        for (String to : JSONObject.getNames(edges)) {
+        for (String to : JsonUtils.getNames(edges)) {
           Object o = edges.get(to);
           Vertex v = vertexs.get(to);
           // 1 to 1 mapping
           if (o instanceof JSONObject) {
             JSONObject obj = (JSONObject) o;
-            String parent = obj.getString("parent");
+            String parent = obj.get("parent").toString();
             Vertex parentVertex = vertexs.get(parent);
             if (parentVertex == null) {
-              parentVertex = new Vertex(parent, vertices.getJSONObject(parent));
+              parentVertex = new Vertex(parent, (JSONObject) vertices.get(parent));
               vertexs.put(parent, parentVertex);
             }
-            String type = obj.getString("type");
+            String type = obj.get("type").toString();
             // for union vertex, we reverse the dependency relationship
             if (!"CONTAINS".equals(type)) {
               v.addDependency(new Connection(type, parentVertex));
@@ -112,15 +112,15 @@ public void extractVertex(JSONObject object) throws Exception {
           // 1 to many mapping
           JSONArray from = (JSONArray) o;
           List<Connection> list = new ArrayList<>();
-          for (int index = 0; index < from.length(); index++) {
-            JSONObject obj = from.getJSONObject(index);
-            String parent = obj.getString("parent");
+          for (int index = 0; index < from.size(); index++) {
+            JSONObject obj = (JSONObject) from.get(index);
+            String parent = obj.get("parent").toString();
             Vertex parentVertex = vertexs.get(parent);
             if (parentVertex == null) {
-              parentVertex = new Vertex(parent, vertices.getJSONObject(parent));
+              parentVertex = new Vertex(parent, (JSONObject) vertices.get(parent));
               vertexs.put(parent, parentVertex);
             }
-            String type = obj.getString("type");
+            String type = obj.get("type").toString();
             if (!"CONTAINS".equals(type)) {
               v.addDependency(new Connection(type, parentVertex));
               parentVertex.children.add(v);
@@ -134,8 +134,8 @@ public void extractVertex(JSONObject object) throws Exception {
         }
       }
     } else {
-      for (String vertexName : JSONObject.getNames(vertices)) {
-        vertexs.put(vertexName, new Vertex(vertexName, vertices.getJSONObject(vertexName)));
+      for (String vertexName : JsonUtils.getNames(vertices)) {
+        vertexs.put(vertexName, new Vertex(vertexName, (JSONObject) vertices.get(vertexName)));
       }
     }
     // The opTree in vertex is extracted
@@ -146,10 +146,10 @@ public void extractVertex(JSONObject object) throws Exception {
        }
      }
    } else {
-     String[] names = JSONObject.getNames(object);
+     String[] names = JsonUtils.getNames(object);
      for (String name : names) {
        if (name.contains("Operator")) {
-         this.op = extractOp(name, object.getJSONObject(name));
+         this.op = extractOp(name, (JSONObject) object.get(name));
        } else {
          attrs.add(new Attr(name, object.get(name).toString()));
        }
@@ -161,7 +161,6 @@ public void extractVertex(JSONObject object) throws Exception {
    * @param opName
    * @param opObj
    * @return
-   * @throws JSONException
    * @throws JsonParseException
    * @throws JsonMappingException
    * @throws IOException
@@ -169,26 +168,25 @@
    *           This method address the create table operator, fetch operator,
    *           etc
    */
-  Op extractOp(String opName, JSONObject opObj) throws JSONException, JsonParseException,
-      JsonMappingException, IOException, Exception {
+  Op extractOp(String opName, JSONObject opObj) throws Exception {
     List<Attr> attrs = new ArrayList<>();
     Vertex v = null;
-    if (opObj.length() > 0) {
-      String[] names = JSONObject.getNames(opObj);
+    if (opObj.size() > 0) {
+      String[] names = JsonUtils.getNames(opObj);
       for (String name : names) {
         Object o = opObj.get(name);
         if (isPrintable(o)) {
           attrs.add(new Attr(name, o.toString()));
         } else if (o instanceof JSONObject) {
           JSONObject attrObj = (JSONObject) o;
-          if (attrObj.length() > 0) {
+          if (attrObj.size() > 0) {
             if (name.equals("Processor Tree:")) {
               JSONObject object = new JSONObject();
               object.put(name, attrObj);
               v = new Vertex(null, object);
               v.extractOpTree();
             } else {
-              for (String attrName : JSONObject.getNames(attrObj)) {
+              for (String attrName : JsonUtils.getNames(attrObj)) {
                 attrs.add(new Attr(attrName, attrObj.get(attrName).toString()));
               }
             }
@@ -217,7 +215,7 @@ private boolean isPrintable(Object val) {
     return false;
   }
 
-  public void print(PrintStream out, List<Boolean> indentFlag) throws JSONException, Exception {
+  public void print(PrintStream out, List<Boolean> indentFlag) throws Exception {
     // print stagename
     if (TezJsonParser.printSet.contains(this)) {
       out.println(TezJsonParser.prefixString(indentFlag) + " Please refer to the previous "
diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java
index 43ddff3..0dee2c4 100644
--- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java
+++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java
@@ -28,13 +28,13 @@ import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
+import org.apache.hadoop.hive.common.jsonexplain.JsonUtils;
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.map.JsonMappingException;
-import org.json.JSONException;
-import org.json.JSONObject;
+import org.json.simple.JSONObject;
 
 public class TezJsonParser implements JsonParser {
   JSONObject inputObject;
@@ -49,27 +49,26 @@ public TezJsonParser() {
     super();
     LOG = LogFactory.getLog(this.getClass().getName());
   }
 
-  public void extractStagesAndPlans() throws JSONException, JsonParseException,
-      JsonMappingException, Exception, IOException {
+  public void extractStagesAndPlans() throws Exception {
     // extract stages
     this.stages = new HashMap<String, Stage>();
-    JSONObject dependency = inputObject.getJSONObject("STAGE DEPENDENCIES");
-    if (dependency.length() > 0) {
+    JSONObject dependency = (JSONObject) inputObject.get("STAGE DEPENDENCIES");
+    if (dependency.size() > 0) {
       // iterate for the first time to get all the names of stages.
-      for (String stageName : JSONObject.getNames(dependency)) {
+      for (String stageName : JsonUtils.getNames(dependency)) {
         this.stages.put(stageName, new Stage(stageName));
       }
       // iterate for the second time to get all the dependency.
-      for (String stageName : JSONObject.getNames(dependency)) {
-        JSONObject dependentStageNames = dependency.getJSONObject(stageName);
+      for (String stageName : JsonUtils.getNames(dependency)) {
+        JSONObject dependentStageNames = (JSONObject) dependency.get(stageName);
         this.stages.get(stageName).addDependency(dependentStageNames, this.stages);
       }
     }
     // extract stage plans
-    JSONObject stagePlans = inputObject.getJSONObject("STAGE PLANS");
-    if (stagePlans.length() > 0) {
-      for (String stageName : JSONObject.getNames(stagePlans)) {
-        JSONObject stagePlan = stagePlans.getJSONObject(stageName);
+    JSONObject stagePlans = (JSONObject) inputObject.get("STAGE PLANS");
+    if (stagePlans.size() > 0) {
+      for (String stageName : JsonUtils.getNames(stagePlans)) {
+        JSONObject stagePlan = (JSONObject) stagePlans.get(stageName);
         this.stages.get(stageName).extractVertex(stagePlan);
       }
     }
@@ -116,8 +115,8 @@ public void print(JSONObject inputObject, PrintStream outputStream) throws Excep
     this.outputStream = outputStream;
     this.extractStagesAndPlans();
     // print out the cbo info
-    if (inputObject.has("cboInfo")) {
-      outputStream.println(inputObject.getString("cboInfo"));
+    if (inputObject.keySet().contains("cboInfo")) {
+      outputStream.println(inputObject.get("cboInfo"));
       outputStream.println();
     }
     // print out the vertex dependency in root stage
diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java
index 9b3405e..dc4400f 100644
--- common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java
+++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java
@@ -23,11 +23,11 @@ import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.jsonexplain.JsonUtils;
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.map.JsonMappingException;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
 
 public class Vertex {
   public String name;
@@ -67,12 +67,11 @@ public Vertex(String name, JSONObject vertexObject) {
     this.hasMultiReduceOp = false;
   }
 
-  public void addDependency(Connection connection) throws JSONException {
+  public void addDependency(Connection connection) {
     this.parentConnections.add(connection);
   }
 
   /**
-   * @throws JSONException
    * @throws JsonParseException
    * @throws JsonMappingException
    * @throws IOException
@@ -80,21 +79,20 @@ public void addDependency(Connection connection) throws JSONException {
    *           We assume that there is a single top-level Map Operator Tree or a
    *           Reduce Operator Tree in a vertex
    */
-  public void extractOpTree() throws JSONException, JsonParseException, JsonMappingException,
-      IOException, Exception {
-    if (vertexObject.length() != 0) {
-      for (String key : JSONObject.getNames(vertexObject)) {
+  public void extractOpTree() throws Exception {
+    if (vertexObject.size() != 0) {
+      for (String key : JsonUtils.getNames(vertexObject)) {
         if (key.equals("Map Operator Tree:")) {
-          extractOp(vertexObject.getJSONArray(key).getJSONObject(0));
+          extractOp((JSONObject) ((JSONArray) vertexObject.get(key)).get(0));
         } else if (key.equals("Reduce Operator Tree:") || key.equals("Processor Tree:")) {
-          extractOp(vertexObject.getJSONObject(key));
+          extractOp((JSONObject) vertexObject.get(key));
         }
         // this is the case when we have a map-side SMB join
         // one input of the join is treated as a dummy vertex
         else if (key.equals("Join:")) {
-          JSONArray array = vertexObject.getJSONArray(key);
-          for (int index = 0; index < array.length(); index++) {
-            JSONObject mpOpTree = array.getJSONObject(index);
+          JSONArray array = (JSONArray) vertexObject.get(key);
+          for (int index = 0; index < array.size(); index++) {
+            JSONObject mpOpTree = (JSONObject) array.get(index);
             Vertex v = new Vertex("", mpOpTree);
             v.extractOpTree();
             v.dummy = true;
@@ -109,19 +107,12 @@ else if (key.equals("Join:")) {
 
   /**
    * @param operator
-   * @param parent
-   * @return
-   * @throws JSONException
-   * @throws JsonParseException
-   * @throws JsonMappingException
-   * @throws IOException
    * @throws Exception
    *           assumption: each operator only has one parent but may have many
    *           children
    */
-  Op extractOp(JSONObject operator) throws JSONException, JsonParseException, JsonMappingException,
-      IOException, Exception {
-    String[] names = JSONObject.getNames(operator);
+  Op extractOp(JSONObject operator) throws Exception {
+    String[] names = JsonUtils.getNames(operator);
     if (names.length != 1) {
       throw new Exception("Expect only one operator in " + operator.toString());
     } else {
@@ -131,18 +122,18 @@ Op extractOp(JSONObject operator) throws JSONException, JsonParseException, Json
       List<Op> children = new ArrayList<>();
       String id = null;
       String outputVertexName = null;
-      for (String attrName : JSONObject.getNames(attrObj)) {
+      for (String attrName : JsonUtils.getNames(attrObj)) {
         if (attrName.equals("children")) {
           Object childrenObj = attrObj.get(attrName);
           if (childrenObj instanceof JSONObject) {
-            if (((JSONObject) childrenObj).length() != 0) {
+            if (((JSONObject) childrenObj).size() != 0) {
               children.add(extractOp((JSONObject) childrenObj));
             }
           } else if (childrenObj instanceof JSONArray) {
-            if (((JSONArray) childrenObj).length() != 0) {
+            if (((JSONArray) childrenObj).size() != 0) {
               JSONArray array = ((JSONArray) childrenObj);
-              for (int index = 0; index < array.length(); index++) {
-                children.add(extractOp(array.getJSONObject(index)));
+              for (int index = 0; index < array.size(); index++) {
+                children.add(extractOp((JSONObject) array.get(index)));
               }
             }
           } else {
@@ -172,7 +163,7 @@ Op extractOp(JSONObject operator) throws JSONException, JsonParseException, Json
   }
 
   public void print(PrintStream out, List<Boolean> indentFlag, String type, Vertex callingVertex)
-      throws JSONException, Exception {
+      throws Exception {
     // print vertexname
     if (TezJsonParser.printSet.contains(this) && !hasMultiReduceOp) {
       if (type != null) {
diff --git common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestJsonUtils.java common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestJsonUtils.java
new file mode 100644
index 0000000..e92cb8b
--- /dev/null
+++ common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestJsonUtils.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain;
+
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.*;
+
+public class TestJsonUtils {
+
+  private static final String TEST_KEY = "key";
+  private static final String TEST_VALUE_ONE = "one";
+  private static final String TEST_VALUE_TWO = "two";
+  private static final JSONArray TEST_ARRAY;
+
+  static {
+    TEST_ARRAY = new JSONArray();
+    TEST_ARRAY.add(TEST_VALUE_ONE);
+  }
+
+  @Test
+  public void testAccumulateNewKeyWithSimpleObject() throws Exception {
+    JSONObject obj = new JSONObject();
+    JsonUtils.accumulate(obj, TEST_KEY, TEST_VALUE_ONE);
+    assertEquals(TEST_VALUE_ONE, obj.get(TEST_KEY));
+  }
+
+  @Test
+  public void testAccumulateNewKeyWithArray() throws Exception {
+    JSONObject obj = new JSONObject();
+    JsonUtils.accumulate(obj, TEST_KEY, TEST_ARRAY);
+    assertEquals(1, ((JSONArray) obj.get(TEST_KEY)).size());
+    assertEquals(TEST_ARRAY, ((JSONArray) obj.get(TEST_KEY)).get(0));
+  }
+
+  @Test
+  public void testAccumulateAlreadyPresentArrayNonEmpty() throws Exception {
+    JSONObject obj = new JSONObject();
+    JSONArray arr = new JSONArray();
+    arr.add(TEST_VALUE_ONE);
+    obj.put(TEST_KEY, arr);
+    JsonUtils.accumulate(obj, TEST_KEY, TEST_VALUE_TWO);
+    assertEquals(2, ((JSONArray) obj.get(TEST_KEY)).size());
+    assertEquals(TEST_VALUE_ONE, ((JSONArray) obj.get(TEST_KEY)).get(0));
+    assertEquals(TEST_VALUE_TWO, ((JSONArray) obj.get(TEST_KEY)).get(1));
+  }
+
+  @Test
+  public void testAccumulateAlreadyPresentArrayEmpty() throws Exception {
+    JSONObject obj = new JSONObject();
+    obj.put(TEST_KEY, new JSONArray());
+    JsonUtils.accumulate(obj, TEST_KEY, TEST_VALUE_TWO);
+    assertEquals(1, ((JSONArray) obj.get(TEST_KEY)).size());
+    assertEquals(TEST_VALUE_TWO, ((JSONArray) obj.get(TEST_KEY)).get(0));
+  }
+
+  @Test
+  public void testAccumulateAlreadyPresentNonArray() throws Exception {
+    JSONObject obj = new JSONObject();
+    obj.put(TEST_KEY, TEST_VALUE_ONE);
+    JsonUtils.accumulate(obj, TEST_KEY, TEST_VALUE_TWO);
+    assertEquals(2, ((JSONArray) obj.get(TEST_KEY)).size());
+    assertEquals(TEST_VALUE_ONE, ((JSONArray) obj.get(TEST_KEY)).get(0));
+    assertEquals(TEST_VALUE_TWO, ((JSONArray) obj.get(TEST_KEY)).get(1));
+  }
+
+  @Test
+  public void testGetNamesEmpty() throws Exception {
+    JSONObject obj = new JSONObject();
+    String[] result = JsonUtils.getNames(obj);
+    assertEquals(0, result.length);
+  }
+
+  @Test
+  public void testGetNamesSingle() throws Exception {
+    JSONObject obj = new JSONObject();
obj.put("key1", "value1"); + String[] result = JsonUtils.getNames(obj); + assertEquals(1, result.length); + assertEquals("key1", result[0]); + } + + @Test + public void testGetNamesMultiple() throws Exception { + JSONObject obj = new JSONObject(); + obj.put("key1", "value1"); + obj.put("key2", "value2"); + obj.put("key3", "value3"); + List result = Arrays.asList(JsonUtils.getNames(obj)); + assertEquals(3, result.size()); + assertTrue(result.contains("key1")); + assertTrue(result.contains("key2")); + assertTrue(result.contains("key3")); + } + +} diff --git common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestOp.java common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestOp.java index fc8381b..76a4a97 100644 --- common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestOp.java +++ common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestOp.java @@ -20,7 +20,8 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import org.json.JSONObject; +import org.json.simple.JSONObject; +import org.json.simple.parser.JSONParser; import org.junit.Test; import java.util.ArrayList; @@ -32,13 +33,14 @@ public class TestOp { private ObjectMapper objectMapper = new ObjectMapper(); + private JSONParser parser = new JSONParser(); @Test public void testInlineJoinOpJsonShouldMatch() throws Exception { String jsonString = "{\"Map Join Operator\":{" + "\"input vertices:\":{\"a\":\"AVERTEX\"}," + "\"keys:\":{\"a\":\"AKEY\",\"b\":\"BKEY\"}}}"; - JSONObject mapJoin = new JSONObject(jsonString); + JSONObject mapJoin = (JSONObject) parser.parse(jsonString); Vertex vertex = new Vertex("vertex-name", null); @@ -55,4 +57,4 @@ public void testInlineJoinOpJsonShouldMatch() throws Exception { assertEquals(expected, result); } -} \ No newline at end of file +} diff --git common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestStage.java common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestStage.java index eaf03c3..5777cfe 100644 --- common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestStage.java +++ common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestStage.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.common.jsonexplain.tez; -import org.json.JSONObject; +import org.json.simple.JSONObject; +import org.json.simple.parser.JSONParser; import org.junit.Before; import org.junit.Test; @@ -35,6 +36,8 @@ private Stage stageA; private Stage stageB; + private JSONParser parser = new JSONParser(); + @Before public void setUp() { this.uut = new Stage("uut"); @@ -50,7 +53,7 @@ public void testAddDependencyNonRoot() throws Exception { String jsonString = "{\"DEPENDENT STAGES\":\"a,b\"}"; - JSONObject names = new JSONObject(jsonString); + JSONObject names = (JSONObject) parser.parse(jsonString); uut.addDependency(names, children); @@ -72,7 +75,7 @@ public void testAddDependencyRoot() throws Exception { children.put("b", stageB); String jsonString = "{\"ROOT STAGE\":\"X\",\"DEPENDENT STAGES\":\"a,b\"}"; - JSONObject names = new JSONObject(jsonString); + JSONObject names = (JSONObject) parser.parse(jsonString); uut.addDependency(names, children); @@ -86,7 +89,7 @@ public void testAddDependencyRoot() throws Exception { public void testExtractVertexNonTez() throws Exception { String jsonString = "{\"OperatorName\":{\"a\":\"A\",\"b\":\"B\"}," + "\"attr1\":\"ATTR1\"}"; - JSONObject object = new JSONObject(jsonString); + JSONObject object = (JSONObject) parser.parse(jsonString); 
 
     uut.extractVertex(object);
 
@@ -99,7 +102,7 @@
   @Test
   public void testExtractVertexTezNoEdges() throws Exception {
     String jsonString = "{\"Tez\":{\"a\":\"A\",\"Vertices:\":{\"v1\":{}}}}";
-    JSONObject object = new JSONObject(jsonString);
+    JSONObject object = (JSONObject) parser.parse(jsonString);
 
     uut.extractVertex(object);
     assertEquals(1, uut.vertexs.size());
@@ -111,7 +114,7 @@ public void testExtractVertexTezWithOneEdge() throws Exception {
     String jsonString = "{\"Tez\":{\"a\":\"A\"," +
         "\"Vertices:\":{\"v1\":{},\"v2\":{}}," +
         "\"Edges:\":{\"v2\":{\"parent\":\"v1\",\"type\":\"TYPE\"}}}}";
-    JSONObject object = new JSONObject(jsonString);
+    JSONObject object = (JSONObject) parser.parse(jsonString);
 
     uut.extractVertex(object);
     assertEquals(2, uut.vertexs.size());
@@ -132,7 +135,7 @@ public void testExtractVertexTezWithOneToManyEdge() throws Exception {
         "\"Vertices:\":{\"v1\":{},\"v2\":{},\"v3\":{}}," +
         "\"Edges:\":{\"v1\":[{\"parent\":\"v2\",\"type\":\"TYPE1\"}," +
         "{\"parent\":\"v3\",\"type\":\"TYPE2\"}]}}}";
-    JSONObject object = new JSONObject(jsonString);
+    JSONObject object = (JSONObject) parser.parse(jsonString);
 
     uut.extractVertex(object);
 
@@ -164,7 +167,7 @@ public void testExtractOpEmptyObject() throws Exception {
   @Test
   public void testExtractOpSimple() throws Exception {
     String jsonString = "{\"a\":\"A\",\"b\":\"B\"}";
-    JSONObject object = new JSONObject(jsonString);
+    JSONObject object = (JSONObject) parser.parse(jsonString);
 
     Op result = uut.extractOp("op-name", object);
 
@@ -177,7 +180,7 @@ public void testExtractOpSimple() throws Exception {
   public void testExtract() throws Exception {
     String jsonString = "{\"b\":{\"b2\":\"B2\",\"b1\":\"B1\"}," +
         "\"Processor Tree:\":{\"a1\":{\"t1\":\"T1\"}}}";
-    JSONObject object = new JSONObject(jsonString);
+    JSONObject object = (JSONObject) parser.parse(jsonString);
 
     Op result = uut.extractOp("op-name", object);
     assertEquals("op-name", result.name);
@@ -192,4 +195,4 @@ public void testExtract() throws Exception {
     assertNotNull(result.vertex);
   }
 
-}
\ No newline at end of file
+}
diff --git common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java
index ce57e12..bad0531 100644
--- common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java
+++ common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.hive.common.jsonexplain.tez;
 
-import org.json.JSONObject;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -28,6 +29,8 @@
 
   private TezJsonParser uut;
 
+  private JSONParser parser = new JSONParser();
+
   @Before
   public void setUp() throws Exception {
     this.uut = new TezJsonParser();
@@ -37,7 +40,7 @@ public void setUp() throws Exception {
   public void testExtractStagesAndPlans() throws Exception {
     String jsonString = "{\"STAGE DEPENDENCIES\":{\"s1\":{\"ROOT STAGE\":\"\"}," +
         "\"s2\":{\"DEPENDENT STAGES\":\"s1\"}},\"STAGE PLANS\":{}}";
-    JSONObject input = new JSONObject(jsonString);
+    JSONObject input = (JSONObject) parser.parse(jsonString);
 
     uut.inputObject = input;
     uut.extractStagesAndPlans();
@@ -51,4 +54,4 @@ public void testExtractStagesAndPlans() throws Exception {
     assertEquals("s1", uut.stages.get("s2").parentStages.get(0).name);
   }
 
-}
\ No newline at end of file
+}
diff --git common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestVertex.java common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestVertex.java
index 3086bae..d49daec 100644
--- common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestVertex.java
+++ common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestVertex.java
@@ -18,16 +18,20 @@
 
 package org.apache.hadoop.hive.common.jsonexplain.tez;
 
-import org.json.JSONObject;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
 public class TestVertex {
 
+  private JSONParser parser = new JSONParser();
+
   @Test
   public void testExtractOpTree() throws Exception {
-    JSONObject object = new JSONObject("{\"Join:\":[{},{}]}");
+    String jsonString = "{\"Join:\":[{},{}]}";
+    JSONObject object = (JSONObject) parser.parse(jsonString);
 
     Vertex uut = new Vertex("name", object);
     uut.extractOpTree();
@@ -46,7 +50,7 @@ public void testExtractOpTreeUnknownKeyShouldThrowException() throws Exception {
   @Test
   public void testExtractOpNonJsonChildrenShouldThrow() throws Exception {
     String jsonString = "{\"opName\":{\"children\":\"not-json\"}}";
-    JSONObject operator = new JSONObject(jsonString);
+    JSONObject operator = (JSONObject) parser.parse(jsonString);
 
     Vertex uut = new Vertex("name", null);
 
@@ -60,7 +64,7 @@ public void testExtractOpNonJsonChildrenShouldThrow() throws Exception {
   @Test
   public void testExtractOpNoChildrenOperatorId() throws Exception {
     String jsonString = "{\"opName\":{\"OperatorId:\":\"operator-id\"}}";
-    JSONObject operator = new JSONObject(jsonString);
+    JSONObject operator = (JSONObject) parser.parse(jsonString);
 
     Vertex uut = new Vertex("name", null);
 
@@ -75,7 +79,7 @@ public void testExtractOpNoChildrenOperatorId() throws Exception {
   public void testExtractOpOneChild() throws Exception {
     String jsonString = "{\"opName\":{\"children\":{\"childName\":" +
         "{\"OperatorId:\":\"child-operator-id\"}}}}";
-    JSONObject operator = new JSONObject(jsonString);
+    JSONObject operator = (JSONObject) parser.parse(jsonString);
 
     Vertex uut = new Vertex("name", null);
 
@@ -91,7 +95,7 @@ public void testExtractOpMultipleChildren() throws Exception {
     String jsonString = "{\"opName\":{\"children\":[" +
         "{\"childName1\":{\"OperatorId:\":\"child-operator-id1\"}}," +
         "{\"childName2\":{\"OperatorId:\":\"child-operator-id2\"}}]}}";
-    JSONObject operator = new JSONObject(jsonString);
+    JSONObject operator = (JSONObject) parser.parse(jsonString);
 
     Vertex uut = new Vertex("name", null);
 
@@ -103,4 +107,4 @@ public void testExtractOpMultipleChildren() throws Exception {
     assertEquals("childName2", result.children.get(1).name);
     assertEquals("child-operator-id2", result.children.get(1).operatorId);
   }
-}
\ No newline at end of file
+}
diff --git pom.xml pom.xml
index 5a327a3..99e024a 100644
--- pom.xml
+++ pom.xml
@@ -144,7 +144,7 @@
    <jpam.version>1.1</jpam.version>
    <joda.version>2.5</joda.version>
    <jodd.version>3.5.2</jodd.version>
-    <json.version>20090211</json.version>
+    <json-simple.version>1.1.1</json-simple.version>
    <junit.version>4.11</junit.version>
    <kryo.version>2.22</kryo.version>
    <libfb303.version>0.9.3</libfb303.version>
@@ -256,6 +256,11 @@
        <version>${javaewah.version}</version>
      </dependency>
      <dependency>
+        <groupId>com.googlecode.json-simple</groupId>
+        <artifactId>json-simple</artifactId>
+        <version>${json-simple.version}</version>
+      </dependency>
+      <dependency>
        <groupId>com.jolbox</groupId>
        <artifactId>bonecp</artifactId>
        <version>${bonecp.version}</version>
@@ -565,11 +570,6 @@
        <version>${snappy.version}</version>
      </dependency>
      <dependency>
-        <groupId>org.json</groupId>
-        <artifactId>json</artifactId>
-        <version>${json.version}</version>
-      </dependency>
-      <dependency>
        <groupId>org.mockito</groupId>
        <artifactId>mockito-all</artifactId>
        <version>${mockito-all.version}</version>
diff --git ql/pom.xml ql/pom.xml
index 12ef898..04c1095 100644
--- ql/pom.xml
+++ ql/pom.xml
@@ -293,16 +293,15 @@
      <version>${gson.version}</version>
    </dependency>
    <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+    </dependency>
+    <dependency>
      <groupId>org.iq80.snappy</groupId>
      <artifactId>snappy</artifactId>
      <version>${snappy.version}</version>
    </dependency>
    <dependency>
-      <groupId>org.json</groupId>
-      <artifactId>json</artifactId>
-      <version>${json.version}</version>
-    </dependency>
-    <dependency>
      <groupId>stax</groupId>
      <artifactId>stax-api</artifactId>
      <version>${stax.version}</version>
@@ -705,7 +704,6 @@
                  <include>commons-lang:commons-lang</include>
                  <include>org.apache.commons:commons-lang3</include>
                  <include>org.jodd:jodd-core</include>
-                  <include>org.json:json</include>
                  <include>org.apache.avro:avro</include>
                  <include>org.apache.avro:avro-mapred</include>
                  <include>org.apache.hive.shims:hive-shims-0.20S</include>
@@ -719,6 +717,7 @@
                  <include>org.codehaus.jackson:jackson-core-asl</include>
                  <include>org.codehaus.jackson:jackson-mapper-asl</include>
                  <include>com.google.guava:guava</include>
+                  <include>com.googlecode.json-simple:json-simple</include>
                  <include>net.sf.opencsv:opencsv</include>
                  <include>org.apache.hive:spark-client</include>
                  <include>joda-time:joda-time</include>
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 94fc6b5..73c5af3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -43,6 +43,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory;
+import org.apache.hadoop.hive.common.jsonexplain.JsonUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -64,9 +65,8 @@
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hive.common.util.AnnotationUtils;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
 
 /**
  * ExplainTask implementation.
@@ -255,7 +255,9 @@ private Object toJson(String header, String message, PrintStream out, ExplainWor
   private Object toJson(String header, List<String> messages, PrintStream out, ExplainWork work)
      throws Exception {
     if (work.isFormatted()) {
-      return new JSONArray(messages);
+      JSONArray result = new JSONArray();
+      result.addAll(messages);
+      return result;
     }
     out.print(header);
     out.println(": ");
@@ -448,7 +450,7 @@ else if (ent.getValue() instanceof List) {
             JSONObject jsonDep = new JSONObject();
             jsonDep.put("parent", dep.getName());
             jsonDep.put("type", dep.getType());
-            json.accumulate(ent.getKey().toString(), jsonDep);
+            JsonUtils.accumulate(json, ent.getKey().toString(), jsonDep);
           }
         }
       } else if (ent.getValue() != null && !((List) ent.getValue()).isEmpty()
@@ -478,7 +480,7 @@ else if (ent.getValue() instanceof List) {
            jsonDep.put("parent", dep.getName());
            jsonDep.put("type", dep.getShuffleType());
            jsonDep.put("partitions", dep.getNumPartitions());
-            json.accumulate(ent.getKey().toString(), jsonDep);
+            JsonUtils.accumulate(json, ent.getKey().toString(), jsonDep);
          }
        }
      } else {
@@ -536,7 +538,7 @@ private JSONArray outputList(List<?> l, PrintStream out, boolean hasHeader,
      }
 
      if (jsonOutput) {
-        outputArray.put(o);
+        outputArray.add(o);
      }
      nl = true;
    }
@@ -547,7 +549,7 @@ private JSONArray outputList(List<?> l, PrintStream out, boolean hasHeader,
      JSONObject jsonOut = outputPlan(o, out, extended, jsonOutput,
          jsonOutput ? 0 : (hasHeader ? indent + 2 : indent));
      if (jsonOutput) {
-        outputArray.put(jsonOut);
+        outputArray.add(jsonOut);
      }
    }
 
@@ -624,8 +626,8 @@ JSONObject outputPlan(Object work, PrintStream out,
      JSONObject jsonOut = outputPlan(operator.getConf(), out, extended, jsonOutput,
          jsonOutput ? 0 : indent, appender);
      if (this.work != null && this.work.isUserLevelExplain()) {
-        if (jsonOut != null && jsonOut.length() > 0) {
-          ((JSONObject) jsonOut.get(JSONObject.getNames(jsonOut)[0])).put("OperatorId:",
+        if (jsonOut != null && jsonOut.size() > 0) {
+          ((JSONObject) jsonOut.get(JsonUtils.getNames(jsonOut)[0])).put("OperatorId:",
              operator.getOperatorId());
        }
      }
@@ -641,7 +643,7 @@ JSONObject outputPlan(Object work, PrintStream out,
        for (Operator<? extends OperatorDesc> op : operator.getChildOperators()) {
          JSONObject jsonOut = outputPlan(op, out, extended, jsonOutput, cindent);
          if (jsonOutput) {
-            ((JSONObject) json.get(JSONObject.getNames(json)[0])).accumulate("children", jsonOut);
+            JsonUtils.accumulate((JSONObject) json.get(JsonUtils.getNames(json)[0]), "children", jsonOut);
          }
        }
      }
@@ -767,7 +769,7 @@ JSONObject outputPlan(Object work, PrintStream out,
      if (!skipHeader) {
        json.put(header, jsonOut);
      } else {
-        for(String k: JSONObject.getNames(jsonOut)) {
+        for(String k: JsonUtils.getNames(jsonOut)) {
          json.put(k, jsonOut.get(k));
        }
      }
@@ -928,7 +930,7 @@ JSONObject outputDependencies(Task<? extends Serializable> task,
  }
 
  public String outputAST(String treeString, PrintStream out,
-      boolean jsonOutput, int indent) throws JSONException {
+      boolean jsonOutput, int indent) {
    if (out != null) {
      out.print(indentString(indent));
      out.println("ABSTRACT SYNTAX TREE:");
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
index 3c10169..0176439 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
@@ -66,7 +66,7 @@
 import org.apache.tez.dag.api.VertexGroup;
 import org.apache.tez.dag.api.client.DAGClient;
 import org.apache.tez.dag.api.client.StatusGetOpts;
-import org.json.JSONObject;
+import org.json.simple.JSONObject;
 
 /**
  *
@@ -292,7 +292,9 @@ DAG build(JobConf conf, TezWork work, Path scratchDir,
    DAG dag = DAG.create(work.getName());
 
    // set some info for the query
-    JSONObject json = new JSONObject().put("context", "Hive").put("description", ctx.getCmd());
+    JSONObject json = new JSONObject();
+    json.put("context", "Hive");
+    json.put("description", ctx.getCmd());
    String dagInfo = json.toString();
 
    if (LOG.isDebugEnabled()) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
index e4ea7e3..96242f9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
@@ -36,7 +36,7 @@
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
 import org.apache.hadoop.yarn.client.api.TimelineClient;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.json.JSONObject;
+import org.json.simple.JSONObject;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
index c154d6e..dedf3fc 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -52,9 +52,10 @@
 import org.apache.thrift.protocol.TJSONProtocol;
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.JsonGenerator;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
 
 import javax.annotation.Nullable;
 
@@ -302,8 +303,9 @@ public static ReadMetaData readMetaData(FileSystem fs, Path metadataPath)
        read = mdstream.read(buffer);
      }
      String md = new String(sb.toByteArray(), "UTF-8");
-      JSONObject jsonContainer = new JSONObject(md);
-      String version = jsonContainer.getString("version");
+      JSONParser parser = new JSONParser();
+      JSONObject jsonContainer = (JSONObject) parser.parse(md);
+      String version = jsonContainer.get("version").toString();
      String fcversion = getJSONStringEntry(jsonContainer, "fcversion");
      checkCompatibility(version, fcversion);
      String tableDesc = getJSONStringEntry(jsonContainer,"table");
@@ -314,10 +316,11 @@ public static ReadMetaData readMetaData(FileSystem fs, Path metadataPath)
        TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
        deserializer.deserialize(table, tableDesc, "UTF-8");
        // TODO : jackson-streaming-iterable-redo this
-        JSONArray jsonPartitions = new JSONArray(jsonContainer.getString("partitions"));
-        partitionsList = new ArrayList<Partition>(jsonPartitions.length());
-        for (int i = 0; i < jsonPartitions.length(); ++i) {
-          String partDesc = jsonPartitions.getString(i);
+
+        JSONArray jsonPartitions = (JSONArray) parser.parse(jsonContainer.get("partitions").toString());
+        partitionsList = new ArrayList<Partition>(jsonPartitions.size());
+        for (int i = 0; i < jsonPartitions.size(); ++i) {
+          String partDesc = jsonPartitions.get(i).toString();
          Partition partition = new Partition();
          deserializer.deserialize(partition, partDesc, "UTF-8");
          partitionsList.add(partition);
@@ -325,7 +328,7 @@ public static ReadMetaData readMetaData(FileSystem fs, Path metadataPath)
      }
 
      return new ReadMetaData(table, partitionsList,readReplicationSpec(jsonContainer));
-    } catch (JSONException e) {
+    } catch (ParseException e) {
      throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("Error in serializing metadata"), e);
    } catch (TException e) {
      throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("Error in serializing metadata"), e);
@@ -349,9 +352,9 @@ public String apply(@Nullable String s) {
  @VisibleForTesting
  static String getJSONStringEntry(JSONObject jsonContainer, String name) {
    String retval = null;
-    try {
-      retval = jsonContainer.getString(name);
-    } catch (JSONException ignored) {}
+    if (jsonContainer.get(name) != null) {
+      retval = jsonContainer.get(name).toString();
+    }
    return retval;
  }
 
diff --git ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java
index 26cee8d..ca443f8 100644
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.junit.Before;
 import org.junit.Test;
+import org.json.simple.JSONObject;
 
 import static org.junit.Assert.assertEquals;
 
@@ -39,7 +40,7 @@ public void setUp() {
  @Test
  public void testCreatePreHookEventJsonShhouldMatch() throws Exception {
    TimelineEntity timelineEntity = uut.createPreHookEvent(
-        "test-query-id", "test-query", new org.json.JSONObject(), 0L,
+        "test-query-id", "test-query", new JSONObject(), 0L,
        "test-user", "test-request-user", 0, 0, "test-opid");
    String resultStr = (String) timelineEntity.getOtherInfo()
        .get(ATSHook.OtherInfoTypes.QUERY.name());
@@ -50,4 +51,4 @@ public void testCreatePreHookEventJsonShhouldMatch() throws Exception {
    assertEquals(expected, result);
  }
 
-}
\ No newline at end of file
+}
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java
index 5bca87a..df8bea4 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java
@@ -24,7 +24,8 @@
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.thrift.TSerializer;
 import org.apache.thrift.protocol.TJSONProtocol;
-import org.json.JSONObject;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
 import org.junit.Test;
 
 import java.io.ByteArrayInputStream;
@@ -42,6 +43,8 @@
  */
 public class TestEximUtil extends TestCase {
 
+  private JSONParser parser = new JSONParser();
+
  private class FakeSeekableInputStream extends DataInputStream
      implements Seekable, PositionedReadable {
 
@@ -131,7 +134,7 @@ public void testReadMetaData() throws Exception {
  @Test
  public void testGetJSONStringEntry() throws Exception {
    String jsonString = "{\"string-key\":\"string-value\",\"non-string-key\":1}";
-    JSONObject jsonObject = new JSONObject(jsonString);
+    JSONObject jsonObject = (JSONObject) parser.parse(jsonString);
    assertEquals("string-value", EximUtil.getJSONStringEntry(jsonObject, "string-key"));
    assertEquals("1", EximUtil.getJSONStringEntry(jsonObject, "non-string-key"));
    assertNull(EximUtil.getJSONStringEntry(jsonObject, "no-such-key"));
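
For reference, the org.json semantics this patch preserves: JSONObject.accumulate stores the first value under a key directly (wrapping a JSONArray value in a fresh array) and folds any later value into a JSONArray, while the removed static JSONObject.getNames(JSONObject) returned the key set as a String[]. Below is a minimal usage sketch of the helpers introduced above, assuming json-simple 1.1.1 on the classpath; the demo class name and JSON literals are illustrative only, not part of the patch:

import org.apache.hadoop.hive.common.jsonexplain.JsonUtils;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;

public class JsonUtilsDemo {
  public static void main(String[] args) throws Exception {
    // json-simple's JSONObject has no String constructor, so parsing goes
    // through JSONParser -- the same pattern the updated tests follow.
    JSONParser parser = new JSONParser();
    JSONObject json = (JSONObject) parser.parse("{\"parent\":\"v1\",\"type\":\"TYPE\"}");

    // The first accumulate on a fresh key behaves like put ...
    JsonUtils.accumulate(json, "children", "c1");   // {"children":"c1",...}
    // ... a second accumulate converts the value into a JSONArray,
    // matching org.json's JSONObject.accumulate.
    JsonUtils.accumulate(json, "children", "c2");   // {"children":["c1","c2"],...}

    // getNames replaces the static org.json JSONObject.getNames(JSONObject).
    for (String name : JsonUtils.getNames(json)) {
      System.out.println(name + " = " + json.get(name));
    }
  }
}

Because json-simple's JSONObject simply extends HashMap, the remaining org.json calls in the patch map mechanically: has(...) becomes keySet().contains(...), getString(...) becomes get(...).toString(), getJSONObject(...)/getJSONArray(...) become casts on get(...), and length() becomes size(); only the two helpers above need dedicated code.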