diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java index 796ccc8..cf30798 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java @@ -28,6 +28,8 @@ import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.metadata.Hive; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.map.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; @@ -45,9 +47,6 @@ import org.apache.thrift.protocol.TJSONProtocol; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonGenerator; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; import javax.annotation.Nullable; @@ -404,13 +403,14 @@ public static ReadMetaData readMetaData(FileSystem fs, Path metadataPath) read = mdstream.read(buffer); } String md = new String(sb.toByteArray(), "UTF-8"); - JSONObject jsonContainer = new JSONObject(md); - String version = jsonContainer.getString("version"); - String fcversion = getJSONStringEntry(jsonContainer, "fcversion"); + ObjectMapper objectMapper = new ObjectMapper(); + JsonNode jsonNode = objectMapper.readTree(md); + String version = getJSONStringEntry(jsonNode, "version"); + String fcversion = getJSONStringEntry(jsonNode, "fcversion"); checkCompatibility(version, fcversion); - String dbDesc = getJSONStringEntry(jsonContainer, "db"); - String tableDesc = getJSONStringEntry(jsonContainer,"table"); + String dbDesc = getJSONStringEntry(jsonNode, "db"); + String tableDesc = getJSONStringEntry(jsonNode, "table"); TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory()); Database db = null; @@ -425,19 +425,19 @@ public static ReadMetaData readMetaData(FileSystem fs, Path metadataPath) table = new Table(); 
deserializer.deserialize(table, tableDesc, "UTF-8"); // TODO : jackson-streaming-iterable-redo this - JSONArray jsonPartitions = new JSONArray(jsonContainer.getString("partitions")); - partitionsList = new ArrayList(jsonPartitions.length()); - for (int i = 0; i < jsonPartitions.length(); ++i) { - String partDesc = jsonPartitions.getString(i); + JsonNode partitionsNode = jsonNode.get("partitions"); + Iterator<JsonNode> partitionsIter = partitionsNode.getElements(); + int size = partitionsNode.size(); + partitionsList = new ArrayList(size); + while (partitionsIter.hasNext()) { + String partDesc = partitionsIter.next().getTextValue(); Partition partition = new Partition(); deserializer.deserialize(partition, partDesc, "UTF-8"); partitionsList.add(partition); } } - return new ReadMetaData(db, table, partitionsList,readReplicationSpec(jsonContainer)); - } catch (JSONException e) { - throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METADATA.getMsg(), e); + return new ReadMetaData(db, table, partitionsList, readReplicationSpec(jsonNode)); } catch (TException e) { throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METADATA.getMsg(), e); } finally { @@ -447,22 +447,22 @@ public static ReadMetaData readMetaData(FileSystem fs, Path metadataPath) } } - private static ReplicationSpec readReplicationSpec(final JSONObject jsonContainer){ + private static ReplicationSpec readReplicationSpec(final JsonNode jsonNode){ Function keyFetcher = new Function() { @Override public String apply(@Nullable String s) { - return getJSONStringEntry(jsonContainer,s); + return getJSONStringEntry(jsonNode, s); } }; return new ReplicationSpec(keyFetcher); } - private static String getJSONStringEntry(JSONObject jsonContainer, String name) { - String retval = null; - try { - retval = jsonContainer.getString(name); - } catch (JSONException ignored) {} - return retval; + private static String getJSONStringEntry(JsonNode objectNode, String name) { + if (objectNode.has(name)) { + return 
objectNode.get(name).getTextValue(); + } else { + return null; + } } /* check the forward and backward compatibility */