From 88920b0a7d17cf1cd6f0dcc207e18c24faaf759a Mon Sep 17 00:00:00 2001 From: Mike Drob Date: Mon, 2 Oct 2017 16:31:48 -0500 Subject: [PATCH] HBASE-16338 Remove Jackson1 deps * Change imports from org.codehaus to com.fasterxml * Exclude transitive jackson1 from hadoop and others * Minor test cleanup to add assert messages, fix some parameter order * Add anti-pattern check for using jackson 1 imports * Add explicit non-null serialization directive to ScannerModel --- dev-support/hbase-personality.sh | 6 ++ hbase-client/pom.xml | 8 +- .../org/apache/hadoop/hbase/util/JsonMapper.java | 2 +- .../apache/hadoop/hbase/client/TestOperation.java | 2 +- .../apache/hadoop/hbase/RESTApiClusterManager.java | 18 ++-- hbase-mapreduce/pom.xml | 8 -- .../apache/hadoop/hbase/PerformanceEvaluation.java | 10 +-- .../hadoop/hbase/TestPerformanceEvaluation.java | 6 +- .../src/main/resources/supplemental-models.xml | 13 --- hbase-rest/pom.xml | 12 +-- ...amingUtil.java => ProtobufStreamingOutput.java} | 11 ++- .../org/apache/hadoop/hbase/rest/RESTServer.java | 4 +- .../hadoop/hbase/rest/TableScanResource.java | 26 ++---- .../apache/hadoop/hbase/rest/model/CellModel.java | 2 +- .../hadoop/hbase/rest/model/ColumnSchemaModel.java | 5 +- .../hadoop/hbase/rest/model/NamespacesModel.java | 3 +- .../apache/hadoop/hbase/rest/model/RowModel.java | 2 +- .../hadoop/hbase/rest/model/ScannerModel.java | 6 +- .../rest/model/StorageClusterStatusModel.java | 6 ++ .../rest/model/StorageClusterVersionModel.java | 3 - .../hadoop/hbase/rest/model/TableSchemaModel.java | 7 +- .../hadoop/hbase/rest/HBaseRESTTestingUtility.java | 5 +- .../apache/hadoop/hbase/rest/RowResourceBase.java | 4 +- .../apache/hadoop/hbase/rest/TestDeleteRow.java | 2 +- .../hadoop/hbase/rest/TestMultiRowResource.java | 9 +- .../hbase/rest/TestNamespacesInstanceResource.java | 9 +- .../hadoop/hbase/rest/TestSchemaResource.java | 52 ++++++++---- .../apache/hadoop/hbase/rest/TestTableScan.java | 62 ++++---------- 
.../hadoop/hbase/rest/TestVersionResource.java | 21 ++--- .../hbase/rest/model/TestColumnSchemaModel.java | 16 ++-- .../hadoop/hbase/rest/model/TestModelBase.java | 6 +- .../hbase/rest/model/TestTableSchemaModel.java | 3 + hbase-server/pom.xml | 20 ++--- .../apache/hadoop/hbase/io/hfile/AgeSnapshot.java | 2 +- .../hadoop/hbase/io/hfile/BlockCacheUtil.java | 17 ++-- .../hadoop/hbase/io/hfile/LruBlockCache.java | 5 +- .../hbase/io/hfile/bucket/BucketAllocator.java | 2 +- .../org/apache/hadoop/hbase/ipc/RpcServer.java | 2 +- .../hadoop/hbase/monitoring/MonitoredTaskImpl.java | 2 +- .../org/apache/hadoop/hbase/util/JSONBean.java | 6 +- .../apache/hadoop/hbase/util/JSONMetricUtil.java | 10 +-- .../apache/hadoop/hbase/wal/WALPrettyPrinter.java | 2 +- .../hbase-webapps/master/processMaster.jsp | 2 +- .../resources/hbase-webapps/master/processRS.jsp | 2 +- .../hbase-webapps/regionserver/processRS.jsp | 2 +- .../hbase/io/hfile/TestBlockCacheReporting.java | 4 +- .../hadoop/hbase/util/TestJSONMetricUtil.java | 33 ++++---- hbase-shaded/hbase-shaded-mapreduce/pom.xml | 4 - hbase-shaded/pom.xml | 4 + hbase-shell/src/main/ruby/hbase/taskmonitor.rb | 2 +- hbase-spark/pom.xml | 15 ++++ pom.xml | 98 ++++++++++++++++------ 52 files changed, 303 insertions(+), 280 deletions(-) rename hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/{ProtobufStreamingUtil.java => ProtobufStreamingOutput.java} (90%) diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh index c0386e5e40..2dc8308185 100755 --- a/dev-support/hbase-personality.sh +++ b/dev-support/hbase-personality.sh @@ -425,6 +425,12 @@ function hbaseanti_patchfile ((result=result+1)) fi + warnings=$(${GREP} -c 'import org.codehaus.jackson' "${patchfile}") + if [[ ${warnings} -gt 0 ]]; then + add_vote_table -1 hbaseanti "" "The patch appears to use Jackson 1 classes/annotations: ${warnings}." 
+ ((result=result+1)) + fi + if [[ ${result} -gt 0 ]]; then return 1 fi diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml index d9aa006fef..a8e73c7cb5 100644 --- a/hbase-client/pom.xml +++ b/hbase-client/pom.xml @@ -168,10 +168,6 @@ org.apache.htrace htrace-core - - org.codehaus.jackson - jackson-mapper-asl - org.jruby.jcodings jcodings @@ -218,6 +214,10 @@ + + com.fasterxml.jackson.core + jackson-databind + diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java index 53cd264437..b5d31ff131 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.util; import java.io.IOException; import java.util.Map; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.yetus.audience.InterfaceAudience; -import org.codehaus.jackson.map.ObjectMapper; /** * Utility class for converting objects to JSON diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java index fa9c4ad996..a8162c0a21 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java @@ -62,10 +62,10 @@ import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.BuilderStyleTest; import org.apache.hadoop.hbase.util.Bytes; -import org.codehaus.jackson.map.ObjectMapper; import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; +import com.fasterxml.jackson.databind.ObjectMapper; /** * Run tests that use the functionality of the Operation superclass for diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java index 055b58a081..8f69d337b5 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java @@ -18,13 +18,13 @@ package org.apache.hadoop.hbase; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.util.ReflectionUtils; -import org.codehaus.jackson.JsonNode; -import org.codehaus.jackson.map.ObjectMapper; import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; @@ -222,8 +222,8 @@ public class RESTApiClusterManager extends Configured implements ClusterManager if (hosts != null) { // Iterate through the list of hosts, stopping once you've reached the requested hostname. for (JsonNode host : hosts) { - if (host.get("hostname").getTextValue().equals(hostname)) { - hostId = host.get("hostId").getTextValue(); + if (host.get("hostname").textValue().equals(hostname)) { + hostId = host.get("hostId").textValue(); break; } } @@ -272,12 +272,12 @@ public class RESTApiClusterManager extends Configured implements ClusterManager if (roles != null) { // Iterate through the list of roles, stopping once the requested one is found. 
for (JsonNode role : roles) { - if (role.get("hostRef").get("hostId").getTextValue().equals(hostId) && + if (role.get("hostRef").get("hostId").textValue().equals(hostId) && role.get("type") - .getTextValue() + .textValue() .toLowerCase(Locale.ROOT) .equals(roleType.toLowerCase(Locale.ROOT))) { - roleValue = role.get(property).getTextValue(); + roleValue = role.get(property).textValue(); break; } } @@ -306,8 +306,8 @@ public class RESTApiClusterManager extends Configured implements ClusterManager if (services != null) { // Iterate through the list of services, stopping once the requested one is found. for (JsonNode serviceEntry : services) { - if (serviceEntry.get("type").getTextValue().equals(service.toString())) { - serviceName = serviceEntry.get("name").getTextValue(); + if (serviceEntry.get("type").textValue().equals(service.toString())) { + serviceName = serviceEntry.get("name").textValue(); break; } } diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml index eaf6aea5d2..aaae0a47ff 100644 --- a/hbase-mapreduce/pom.xml +++ b/hbase-mapreduce/pom.xml @@ -285,14 +285,6 @@ org.apache.zookeeper zookeeper - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-mapper-asl - junit junit diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java index 05e984ee56..2bf94f41df 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase; -import static org.codehaus.jackson.map.SerializationConfig.Feature.SORT_PROPERTIES_ALPHABETICALLY; - import java.io.IOException; import java.io.PrintStream; import java.lang.reflect.Constructor; @@ -71,8 +69,6 @@ import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.Scan; import 
org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.BinaryComparator; -import org.apache.hadoop.hbase.filter.CompareFilter; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterAllFilter; import org.apache.hadoop.hbase.filter.FilterList; @@ -85,7 +81,6 @@ import org.apache.hadoop.hbase.io.hfile.RandomDistribution; import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.regionserver.CompactingMemStore; -import org.apache.hadoop.hbase.regionserver.TestHRegionFileSystem; import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration; import org.apache.hadoop.hbase.trace.SpanReceiverHost; import org.apache.hadoop.hbase.util.*; @@ -98,7 +93,6 @@ import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import org.codehaus.jackson.map.ObjectMapper; import org.apache.htrace.Sampler; import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; @@ -108,6 +102,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa import com.codahale.metrics.Histogram; import com.codahale.metrics.UniformReservoir; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.MapperFeature; /** * Script used evaluating HBase performance and scalability. 
Runs a HBase @@ -133,7 +129,7 @@ public class PerformanceEvaluation extends Configured implements Tool { private static final Log LOG = LogFactory.getLog(PerformanceEvaluation.class.getName()); private static final ObjectMapper MAPPER = new ObjectMapper(); static { - MAPPER.configure(SORT_PROPERTIES_ALPHABETICALLY, true); + MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true); } public static final String TABLE_NAME = "TestTable"; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java index 86a3d3fb03..106b7e9ac0 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java @@ -37,9 +37,6 @@ import org.apache.hadoop.hbase.PerformanceEvaluation.RandomReadTest; import org.apache.hadoop.hbase.PerformanceEvaluation.TestOptions; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.codehaus.jackson.JsonGenerationException; -import org.codehaus.jackson.map.JsonMappingException; -import org.codehaus.jackson.map.ObjectMapper; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -47,6 +44,9 @@ import org.junit.experimental.categories.Category; import com.codahale.metrics.Histogram; import com.codahale.metrics.Snapshot; import com.codahale.metrics.UniformReservoir; +import com.fasterxml.jackson.core.JsonGenerationException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; @Category({MiscTests.class, SmallTests.class}) public class TestPerformanceEvaluation { diff --git a/hbase-resource-bundle/src/main/resources/supplemental-models.xml b/hbase-resource-bundle/src/main/resources/supplemental-models.xml index 
68e2d5f88a..4acf2fd54f 100644 --- a/hbase-resource-bundle/src/main/resources/supplemental-models.xml +++ b/hbase-resource-bundle/src/main/resources/supplemental-models.xml @@ -512,19 +512,6 @@ under the License. - - - org.glassfish.jersey.media - jersey-media-json-jackson1 - - - CDDL 1.1 - https://glassfish.java.net/public/CDDL+GPL_1_1.html - repo - - - - org.glassfish.web diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml index 0a1257342c..c88527e3ff 100644 --- a/hbase-rest/pom.xml +++ b/hbase-rest/pom.xml @@ -300,8 +300,8 @@ jersey-container-servlet-core - org.glassfish.jersey.media - jersey-media-json-jackson1 + com.fasterxml.jackson.jaxrs + jackson-jaxrs-json-provider @@ -320,14 +320,6 @@ junit test - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-mapper-asl - org.mockito mockito-all diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java similarity index 90% rename from hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java rename to hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java index cb0f4c824d..b06704496d 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java @@ -35,23 +35,22 @@ import org.apache.hadoop.hbase.rest.model.CellSetModel; import org.apache.hadoop.hbase.rest.model.RowModel; import org.apache.hadoop.hbase.util.Bytes; +public class ProtobufStreamingOutput implements StreamingOutput { + private static final Log LOG = LogFactory.getLog(ProtobufStreamingOutput.class); -public class ProtobufStreamingUtil implements StreamingOutput { - - private static final Log LOG = LogFactory.getLog(ProtobufStreamingUtil.class); private String contentType; private ResultScanner resultScanner; private int limit; private int 
fetchSize; - protected ProtobufStreamingUtil(ResultScanner scanner, String type, int limit, int fetchSize) { + protected ProtobufStreamingOutput(ResultScanner scanner, String type, int limit, int fetchSize) { this.resultScanner = scanner; this.contentType = type; this.limit = limit; this.fetchSize = fetchSize; if (LOG.isTraceEnabled()) { - LOG.trace("Created ScanStreamingUtil with content type = " + this.contentType - + " user limit : " + this.limit + " scan fetch size : " + this.fetchSize); + LOG.trace("Created StreamingOutput with content type = " + this.contentType + + " user limit : " + this.limit + " scan fetch size : " + this.fetchSize); } } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java index 907a220544..c2f6cf6a61 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java @@ -27,6 +27,7 @@ import java.util.Set; import java.util.EnumSet; import java.util.concurrent.ArrayBlockingQueue; +import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; @@ -67,7 +68,6 @@ import org.eclipse.jetty.util.thread.QueuedThreadPool; import org.eclipse.jetty.jmx.MBeanContainer; import org.eclipse.jetty.servlet.FilterHolder; -import org.glassfish.jersey.jackson1.Jackson1Feature; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; @@ -238,7 +238,7 @@ public class RESTServer implements Constants { // set up the Jersey servlet container for Jetty ResourceConfig application = new ResourceConfig(). 
- packages("org.apache.hadoop.hbase.rest").register(Jackson1Feature.class); + packages("org.apache.hadoop.hbase.rest").register(JacksonJaxbJsonProvider.class); ServletHolder sh = new ServletHolder(new ServletContainer(application)); // Set the default max thread number to 100 to limit diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java index aeb213b0a8..f8b959331d 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java @@ -23,15 +23,13 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; -import javax.ws.rs.PathParam; import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; +import javax.ws.rs.core.StreamingOutput; import javax.ws.rs.core.UriInfo; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -47,13 +45,14 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.rest.model.CellModel; import org.apache.hadoop.hbase.rest.model.RowModel; -import org.codehaus.jackson.annotate.JsonIgnore; -import org.codehaus.jackson.annotate.JsonProperty; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; @InterfaceAudience.Private public class TableScanResource extends ResourceBase { - private static final Log LOG = LogFactory.getLog(TableScanResource.class); + TableResource tableResource; ResultScanner results; int userRequestedLimit; @@ -75,23 +74,14 @@ public class TableScanResource extends ResourceBase { 
servlet.getMetrics().incrementSucessfulScanRequests(1); final Iterator itr = results.iterator(); return new CellSetModelStream(new ArrayList() { + @Override public Iterator iterator() { return new Iterator() { int count = rowsToSend; @Override public boolean hasNext() { - if (count > 0) { - return itr.hasNext(); - } else { - return false; - } - } - - @Override - public void remove() { - throw new UnsupportedOperationException( - "Remove method cannot be used in CellSetModelStream"); + return count > 0 && itr.hasNext(); } @Override @@ -127,7 +117,7 @@ public class TableScanResource extends ResourceBase { servlet.getMetrics().incrementRequests(1); try { int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10); - ProtobufStreamingUtil stream = new ProtobufStreamingUtil(this.results, contentType, + StreamingOutput stream = new ProtobufStreamingOutput(this.results, contentType, userRequestedLimit, fetchSize); servlet.getMetrics().incrementSucessfulScanRequests(1); ResponseBuilder response = Response.ok(stream); diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java index 3465490cce..ffb6743f5e 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java @@ -28,6 +28,7 @@ import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlValue; +import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; @@ -38,7 +39,6 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; import 
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell; -import org.codehaus.jackson.annotate.JsonProperty; /** * Representation of a cell. A cell is a single value associated a column and diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java index e9686f2224..967f6ba2ce 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java @@ -31,8 +31,9 @@ import javax.xml.namespace.QName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; -import org.codehaus.jackson.annotate.JsonAnyGetter; -import org.codehaus.jackson.annotate.JsonAnySetter; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; /** * Representation of a column family schema. 
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/NamespacesModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/NamespacesModel.java index 9bb5adb405..c0dfa75355 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/NamespacesModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/NamespacesModel.java @@ -34,7 +34,8 @@ import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; import org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces; -import org.codehaus.jackson.annotate.JsonProperty; + +import com.fasterxml.jackson.annotation.JsonProperty; /** diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java index c8f028c437..8f0bf53e74 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java @@ -30,12 +30,12 @@ import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; +import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; -import org.codehaus.jackson.annotate.JsonProperty; /** * Representation of a row. 
A row is a related set of cells, grouped by common diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java index 739af9def3..a678fdede4 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java @@ -44,7 +44,6 @@ import org.apache.hadoop.hbase.filter.ColumnPaginationFilter; import org.apache.hadoop.hbase.filter.ColumnPrefixFilter; import org.apache.hadoop.hbase.filter.ColumnRangeFilter; import org.apache.hadoop.hbase.filter.CompareFilter; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.DependentColumnFilter; import org.apache.hadoop.hbase.filter.FamilyFilter; import org.apache.hadoop.hbase.filter.Filter; @@ -77,10 +76,10 @@ import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; import com.google.protobuf.ByteString; -import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider; - /** * A representation of Scanner parameters. 
* @@ -101,6 +100,7 @@ import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider; * */ @XmlRootElement(name="Scanner") +@JsonInclude(JsonInclude.Include.NON_NULL) @InterfaceAudience.Private public class ScannerModel implements ProtobufMessageHandler, Serializable { diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java index ccce92d56c..2f38a440fe 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java @@ -36,6 +36,8 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus; import org.apache.hadoop.hbase.util.Bytes; +import com.fasterxml.jackson.annotation.JsonProperty; + /** * Representation of the status of a storage cluster: *

@@ -561,6 +563,8 @@ public class StorageClusterStatusModel */ @XmlElement(name = "Node") @XmlElementWrapper(name = "LiveNodes") + // workaround https://github.com/FasterXML/jackson-dataformat-xml/issues/192 + @JsonProperty("LiveNodes") public List getLiveNodes() { return liveNodes; } @@ -570,6 +574,8 @@ public class StorageClusterStatusModel */ @XmlElement(name = "Node") @XmlElementWrapper(name = "DeadNodes") + // workaround https://github.com/FasterXML/jackson-dataformat-xml/issues/192 + @JsonProperty("DeadNodes") public List getDeadNodes() { return deadNodes; } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java index af05b099f7..584099765c 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java @@ -19,13 +19,10 @@ package org.apache.hadoop.hbase.rest.model; -import org.codehaus.jackson.annotate.JsonValue; - import java.io.Serializable; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; -import javax.xml.bind.annotation.XmlValue; import org.apache.yetus.audience.InterfaceAudience; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java index 7e81745f93..553bfe015d 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java @@ -43,9 +43,10 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema; import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema; import 
org.apache.hadoop.hbase.util.Bytes; -import org.codehaus.jackson.annotate.JsonAnyGetter; -import org.codehaus.jackson.annotate.JsonAnySetter; -import org.codehaus.jackson.annotate.JsonIgnore; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonIgnore; /** * A representation of HBase table descriptors. diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java index 95d9432eca..0714c7bcc8 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java @@ -18,6 +18,7 @@ */ package org.apache.hadoop.hbase.rest; +import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -32,9 +33,7 @@ import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.servlet.ServletHolder; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.glassfish.jersey.jackson1.Jackson1Feature; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; @@ -63,7 +62,7 @@ public class HBaseRESTTestingUtility { // set up the Jersey servlet container for Jetty ResourceConfig app = new ResourceConfig(). 
- packages("org.apache.hadoop.hbase.rest").register(Jackson1Feature.class); + packages("org.apache.hadoop.hbase.rest").register(JacksonJaxbJsonProvider.class); ServletHolder sh = new ServletHolder(new ServletContainer(app)); // set up Jetty and run the embedded server diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java index aa4fb3ed42..35442ea016 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java @@ -30,6 +30,8 @@ import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -43,8 +45,6 @@ import org.apache.hadoop.hbase.rest.model.CellModel; import org.apache.hadoop.hbase.rest.model.CellSetModel; import org.apache.hadoop.hbase.rest.model.RowModel; import org.apache.hadoop.hbase.util.Bytes; -import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider; -import org.codehaus.jackson.map.ObjectMapper; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java index 516ce9e798..a3c5e3745f 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java @@ -33,7 +33,7 @@ public class TestDeleteRow extends RowResourceBase { @Test public void testDeleteNonExistentColumn() throws Exception { Response response = putValueJson(TABLE, ROW_1, COLUMN_1, VALUE_1); - 
assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = checkAndDeleteJson(TABLE, ROW_1, COLUMN_1, VALUE_2); assertEquals(304, response.getCode()); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java index 537a293f63..ee14ea54ef 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java @@ -32,8 +32,6 @@ import org.apache.hadoop.hbase.rest.model.RowModel; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RestTests; import org.apache.hadoop.hbase.util.Bytes; -import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider; -import org.codehaus.jackson.map.ObjectMapper; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -47,9 +45,10 @@ import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; import java.io.IOException; -import java.util.ArrayList; import java.util.Collection; -import java.util.List; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; import static org.junit.Assert.assertEquals; @@ -202,7 +201,7 @@ public class TestMultiRowResource { assertEquals(response.getCode(), 200); ObjectMapper mapper = new JacksonJaxbJsonProvider().locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE); - CellSetModel cellSet = (CellSetModel) mapper.readValue(response.getBody(), CellSetModel.class); + CellSetModel cellSet = mapper.readValue(response.getBody(), CellSetModel.class); assertEquals(2, cellSet.getRows().size()); assertEquals(ROW_1, Bytes.toString(cellSet.getRows().get(0).getKey())); assertEquals(VALUE_1, Bytes.toString(cellSet.getRows().get(0).getCells().get(0).getValue())); 
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java index 58e8ea0532..9c724cd8a7 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java @@ -48,8 +48,6 @@ import org.apache.hadoop.hbase.rest.model.TestNamespacesInstanceModel; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RestTests; import org.apache.hadoop.hbase.util.Bytes; -import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider; -import org.codehaus.jackson.map.ObjectMapper; import static org.junit.Assert.*; @@ -58,6 +56,9 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; + @Category({RestTests.class, MediumTests.class}) public class TestNamespacesInstanceResource { private static String NAMESPACE1 = "TestNamespacesInstanceResource1"; @@ -265,11 +266,11 @@ public class TestNamespacesInstanceResource { // Try REST post and puts with invalid content. 
response = client.post(namespacePath1, Constants.MIMETYPE_JSON, toXML(model1)); - assertEquals(400, response.getCode()); + assertEquals(500, response.getCode()); String jsonString = jsonMapper.writeValueAsString(model2); response = client.put(namespacePath2, Constants.MIMETYPE_XML, Bytes.toBytes(jsonString)); assertEquals(400, response.getCode()); - response = client.post(namespacePath3, Constants.MIMETYPE_PROTOBUF, toXML(model1)); + response = client.post(namespacePath3, Constants.MIMETYPE_PROTOBUF, toXML(model3)); assertEquals(500, response.getCode()); NamespaceDescriptor nd1 = findNamespace(admin, NAMESPACE1); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java index f0c3d4a82d..4866d53c2f 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java @@ -21,13 +21,14 @@ package org.apache.hadoop.hbase.rest; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.StringWriter; -import java.util.ArrayList; import java.util.Collection; -import java.util.List; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.http.Header; import org.apache.http.message.BasicHeader; @@ -47,6 +48,7 @@ import org.apache.hadoop.hbase.util.Bytes; import static org.junit.Assert.*; +import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -57,6 +59,8 @@ import org.junit.runners.Parameterized; @Category({RestTests.class, MediumTests.class}) @RunWith(Parameterized.class) public class TestSchemaResource { + private static final Log LOG = LogFactory.getLog(TestSchemaResource.class); + private static String 
TABLE1 = "TestSchemaResource1"; private static String TABLE2 = "TestSchemaResource2"; @@ -72,11 +76,8 @@ public class TestSchemaResource { private static boolean csrfEnabled = true; @Parameterized.Parameters - public static Collection data() { - List params = new ArrayList<>(2); - params.add(new Object[] {Boolean.TRUE}); - params.add(new Object[] {Boolean.FALSE}); - return params; + public static Collection parameters() { + return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED; } public TestSchemaResource(Boolean csrf) { @@ -104,6 +105,21 @@ public class TestSchemaResource { TEST_UTIL.shutdownMiniCluster(); } + @After + public void tearDown() throws Exception { + Admin admin = TEST_UTIL.getAdmin(); + + for (String table : new String[] {TABLE1, TABLE2}) { + TableName t = TableName.valueOf(table); + if (admin.tableExists(t)) { + admin.disableTable(t); + admin.deleteTable(t); + } + } + + conf.set("hbase.rest.readonly", "false"); + } + private static byte[] toXML(TableSchemaModel model) throws JAXBException { StringWriter writer = new StringWriter(); context.createMarshaller().marshal(model, writer); @@ -123,7 +139,7 @@ public class TestSchemaResource { Response response; Admin admin = TEST_UTIL.getAdmin(); - assertFalse(admin.tableExists(TableName.valueOf(TABLE1))); + assertFalse("Table " + TABLE1 + " should not exist", admin.tableExists(TableName.valueOf(TABLE1))); // create the table model = testTableSchemaModel.buildTestModel(TABLE1); @@ -131,27 +147,28 @@ public class TestSchemaResource { if (csrfEnabled) { // test put operation is forbidden without custom header response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model)); - assertEquals(response.getCode(), 400); + assertEquals(400, response.getCode()); } response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model), extraHdr); - assertEquals(response.getCode(), 201); + assertEquals("put failed with csrf " + (csrfEnabled ? 
"enabled" : "disabled"), + 201, response.getCode()); // recall the same put operation but in read-only mode conf.set("hbase.rest.readonly", "true"); response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model), extraHdr); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); // retrieve the schema and validate it response = client.get(schemaPath, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); model = fromXML(response.getBody()); testTableSchemaModel.checkModel(model, TABLE1); // with json retrieve the schema and validate it response = client.get(schemaPath, Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); model = testTableSchemaModel.fromJSON(Bytes.toString(response.getBody())); testTableSchemaModel.checkModel(model, TABLE1); @@ -164,14 +181,14 @@ public class TestSchemaResource { // test delete schema operation is forbidden in read-only mode response = client.delete(schemaPath, extraHdr); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); // return read-only setting back to default conf.set("hbase.rest.readonly", "false"); // delete the table and make sure HBase concurs response = client.delete(schemaPath, extraHdr); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertFalse(admin.tableExists(TableName.valueOf(TABLE1))); } @@ -191,11 +208,12 @@ public class TestSchemaResource { if (csrfEnabled) { // test put operation is forbidden without custom header response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput()); - assertEquals(response.getCode(), 400); + assertEquals(400, response.getCode()); } response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, 
model.createProtobufOutput(), extraHdr); - assertEquals(response.getCode(), 201); + assertEquals("put failed with csrf " + (csrfEnabled ? "enabled" : "disabled"), + 201, response.getCode()); // recall the same put operation but in read-only mode conf.set("hbase.rest.readonly", "true"); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java index c674b3c7d9..8380a0a4fb 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java @@ -43,6 +43,8 @@ import javax.xml.bind.annotation.XmlRootElement; import javax.xml.parsers.SAXParserFactory; import javax.xml.stream.XMLStreamException; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -61,11 +63,6 @@ import org.apache.hadoop.hbase.rest.model.RowModel; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RestTests; import org.apache.hadoop.hbase.util.Bytes; -import org.codehaus.jackson.JsonFactory; -import org.codehaus.jackson.JsonParser; -import org.codehaus.jackson.JsonToken; -import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider; -import org.codehaus.jackson.map.ObjectMapper; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -73,8 +70,15 @@ import org.junit.experimental.categories.Category; import org.xml.sax.InputSource; import org.xml.sax.XMLReader; +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; + 
@Category({RestTests.class, MediumTests.class}) public class TestTableScan { + private static final Log LOG = LogFactory.getLog(TestTableScan.class); private static final TableName TABLE = TableName.valueOf("TestScanResource"); private static final String CFA = "a"; @@ -201,7 +205,7 @@ public class TestTableScan { builder.append("?"); builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1); builder.append("&"); - builder.append(Constants.SCAN_LIMIT + "=20"); + builder.append(Constants.SCAN_LIMIT + "=2"); Response response = client.get("/" + TABLE + builder.toString(), Constants.MIMETYPE_JSON); assertEquals(200, response.getCode()); @@ -210,7 +214,7 @@ public class TestTableScan { .locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE); CellSetModel model = mapper.readValue(response.getStream(), CellSetModel.class); int count = TestScannerResource.countCellSet(model); - assertEquals(20, count); + assertEquals(2, count); checkRowsNotNull(model); //Test scanning with no limit. @@ -305,52 +309,22 @@ public class TestTableScan { @Test public void testStreamingJSON() throws Exception { - // Test scanning particular columns with limit. + //Test with start row and end row. 
StringBuilder builder = new StringBuilder(); builder.append("/*"); builder.append("?"); builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1); builder.append("&"); - builder.append(Constants.SCAN_LIMIT + "=20"); + builder.append(Constants.SCAN_START_ROW + "=aaa"); + builder.append("&"); + builder.append(Constants.SCAN_END_ROW + "=aay"); Response response = client.get("/" + TABLE + builder.toString(), Constants.MIMETYPE_JSON); assertEquals(200, response.getCode()); - assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); + + int count = 0; ObjectMapper mapper = new JacksonJaxbJsonProvider() .locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE); - CellSetModel model = mapper.readValue(response.getStream(), CellSetModel.class); - int count = TestScannerResource.countCellSet(model); - assertEquals(20, count); - checkRowsNotNull(model); - - //Test scanning with no limit. - builder = new StringBuilder(); - builder.append("/*"); - builder.append("?"); - builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_2); - response = client.get("/" + TABLE + builder.toString(), - Constants.MIMETYPE_JSON); - assertEquals(200, response.getCode()); - assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); - model = mapper.readValue(response.getStream(), CellSetModel.class); - count = TestScannerResource.countCellSet(model); - assertEquals(expectedRows2, count); - checkRowsNotNull(model); - - //Test with start row and end row. 
- builder = new StringBuilder(); - builder.append("/*"); - builder.append("?"); - builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1); - builder.append("&"); - builder.append(Constants.SCAN_START_ROW + "=aaa"); - builder.append("&"); - builder.append(Constants.SCAN_END_ROW + "=aay"); - response = client.get("/" + TABLE + builder.toString(), - Constants.MIMETYPE_JSON); - assertEquals(200, response.getCode()); - - count = 0; JsonFactory jfactory = new JsonFactory(mapper); JsonParser jParser = jfactory.createJsonParser(response.getStream()); boolean found = false; @@ -390,7 +364,7 @@ public class TestTableScan { int rowCount = readProtobufStream(response.getStream()); assertEquals(15, rowCount); - //Test with start row and end row. + //Test with start row and end row. builder = new StringBuilder(); builder.append("/*"); builder.append("?"); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java index 99fce2c80a..e76422bc4d 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java @@ -36,10 +36,11 @@ import org.apache.hadoop.hbase.rest.model.VersionModel; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RestTests; import org.apache.hadoop.hbase.util.Bytes; -import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider; -import org.codehaus.jackson.map.ObjectMapper; import org.glassfish.jersey.servlet.ServletContainer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; + import static org.junit.Assert.*; import org.junit.AfterClass; @@ -99,7 +100,7 @@ public class TestVersionResource { @Test public void testGetStargateVersionText() throws IOException { Response response = client.get("/version", 
Constants.MIMETYPE_TEXT); - assertTrue(response.getCode() == 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_TEXT, response.getHeader("content-type")); String body = Bytes.toString(response.getBody()); assertTrue(body.length() > 0); @@ -117,7 +118,7 @@ public class TestVersionResource { @Test public void testGetStargateVersionXML() throws IOException, JAXBException { Response response = client.get("/version", Constants.MIMETYPE_XML); - assertTrue(response.getCode() == 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); VersionModel model = (VersionModel) context.createUnmarshaller().unmarshal( @@ -129,7 +130,7 @@ public class TestVersionResource { @Test public void testGetStargateVersionJSON() throws IOException { Response response = client.get("/version", Constants.MIMETYPE_JSON); - assertTrue(response.getCode() == 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); ObjectMapper mapper = new JacksonJaxbJsonProvider() .locateMapper(VersionModel.class, MediaType.APPLICATION_JSON_TYPE); @@ -142,13 +143,13 @@ public class TestVersionResource { @Test public void testGetStargateVersionPB() throws IOException { Response response = client.get("/version", Constants.MIMETYPE_PROTOBUF); - assertTrue(response.getCode() == 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type")); VersionModel model = new VersionModel(); model.getObjectFromMessage(response.getBody()); validate(model); response = client.get("/version", Constants.MIMETYPE_PROTOBUF_IETF); - assertTrue(response.getCode() == 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF_IETF, response.getHeader("content-type")); model = new VersionModel(); model.getObjectFromMessage(response.getBody()); @@ -158,7 +159,7 @@ public class TestVersionResource { 
@Test public void testGetStorageClusterVersionText() throws IOException { Response response = client.get("/version/cluster", Constants.MIMETYPE_TEXT); - assertTrue(response.getCode() == 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_TEXT, response.getHeader("content-type")); } @@ -166,7 +167,7 @@ public class TestVersionResource { public void testGetStorageClusterVersionXML() throws IOException, JAXBException { Response response = client.get("/version/cluster",Constants.MIMETYPE_XML); - assertTrue(response.getCode() == 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); StorageClusterVersionModel clusterVersionModel = (StorageClusterVersionModel) @@ -180,7 +181,7 @@ public class TestVersionResource { @Test public void testGetStorageClusterVersionJSON() throws IOException { Response response = client.get("/version/cluster", Constants.MIMETYPE_JSON); - assertTrue(response.getCode() == 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); ObjectMapper mapper = new JacksonJaxbJsonProvider() .locateMapper(StorageClusterVersionModel.class, MediaType.APPLICATION_JSON_TYPE); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java index af5545e02d..3c41d6dccd 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java @@ -62,14 +62,14 @@ public class TestColumnSchemaModel extends TestModelBase { } protected void checkModel(ColumnSchemaModel model) { - assertEquals(model.getName(), COLUMN_NAME); - assertEquals(model.__getBlockcache(), BLOCKCACHE); - assertEquals(model.__getBlocksize(), BLOCKSIZE); - assertEquals(model.__getBloomfilter(), 
BLOOMFILTER); - assertTrue(model.__getCompression().equalsIgnoreCase(COMPRESSION)); - assertEquals(model.__getInMemory(), IN_MEMORY); - assertEquals(model.__getTTL(), TTL); - assertEquals(model.__getVersions(), VERSIONS); + assertEquals("name", COLUMN_NAME, model.getName()); + assertEquals("block cache", BLOCKCACHE, model.__getBlockcache()); + assertEquals("block size", BLOCKSIZE, model.__getBlocksize()); + assertEquals("bloomfilter", BLOOMFILTER, model.__getBloomfilter()); + assertTrue("compression", model.__getCompression().equalsIgnoreCase(COMPRESSION)); + assertEquals("in memory", IN_MEMORY, model.__getInMemory()); + assertEquals("ttl", TTL, model.__getTTL()); + assertEquals("versions", VERSIONS, model.__getVersions()); } public void testFromPB() throws Exception { diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java index 9cba485b78..b5826b8ae4 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java @@ -24,9 +24,6 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; import org.apache.hadoop.hbase.rest.provider.JAXBContextResolver; import org.apache.hadoop.hbase.util.Base64; -import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.node.ObjectNode; import org.junit.experimental.categories.Category; import javax.ws.rs.core.MediaType; @@ -35,6 +32,9 @@ import javax.xml.bind.JAXBException; import java.io.IOException; import java.io.StringReader; import java.io.StringWriter; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; @Category({RestTests.class, 
SmallTests.class}) public abstract class TestModelBase extends TestCase { diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java index 4b2eb05c24..c41128d737 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java @@ -23,6 +23,8 @@ import java.util.Iterator; import javax.xml.bind.JAXBContext; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.RestTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -30,6 +32,7 @@ import org.junit.experimental.categories.Category; @Category({RestTests.class, SmallTests.class}) public class TestTableSchemaModel extends TestModelBase { + private static final Log LOG = LogFactory.getLog(TestTableSchemaModel.class); public static final String TABLE_NAME = "testTable"; private static final boolean IS_META = false; diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml index ad080f18d7..4eb7f9bb81 100644 --- a/hbase-server/pom.xml +++ b/hbase-server/pom.xml @@ -482,14 +482,18 @@ org.eclipse.jetty jetty-security + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.core + jackson-annotations + org.glassfish.jersey.containers jersey-container-servlet-core - - org.glassfish.jersey.media - jersey-media-json-jackson1 - org.glassfish.web @@ -502,10 +506,6 @@ org.glassfish javax.el - - org.codehaus.jackson - jackson-core-asl - org.codehaus.jettison jettison @@ -516,10 +516,6 @@ - - org.codehaus.jackson - jackson-mapper-asl - com.github.stephenc.findbugs diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java index dd3bf25659..bb80abee72 100644 
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.io.hfile; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import org.apache.hadoop.hbase.metrics.impl.FastLongHistogram; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; /** * Snapshot of block cache age in cache. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java index fc807db7b5..3c04fa81cb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java @@ -24,16 +24,15 @@ import java.util.NavigableSet; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.ConcurrentSkipListSet; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.core.JsonGenerationException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.metrics.impl.FastLongHistogram; import org.apache.hadoop.hbase.util.Bytes; -import org.codehaus.jackson.JsonGenerationException; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import org.codehaus.jackson.map.JsonMappingException; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.SerializationConfig; - /** * Utilty for aggregating counts in CachedBlocks and toString/toJSON CachedBlocks and BlockCaches. 
@@ -50,9 +49,9 @@ public class BlockCacheUtil { */ private static final ObjectMapper MAPPER = new ObjectMapper(); static { - MAPPER.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false); - MAPPER.configure(SerializationConfig.Feature.FLUSH_AFTER_WRITE_VALUE, true); - MAPPER.configure(SerializationConfig.Feature.INDENT_OUTPUT, true); + MAPPER.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); + MAPPER.configure(SerializationFeature.FLUSH_AFTER_WRITE_VALUE, true); + MAPPER.configure(SerializationFeature.INDENT_OUTPUT, true); } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java index bd003934be..0fde0a7ee4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java @@ -45,14 +45,15 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.HasThread; import org.apache.hadoop.util.StringUtils; -import org.codehaus.jackson.annotate.JsonIgnore; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + /** * A block cache implementation that is memory-aware using {@link HeapSize}, * memory-bound using an LRU eviction algorithm, and concurrent: backed by a diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java index d9c3c9a664..40b64be18d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java @@ -37,7 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 36d383a3e4..24cf1665a7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -36,6 +36,7 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.atomic.LongAdder; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -68,7 +69,6 @@ import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; -import org.codehaus.jackson.map.ObjectMapper; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService; diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java index 688a71cf8a..b3869f4519 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hbase.monitoring; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.yetus.audience.InterfaceAudience; -import org.codehaus.jackson.map.ObjectMapper; import java.io.IOException; import java.util.HashMap; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java index 0739e91ba7..f4a146ecdb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java @@ -41,11 +41,11 @@ import javax.management.openmbean.CompositeData; import javax.management.openmbean.CompositeType; import javax.management.openmbean.TabularData; +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerationException; +import com.fasterxml.jackson.core.JsonGenerator; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.codehaus.jackson.JsonFactory; -import org.codehaus.jackson.JsonGenerationException; -import org.codehaus.jackson.JsonGenerator; /** * Utility for doing JSON and MBeans. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java index f64934ed9a..879f32e63a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java @@ -38,11 +38,11 @@ import javax.management.ObjectName; import javax.management.ReflectionException; import javax.management.openmbean.CompositeData; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.codehaus.jackson.JsonNode; -import org.codehaus.jackson.JsonProcessingException; -import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jettison.json.JSONException; public final class JSONMetricUtil { @@ -112,8 +112,8 @@ public final class JSONMetricUtil { return sw.toString(); } - public static JsonNode mappStringToJsonNode(String jsonString) throws - JsonProcessingException, IOException { + public static JsonNode mappStringToJsonNode(String jsonString) + throws JsonProcessingException, IOException { ObjectMapper mapper = new ObjectMapper(); JsonNode node = mapper.readTree(jsonString); return node; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java index 9ed5a6464e..d4c320bd58 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java @@ -27,6 +27,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.cli.CommandLine; import 
org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.HelpFormatter; @@ -47,7 +48,6 @@ import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; -import org.codehaus.jackson.map.ObjectMapper; /** * WALPrettyPrinter prints the contents of a given WAL with a variety of diff --git a/hbase-server/src/main/resources/hbase-webapps/master/processMaster.jsp b/hbase-server/src/main/resources/hbase-webapps/master/processMaster.jsp index 6e87e72ac7..d99e198232 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/processMaster.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/processMaster.jsp @@ -29,7 +29,7 @@ import="java.lang.management.GarbageCollectorMXBean" import="org.apache.hadoop.hbase.util.JSONMetricUtil" import="org.apache.hadoop.hbase.procedure2.util.StringUtils" - import="org.codehaus.jackson.JsonNode" + import="com.fasterxml.jackson.databind.JsonNode" %> <% RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean(); diff --git a/hbase-server/src/main/resources/hbase-webapps/master/processRS.jsp b/hbase-server/src/main/resources/hbase-webapps/master/processRS.jsp index cc18d5bb1f..f0df0c0ad9 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/processRS.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/processRS.jsp @@ -29,7 +29,7 @@ import="java.lang.management.GarbageCollectorMXBean" import="org.apache.hadoop.hbase.util.JSONMetricUtil" import="org.apache.hadoop.hbase.procedure2.util.StringUtils" - import="org.codehaus.jackson.JsonNode" + import="com.fasterxml.jackson.databind.JsonNode" %> <% RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean(); diff --git a/hbase-server/src/main/resources/hbase-webapps/regionserver/processRS.jsp b/hbase-server/src/main/resources/hbase-webapps/regionserver/processRS.jsp index 
cc18d5bb1f..f0df0c0ad9 100644 --- a/hbase-server/src/main/resources/hbase-webapps/regionserver/processRS.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/regionserver/processRS.jsp @@ -29,7 +29,7 @@ import="java.lang.management.GarbageCollectorMXBean" import="org.apache.hadoop.hbase.util.JSONMetricUtil" import="org.apache.hadoop.hbase.procedure2.util.StringUtils" - import="org.codehaus.jackson.JsonNode" + import="com.fasterxml.jackson.databind.JsonNode" %> <% RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java index ee5a364232..dab867327f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java @@ -23,6 +23,8 @@ import java.io.IOException; import java.util.Map; import java.util.NavigableSet; +import com.fasterxml.jackson.core.JsonGenerationException; +import com.fasterxml.jackson.databind.JsonMappingException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -32,8 +34,6 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.io.hfile.TestCacheConfig.DataCacheEntry; import org.apache.hadoop.hbase.io.hfile.TestCacheConfig.IndexCacheEntry; -import org.codehaus.jackson.JsonGenerationException; -import org.codehaus.jackson.map.JsonMappingException; import org.junit.After; import org.junit.Before; import org.junit.Test; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java index 30da26aa19..1135039100 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java @@ -22,6 +22,7 @@ import java.lang.management.GarbageCollectorMXBean; import java.lang.management.ManagementFactory; import java.util.Hashtable; import java.util.List; +import java.util.Map; import javax.management.MalformedObjectNameException; import javax.management.ObjectName; @@ -29,13 +30,14 @@ import javax.management.openmbean.CompositeData; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.codehaus.jackson.JsonNode; -import org.codehaus.jackson.JsonProcessingException; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -51,17 +53,14 @@ public class TestJSONMetricUtil { String[] values = {"MemoryPool", "Par Eden Space"}; String[] values2 = {"MemoryPool", "Par Eden Space", "Test"}; String[] emptyValue = {}; - Hashtable properties = JSONMetricUtil.buldKeyValueTable(keys, values); - Hashtable nullObject = JSONMetricUtil.buldKeyValueTable(keys, values2); - Hashtable nullObject1 = JSONMetricUtil.buldKeyValueTable(keys, emptyValue); - Hashtable nullObject2 = JSONMetricUtil.buldKeyValueTable(emptyKey, values2); - Hashtable nullObject3 = JSONMetricUtil.buldKeyValueTable(emptyKey, emptyValue); - assertEquals(properties.get("type"), values[0]); - assertEquals(properties.get("name"), values[1]); - assertEquals(nullObject, null); - assertEquals(nullObject1, null); - assertEquals(nullObject2, null); - assertEquals(nullObject3, null); + Map properties = 
JSONMetricUtil.buldKeyValueTable(keys, values); + assertEquals(values[0], properties.get("type")); + assertEquals(values[1], properties.get("name")); + + assertNull(JSONMetricUtil.buldKeyValueTable(keys, values2)); + assertNull(JSONMetricUtil.buldKeyValueTable(keys, emptyValue)); + assertNull(JSONMetricUtil.buldKeyValueTable(emptyKey, values2)); + assertNull(JSONMetricUtil.buldKeyValueTable(emptyKey, emptyValue)); } @Test @@ -73,10 +72,10 @@ public class TestJSONMetricUtil { JsonNode r2 = JSONMetricUtil.searchJson(node, "data2"); JsonNode r3 = JSONMetricUtil.searchJson(node, "data3"); JsonNode r4 = JSONMetricUtil.searchJson(node, "data4"); - assertEquals(r1.getIntValue(), 100); - assertEquals(r2.getTextValue(), "hello"); - assertEquals(r3.get(0).getIntValue(), 1); - assertEquals(r4.getIntValue(), 0); + assertEquals(100, r1.intValue()); + assertEquals("hello", r2.textValue()); + assertEquals(1, r3.get(0).intValue()); + assertEquals(0, r4.intValue()); } @Test diff --git a/hbase-shaded/hbase-shaded-mapreduce/pom.xml b/hbase-shaded/hbase-shaded-mapreduce/pom.xml index 2afa3c2ecd..1f2a2bf1ec 100644 --- a/hbase-shaded/hbase-shaded-mapreduce/pom.xml +++ b/hbase-shaded/hbase-shaded-mapreduce/pom.xml @@ -145,10 +145,6 @@ org.glassfish.jersey.containers jersey-container-servlet-core - - org.glassfish.jersey.media - jersey-media-json-jackson1 - diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml index 41049469f9..7f92901cac 100644 --- a/hbase-shaded/pom.xml +++ b/hbase-shaded/pom.xml @@ -176,6 +176,10 @@ com.dropwizard org.apache.hadoop.hbase.shaded.com.dropwizard + + com.fasterxml + org.apache.hadoop.hbase.shaded.com.fasterxml + diff --git a/hbase-shell/src/main/ruby/hbase/taskmonitor.rb b/hbase-shell/src/main/ruby/hbase/taskmonitor.rb index 849752b6af..15804864be 100644 --- a/hbase-shell/src/main/ruby/hbase/taskmonitor.rb +++ b/hbase-shell/src/main/ruby/hbase/taskmonitor.rb @@ -74,7 +74,7 @@ module Hbase # Returns a filtered list of tasks on the given host def 
tasksOnHost(filter, host) java_import 'java.net.URL' - java_import 'org.codehaus.jackson.map.ObjectMapper' + java_import 'com.fasterxml.jackson.databind.ObjectMapper' infoport = @admin.getClusterStatus.getLoad(host).getInfoServerPort.to_s diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml index 320104b070..b4607b82c3 100644 --- a/hbase-spark/pom.xml +++ b/hbase-spark/pom.xml @@ -123,6 +123,21 @@ 3.1.4 test + + com.fasterxml.jackson.module + jackson-module-scala_${scala.binary.version} + ${jackson.version} + + + org.scala-lang + scala-library + + + org.scala-lang + scala-reflect + + + org.apache.hadoop hadoop-client diff --git a/pom.xml b/pom.xml index faca5113a4..944670af2e 100755 --- a/pom.xml +++ b/pom.xml @@ -1399,11 +1399,10 @@ 4.5.3 4.4.6 3.2.1 - 2.23.2 + 2.9.1 2.2.12 9.4.6.v20170531 9.2.19.v20160908 - 1.9.13 3.1.0 2.0.1 2.25.1 @@ -1871,28 +1870,20 @@ joni ${joni.version} - - org.codehaus.jackson - jackson-core-asl - ${jackson1.version} + com.fasterxml.jackson.jaxrs + jackson-jaxrs-json-provider + ${jackson.version} - org.codehaus.jackson - jackson-mapper-asl - ${jackson1.version} + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} - org.codehaus.jackson - jackson-jaxrs - ${jackson1.version} - - - org.codehaus.jackson - jackson-xc - ${jackson1.version} + com.fasterxml.jackson.core + jackson-annotations + ${jackson.version} org.jamon @@ -1981,11 +1972,6 @@ jersey-client ${jersey.version} - - org.glassfish.jersey.media - jersey-media-json-jackson1 - ${jackson.version} - org.glassfish.web @@ -2394,6 +2380,14 @@ javax.inject javax.inject + + org.codehaus.jackson + jackson-core-asl + + + org.codehaus.jackson + jackson-mapper-asl + @@ -2452,6 +2446,14 @@ xerces xercesImpl + + org.codehaus.jackson + jackson-core-asl + + + org.codehaus.jackson + jackson-mapper-asl + ${hadoop-two.version} @@ -2482,6 +2484,14 @@ xerces xercesImpl + + org.codehaus.jackson + jackson-core-asl + + + org.codehaus.jackson + jackson-mapper-asl + @@ -2522,6 +2532,14 @@ 
junit junit + + org.codehaus.jackson jackson-core-asl + + org.codehaus.jackson jackson-mapper-asl + + @@ -2608,6 +2626,14 @@ javax.inject javax.inject + + org.codehaus.jackson jackson-core-asl + + org.codehaus.jackson jackson-mapper-asl + + @@ -2654,6 +2680,14 @@ xerces xercesImpl + + org.codehaus.jackson jackson-core-asl + + org.codehaus.jackson jackson-mapper-asl + + ${hadoop-three.version} @@ -2680,6 +2714,14 @@ xerces xercesImpl + + org.codehaus.jackson jackson-core-asl + + org.codehaus.jackson jackson-mapper-asl + + @@ -2726,6 +2768,14 @@ junit junit + + org.codehaus.jackson jackson-core-asl + + org.codehaus.jackson jackson-mapper-asl + + -- 2.14.1