From 3f7449835e52c215f84d7f1d43ca70ba0711b0d7 Mon Sep 17 00:00:00 2001
From: Balazs Meszaros
Date: Mon, 30 Jan 2017 09:16:53 -0800
Subject: [PATCH] HBASE-12894 Upgrade Jetty to 9.2.6

- removed Jetty 6.x dependencies (org.mortbay.jetty.*)
- corrected @Ignore-d unit tests
---
 .../apache/hadoop/hbase/rest/client/Response.java  |  4 +-
 .../hadoop/hbase/http/AdminAuthorizedServlet.java  |  2 +-
 .../org/apache/hadoop/hbase/http/HttpServer.java   | 18 +++----
 .../hbase/http/SslSocketConnectorSecure.java       | 58 ----------------------
 .../org/apache/hadoop/hbase/master/HMaster.java    | 18 ++++---
 .../regionserver/DumpReplicationQueues.java        | 32 ++++++++----
 .../hbase/client/locking/TestEntityLocks.java      |  6 +--
 .../hbase/http/HttpServerFunctionalTest.java       |  2 +-
 .../apache/hadoop/hbase/http/TestHttpServer.java   | 35 ++++++-------
 .../hadoop/hbase/http/TestServletFilter.java       |  2 +-
 .../hadoop/hbase/http/conf/TestConfServlet.java    |  2 +-
 .../hadoop/hbase/http/resource/JerseyResource.java |  2 +-
 12 files changed, 66 insertions(+), 115 deletions(-)
 delete mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/http/SslSocketConnectorSecure.java

diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
index 27db365..87a9d9a 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
@@ -24,9 +24,9 @@ import java.io.InputStream;
 import org.apache.http.Header;
 import org.apache.http.HttpResponse;
+import org.eclipse.jetty.util.log.Log;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.mortbay.log.Log;
 
 /**
  * The HTTP result code, response headers, and body of a HTTP response.
@@ -139,7 +139,7 @@ public class Response {
       try {
         body = Client.getResponseBody(resp);
       } catch (IOException ioe) {
-        Log.debug("encountered ioe when obtaining body", ioe);
+        Log.getLog().debug("encountered ioe when obtaining body", ioe);
       }
     }
     return body;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
index 552cdf3..dc1f4ec 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
@@ -25,7 +25,7 @@ import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.mortbay.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.DefaultServlet;
 
 /**
  * General servlet which is admin-authorized.
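
Note on the logging change above: Jetty 6 exposed static helpers on org.mortbay.log.Log, while the Jetty 9 facade hands out a Logger instance first, which is why the patch rewrites Log.debug(...) as Log.getLog().debug(...). A minimal sketch of the Jetty 9 idiom, assuming the 9.2 org.eclipse.jetty.util.log API; it is an illustration, not code from the patch, and HypotheticalCaller is a made-up class name:

    import org.eclipse.jetty.util.log.Log;
    import org.eclipse.jetty.util.log.Logger;

    public class HypotheticalCaller {
      // Obtain a Logger from the Jetty 9 facade; the org.mortbay.log.Log static
      // helpers this replaces are no longer on the classpath after the upgrade.
      private static final Logger LOG = Log.getLogger(HypotheticalCaller.class);

      void report(Exception cause) {
        // Equivalent of the old org.mortbay.log.Log.debug(msg, throwable) calls rewritten above.
        LOG.debug("encountered exception", cause);
        // The anonymous root logger used in the Response.java hunk is also available:
        Log.getLog().info("request body handled");
      }
    }
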
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java index be8e98f..3ce2f09 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java @@ -534,22 +534,23 @@ public class HttpServer implements FilterContainer { Preconditions.checkNotNull(webAppContext); + HandlerCollection handlerCollection = new HandlerCollection(); + ContextHandlerCollection contexts = new ContextHandlerCollection(); RequestLog requestLog = HttpRequestLog.getRequestLog(name); if (requestLog != null) { RequestLogHandler requestLogHandler = new RequestLogHandler(); requestLogHandler.setRequestLog(requestLog); - HandlerCollection handlers = new HandlerCollection(); - handlers.setHandlers(new Handler[] { requestLogHandler, contexts }); - webServer.setHandler(handlers); - } else { - webServer.setHandler(contexts); + handlerCollection.addHandler(requestLogHandler); } final String appDir = getWebAppsPath(name); - webServer.setHandler(webAppContext); + handlerCollection.addHandler(contexts); + handlerCollection.addHandler(webAppContext); + + webServer.setHandler(handlerCollection); addDefaultApps(contexts, appDir, conf); @@ -629,14 +630,13 @@ public class HttpServer implements FilterContainer { logDir = System.getProperty("hadoop.log.dir"); } if (logDir != null) { - ServletContextHandler logContext = new ServletContextHandler(parent, "/*"); + ServletContextHandler logContext = new ServletContextHandler(parent, "/logs"); logContext.addServlet(AdminAuthorizedServlet.class, "/*"); logContext.setResourceBase(logDir); if (conf.getBoolean( ServerConfigurationKeys.HBASE_JETTY_LOGS_SERVE_ALIASES, ServerConfigurationKeys.DEFAULT_HBASE_JETTY_LOGS_SERVE_ALIASES)) { - @SuppressWarnings("unchecked") Map params = logContext.getInitParams(); params.put( "org.mortbay.jetty.servlet.Default.aliases", "true"); @@ -1260,7 +1260,6 @@ public class HttpServer implements FilterContainer { /** * Return the set of parameter names, quoting each name. */ - @SuppressWarnings("unchecked") @Override public Enumeration getParameterNames() { return new Enumeration() { @@ -1301,7 +1300,6 @@ public class HttpServer implements FilterContainer { return result; } - @SuppressWarnings("unchecked") @Override public Map getParameterMap() { Map result = new HashMap(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/SslSocketConnectorSecure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/SslSocketConnectorSecure.java deleted file mode 100644 index 2d78a3f..0000000 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/SslSocketConnectorSecure.java +++ /dev/null @@ -1,58 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hbase.http; - -import org.mortbay.jetty.security.SslSocketConnector; - -import javax.net.ssl.SSLServerSocket; -import java.io.IOException; -import java.net.ServerSocket; -import java.util.ArrayList; - -/** - * This subclass of the Jetty SslSocketConnector exists solely to control - * the TLS protocol versions allowed. This is fallout from the POODLE - * vulnerability (CVE-2014-3566), which requires that SSLv3 be disabled. - * Only TLS 1.0 and later protocols are allowed. - */ -public class SslSocketConnectorSecure extends SslSocketConnector { - - public SslSocketConnectorSecure() { - super(); - } - - /** - * Create a new ServerSocket that will not accept SSLv3 connections, - * but will accept TLSv1.x connections. - */ - protected ServerSocket newServerSocket(String host, int port,int backlog) - throws IOException { - SSLServerSocket socket = (SSLServerSocket) - super.newServerSocket(host, port, backlog); - ArrayList nonSSLProtocols = new ArrayList(); - for (String p : socket.getEnabledProtocols()) { - if (!p.contains("SSLv3")) { - nonSSLProtocols.add(p); - } - } - socket.setEnabledProtocols(nonSSLProtocols.toArray( - new String[nonSSLProtocols.size()])); - return socket; - } -} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 04c9b43..8a4820a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -175,9 +175,10 @@ import org.apache.hadoop.hbase.zookeeper.ZKClusterId; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import org.mortbay.jetty.Connector; -import org.mortbay.jetty.nio.SelectChannelConnector; -import org.mortbay.jetty.servlet.Context; +import org.eclipse.jetty.server.Connector; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.webapp.WebAppContext; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; @@ -385,7 +386,7 @@ public class HMaster extends HRegionServer implements MasterServices { private FavoredNodesManager favoredNodesManager; /** jetty server for master to redirect requests to regionserver infoServer */ - private org.mortbay.jetty.Server masterJettyServer; + private Server masterJettyServer; public static class RedirectServlet extends HttpServlet { private static final long serialVersionUID = 2894774810058302472L; @@ -517,14 +518,17 @@ public class HMaster extends HRegionServer implements MasterServices { if(RedirectServlet.regionServerInfoPort == infoPort) { return infoPort; } - masterJettyServer = new org.mortbay.jetty.Server(); - Connector connector = new SelectChannelConnector(); + masterJettyServer = new Server(); + ServerConnector connector = new ServerConnector(masterJettyServer); connector.setHost(addr); connector.setPort(infoPort); masterJettyServer.addConnector(connector); masterJettyServer.setStopAtShutdown(true); - Context context = new Context(masterJettyServer, "/", Context.NO_SESSIONS); + + WebAppContext context = new WebAppContext(null, "/", null, null, null, null, WebAppContext.NO_SESSIONS); context.addServlet(RedirectServlet.class, "/*"); + context.setServer(masterJettyServer); + 
try { masterJettyServer.start(); } catch (Exception e) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java index 4502141..766b551 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java @@ -17,35 +17,49 @@ */ package org.apache.hadoop.hbase.replication.regionserver; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.Abortable; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; import org.apache.hadoop.hbase.io.WALLink; import org.apache.hadoop.hbase.procedure2.util.StringUtils; -import org.apache.hadoop.hbase.replication.*; -import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.hadoop.hbase.replication.ReplicationException; +import org.apache.hadoop.hbase.replication.ReplicationFactory; +import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; +import org.apache.hadoop.hbase.replication.ReplicationPeers; +import org.apache.hadoop.hbase.replication.ReplicationQueueInfo; +import org.apache.hadoop.hbase.replication.ReplicationQueues; +import org.apache.hadoop.hbase.replication.ReplicationQueuesClient; +import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments; +import org.apache.hadoop.hbase.replication.ReplicationTracker; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.zookeeper.KeeperException; -import org.mortbay.util.IO; import com.google.common.util.concurrent.AtomicLongMap; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.util.*; - /** * Provides information about the existing states of replication, replication peers and queues. 
* diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java index aa87d82..769ee06 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java @@ -29,12 +29,12 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.*; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Threads; +import org.eclipse.jetty.util.log.Log; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; -import org.mortbay.log.Log; import static org.mockito.Mockito.*; import static org.junit.Assert.*; @@ -80,13 +80,13 @@ public class TestEntityLocks { private boolean waitLockTimeOut(EntityLock lock, long maxWaitTimeMillis) { long startMillis = System.currentTimeMillis(); while (lock.isLocked()) { - Log.info("Sleeping..."); + Log.getLog().info("Sleeping..."); Threads.sleepWithoutInterrupt(100); if (!lock.isLocked()) { return true; } if (System.currentTimeMillis() - startMillis > maxWaitTimeMillis) { - Log.info("Timedout..."); + Log.getLog().info("Timedout..."); return false; } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java index a64c1e8..7d610e4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java @@ -40,7 +40,7 @@ public class HttpServerFunctionalTest extends Assert { /** JVM property for the webapp test dir : {@value} */ public static final String TEST_BUILD_WEBAPPS = "test.build.webapps"; /** expected location of the test.build.webapps dir: {@value} */ - private static final String BUILD_WEBAPPS_DIR = "build/test/webapps"; + private static final String BUILD_WEBAPPS_DIR = "src/main/resources/hbase-webapps"; /** name of the test webapp: {@value} */ private static final String TEST = "test"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java index 3a58adc..3b9e852 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java @@ -17,9 +17,6 @@ */ package org.apache.hadoop.hbase.http; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; - import java.io.IOException; import java.io.PrintWriter; import java.net.HttpURLConnection; @@ -48,22 +45,23 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; -import junit.framework.Assert; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.apache.hadoop.hbase.testclassification.SmallTests; import 
org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter; import org.apache.hadoop.hbase.http.resource.JerseyResource; +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.Groups; import org.apache.hadoop.security.ShellBasedUnixGroupsMapping; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.util.ajax.JSON; import org.junit.AfterClass; +import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; @@ -71,9 +69,6 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import org.mockito.internal.util.reflection.Whitebox; -import org.eclipse.jetty.server.ServerConnector; -import org.eclipse.jetty.util.ajax.JSON; - @Category({MiscTests.class, SmallTests.class}) public class TestHttpServer extends HttpServerFunctionalTest { private static final Log LOG = LogFactory.getLog(TestHttpServer.class); @@ -83,7 +78,6 @@ public class TestHttpServer extends HttpServerFunctionalTest { @SuppressWarnings("serial") public static class EchoMapServlet extends HttpServlet { - @SuppressWarnings("unchecked") @Override public void doGet(HttpServletRequest request, HttpServletResponse response @@ -110,7 +104,6 @@ public class TestHttpServer extends HttpServerFunctionalTest { @SuppressWarnings("serial") public static class EchoServlet extends HttpServlet { - @SuppressWarnings("unchecked") @Override public void doGet(HttpServletRequest request, HttpServletResponse response @@ -238,7 +231,6 @@ public class TestHttpServer extends HttpServerFunctionalTest { } @Test - @Ignore public void testContentTypes() throws Exception { // Static CSS files should have text/css URL cssUrl = new URL(baseUrl, "/static/test.css"); @@ -252,7 +244,7 @@ public class TestHttpServer extends HttpServerFunctionalTest { conn = (HttpURLConnection)servletUrl.openConnection(); conn.connect(); assertEquals(200, conn.getResponseCode()); - assertEquals("text/plain; charset=utf-8", conn.getContentType()); + assertEquals("text/plain;charset=utf-8", conn.getContentType()); // We should ignore parameters for mime types - ie a parameter // ending in .css should not change mime type @@ -260,21 +252,22 @@ public class TestHttpServer extends HttpServerFunctionalTest { conn = (HttpURLConnection)servletUrl.openConnection(); conn.connect(); assertEquals(200, conn.getResponseCode()); - assertEquals("text/plain; charset=utf-8", conn.getContentType()); + assertEquals("text/plain;charset=utf-8", conn.getContentType()); // Servlets that specify text/html should get that content type servletUrl = new URL(baseUrl, "/htmlcontent"); conn = (HttpURLConnection)servletUrl.openConnection(); conn.connect(); assertEquals(200, conn.getResponseCode()); - assertEquals("text/html; charset=utf-8", conn.getContentType()); + assertEquals("text/html;charset=utf-8", conn.getContentType()); // JSPs should default to text/html with utf8 - servletUrl = new URL(baseUrl, "/testjsp.jsp"); - conn = (HttpURLConnection)servletUrl.openConnection(); - conn.connect(); - assertEquals(200, conn.getResponseCode()); - assertEquals("text/html; charset=utf-8", conn.getContentType()); + // JSPs do not work from unit tests + // servletUrl = new URL(baseUrl, "/testjsp.jsp"); + // conn = (HttpURLConnection)servletUrl.openConnection(); + // 
conn.connect(); + // assertEquals(200, conn.getResponseCode()); + // assertEquals("text/html; charset=utf-8", conn.getContentType()); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java index cfb5ff3..1d24ec2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java @@ -183,7 +183,7 @@ public class TestServletFilter extends HttpServerFunctionalTest { http.start(); fail("expecting exception"); } catch (IOException e) { - GenericTestUtils.assertExceptionContains("Unable to initialize WebAppContext", e); + GenericTestUtils.assertExceptionContains("Problem starting http server", e); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java index 0385355..29c7381 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java @@ -29,9 +29,9 @@ import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.eclipse.jetty.util.ajax.JSON; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.mortbay.util.ajax.JSON; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java index fa0a5ec..da9519e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java @@ -32,7 +32,7 @@ import javax.ws.rs.core.Response; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; /** * A simple Jersey resource class TestHttpServer. -- 2.10.1 (Apple Git-78)
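
Background on the connector and handler changes above: the HMaster hunk swaps org.mortbay.jetty.Server plus SelectChannelConnector for the Jetty 9 org.eclipse.jetty.server.Server plus ServerConnector, HttpServer.java now composes its handlers through a single HandlerCollection, and SslSocketConnectorSecure could be deleted because Jetty 9's SslContextFactory can exclude SSLv3 itself. A minimal self-contained sketch of those idioms, assuming Jetty 9.2 APIs; this is an illustration rather than code from the patch, and the host, ports, keystore path, and password are placeholders:

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.ServerConnector;
    import org.eclipse.jetty.server.handler.HandlerCollection;
    import org.eclipse.jetty.servlet.DefaultServlet;
    import org.eclipse.jetty.servlet.ServletContextHandler;
    import org.eclipse.jetty.util.ssl.SslContextFactory;

    public class JettyNineMigrationSketch {
      public static void main(String[] args) throws Exception {
        Server server = new Server();

        // Jetty 9 replacement for SelectChannelConnector: a ServerConnector bound to the server.
        ServerConnector http = new ServerConnector(server);
        http.setHost("0.0.0.0");   // placeholder host
        http.setPort(16010);       // placeholder port
        server.addConnector(http);

        // Excluding SSLv3 on the SslContextFactory replaces the deleted SslSocketConnectorSecure.
        SslContextFactory ssl = new SslContextFactory();
        ssl.setKeyStorePath("/path/to/keystore.jks");  // placeholder keystore
        ssl.setKeyStorePassword("changeit");           // placeholder password
        ssl.addExcludeProtocols("SSLv3");
        ServerConnector https = new ServerConnector(server, ssl);
        https.setPort(16443);      // placeholder port
        server.addConnector(https);

        // Session-less servlet context, as the patch does with WebAppContext.NO_SESSIONS.
        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
        context.setContextPath("/");
        context.addServlet(DefaultServlet.class, "/*");

        // Handlers are composed into one HandlerCollection set on the server,
        // mirroring the HttpServer.java hunk.
        HandlerCollection handlers = new HandlerCollection();
        handlers.addHandler(context);
        server.setHandler(handlers);

        server.setStopAtShutdown(true);
        server.start();
        server.join();
      }
    }
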