diff --git src/main/java/org/apache/hadoop/hbase/rest/Constants.java src/main/java/org/apache/hadoop/hbase/rest/Constants.java
index 21d76fe..f40eca9 100644
--- src/main/java/org/apache/hadoop/hbase/rest/Constants.java
+++ src/main/java/org/apache/hadoop/hbase/rest/Constants.java
@@ -39,4 +39,5 @@ public interface Constants {
   public static final String MIMETYPE_JSON = "application/json";
 
   public static final String CRLF = "\r\n";
+  public static final String FILTER_CLASSES = "hbase.rest.filter.classes";
 }
diff --git src/main/java/org/apache/hadoop/hbase/rest/Main.java src/main/java/org/apache/hadoop/hbase/rest/Main.java
index ca3edc5..20d2823 100644
--- src/main/java/org/apache/hadoop/hbase/rest/Main.java
+++ src/main/java/org/apache/hadoop/hbase/rest/Main.java
@@ -26,6 +26,7 @@
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.PosixParser;
 import org.apache.commons.cli.ParseException;
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -197,6 +198,14 @@ public class Main implements Constants {
     context.addServlet(sh, "/*");
     context.addFilter(GzipFilter.class, "/*", 0);
 
+    //Load filters from configuration.
+    String[] filterClasses = servlet.getConfiguration().getStrings(FILTER_CLASSES,
+        ArrayUtils.EMPTY_STRING_ARRAY);
+    for (String filter : filterClasses) {
+      filter = filter.trim();
+      context.addFilter(Class.forName(filter), "/*", 0);
+    }
+
     // Put up info server.
     int port = conf.getInt("hbase.rest.info.port", 8085);
     if (port >= 0) {
diff --git src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java
new file mode 100644
index 0000000..ef53f46
--- /dev/null
+++ src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class DummyFilter implements Filter {
+  private Log LOG = LogFactory.getLog(getClass());
+
+  @Override
+  public void destroy() {
+  }
+
+  @Override
+  public void doFilter(ServletRequest paramServletRequest, ServletResponse paramServletResponse,
+      FilterChain paramFilterChain) throws IOException, ServletException {
+    if (paramServletRequest instanceof HttpServletRequest
+        && paramServletResponse instanceof HttpServletResponse) {
+      HttpServletRequest request = (HttpServletRequest) paramServletRequest;
+      HttpServletResponse response = (HttpServletResponse) paramServletResponse;
+
+      String path = request.getRequestURI();
+      LOG.info(path);
+      if (path.indexOf("/status/cluster") >= 0) {
+        LOG.info("Blocking cluster status request");
+        response.sendError(HttpServletResponse.SC_NOT_FOUND, "Cluster status cannot be requested.");
+      } else {
+        paramFilterChain.doFilter(request, response);
+      }
+    }
+  }
+
+  @Override
+  public void init(FilterConfig filterChain) throws ServletException {
+  }
+
+}
diff --git src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
index 6b723be..5e504e9 100644
--- src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
+++ src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
@@ -19,6 +19,7 @@
  */
 package org.apache.hadoop.hbase.rest;
 
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -68,6 +69,14 @@ public class HBaseRESTTestingUtility {
     Context context = new Context(server, "/", Context.SESSIONS);
     context.addServlet(sh, "/*");
     context.addFilter(GzipFilter.class, "/*", 0);
+
+    //Load filters specified from configuration.
+    String[] filterClasses = conf.getStrings(Constants.FILTER_CLASSES,
+        ArrayUtils.EMPTY_STRING_ARRAY);
+    for (String filter : filterClasses) {
+      filter = filter.trim();
+      context.addFilter(Class.forName(filter), "/*", 0);
+    }
     // start the server
     server.start();
     // get the port
diff --git src/test/java/org/apache/hadoop/hbase/rest/TestResourceFilter.java src/test/java/org/apache/hadoop/hbase/rest/TestResourceFilter.java
new file mode 100644
index 0000000..49bbf22
--- /dev/null
+++ src/test/java/org/apache/hadoop/hbase/rest/TestResourceFilter.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.rest.client.Client;
+import org.apache.hadoop.hbase.rest.client.Cluster;
+import org.apache.hadoop.hbase.rest.client.Response;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestResourceFilter {
+
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static final HBaseRESTTestingUtility REST_TEST_UTIL =
+      new HBaseRESTTestingUtility();
+  private static Client client;
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    TEST_UTIL.getConfiguration().set(Constants.FILTER_CLASSES, DummyFilter.class.getName());
+    TEST_UTIL.startMiniCluster();
+    REST_TEST_UTIL.startServletContainer(TEST_UTIL.getConfiguration());
+    client = new Client(new Cluster().add("localhost",
+        REST_TEST_UTIL.getServletPort()));
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    REST_TEST_UTIL.shutdownServletContainer();
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testFilter() throws Exception {
+    String path = "/status/cluster";
+    Response response = client.get(path);
+    assertEquals(404, response.getCode());
+  }
+}