diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index 0460b0a..2c398a9 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -19,9 +19,11 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Set;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.HelpFormatter;
@@ -35,13 +37,16 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.http.HttpServer;
 import org.apache.hadoop.hbase.http.InfoServer;
 import org.apache.hadoop.hbase.rest.filter.AuthFilter;
+import org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.DNS;
 import org.apache.hadoop.hbase.util.HttpServerUtil;
 import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.util.VersionInfo;
+import org.apache.hadoop.util.StringUtils;
 import org.mortbay.jetty.Connector;
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.nio.SelectChannelConnector;
@@ -67,6 +72,13 @@ import com.sun.jersey.spi.container.servlet.ServletContainer;
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 public class RESTServer implements Constants {
 
+  static String REST_CSRF_ENABLED_KEY = "hbase.rest.csrf.enabled";
+  static boolean REST_CSRF_ENABLED_DEFAULT = false;
+  static String REST_CSRF_CUSTOM_HEADER_KEY = "hbase.rest-csrf.custom-header";
+  static String REST_CSRF_CUSTOM_HEADER_DEFAULT = "X-XSRF-HEADER";
+  static String REST_CSRF_METHODS_TO_IGNORE_KEY = "hbase.rest-csrf.methods-to-ignore";
+  static String REST_CSRF_METHODS_TO_IGNORE_DEFAULT = "GET,OPTIONS,HEAD,TRACE";
+
   private static void printUsageAndExit(Options options, int exitCode) {
     HelpFormatter formatter = new HelpFormatter();
     formatter.printHelp("bin/hbase rest start", "", options,
@@ -76,6 +88,37 @@ public class RESTServer implements Constants {
   }
 
   /**
+   * Returns a list of strings from a comma-delimited configuration value.
+   *
+   * @param conf configuration to check
+   * @param name configuration property name
+   * @param defaultValue default value if no value found for name
+   * @return list of strings from comma-delimited configuration value, or an
+   *     empty list if not found
+   */
+  private static List<String> getTrimmedStringList(Configuration conf,
+      String name, String defaultValue) {
+    String valueString = conf.get(name, defaultValue);
+    if (valueString == null) {
+      return new ArrayList<>();
+    }
+    return new ArrayList<>(StringUtils.getTrimmedStringCollection(valueString));
+  }
+
+  static void addCSRFFilter(Context context, Configuration conf) {
+    if (conf.getBoolean(REST_CSRF_ENABLED_KEY, REST_CSRF_ENABLED_DEFAULT)) {
+      String[] urls = { "/*" };
+      Set<String> restCsrfMethodsToIgnore = new HashSet<>();
+      restCsrfMethodsToIgnore.addAll(getTrimmedStringList(conf,
+          REST_CSRF_METHODS_TO_IGNORE_KEY, REST_CSRF_METHODS_TO_IGNORE_DEFAULT));
+      Map<String, String> restCsrfParams = RestCsrfPreventionFilter
+          .getFilterParams(conf, "hbase.rest-csrf.");
+      HttpServer.defineFilter(context, "csrf", RestCsrfPreventionFilter.class.getName(),
+          restCsrfParams, urls);
+    }
+  }
+
+  /**
    * The main method for the HBase rest server.
    * @param args command-line arguments
    * @throws Exception exception
@@ -234,6 +277,7 @@ public class RESTServer implements Constants {
       filter = filter.trim();
       context.addFilter(Class.forName(filter), "/*", 0);
     }
+    addCSRFFilter(context, conf);
    HttpServerUtil.constrainHttpMethods(context);
 
     // Put up info server.
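
Note for anyone trying the patch: CSRF protection stays off unless REST_CSRF_ENABLED_KEY is set, so existing deployments see no behavior change until they opt in. Below is a minimal sketch of opting in programmatically, using only the configuration keys introduced above (the same strings work as hbase-site.xml property names); the class name is invented for illustration.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    // Hypothetical helper, not part of this patch: shows the knobs the new
    // RESTServer constants read before the server registers the CSRF filter.
    public class CsrfConfigSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Required opt-in; the two settings below are optional and show the defaults.
        conf.setBoolean("hbase.rest.csrf.enabled", true);
        conf.set("hbase.rest-csrf.custom-header", "X-XSRF-HEADER");
        conf.set("hbase.rest-csrf.methods-to-ignore", "GET,OPTIONS,HEAD,TRACE");
        System.out.println("CSRF enabled: "
            + conf.getBoolean("hbase.rest.csrf.enabled", false));
      }
    }
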
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index ebedf57..142c276 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -375,13 +375,27 @@ public class Client {
 
   /**
    * Send a PUT request
-   * @param cluster the cluster definition
    * @param path the path or URI
    * @param contentType the content MIME type
    * @param content the content bytes
+   * @param extraHdr additional Header to send
    * @return a Response object with response detail
    * @throws IOException
    */
+  public Response put(String path, String contentType, byte[] content, Header extraHdr)
+      throws IOException {
+    return put(cluster, path, contentType, content, extraHdr);
+  }
+
+  /**
+   * Send a PUT request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param contentType the content MIME type
+   * @param content the content bytes
+   * @return a Response object with response detail
+   * @throws IOException for error
+   */
   public Response put(Cluster cluster, String path, String contentType,
       byte[] content) throws IOException {
     Header[] headers = new Header[1];
@@ -391,6 +405,27 @@
 
   /**
    * Send a PUT request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param contentType the content MIME type
+   * @param content the content bytes
+   * @param extraHdr additional Header to send
+   * @return a Response object with response detail
+   * @throws IOException for error
+   */
+  public Response put(Cluster cluster, String path, String contentType,
+      byte[] content, Header extraHdr) throws IOException {
+    int cnt = extraHdr == null ? 1 : 2;
+    Header[] headers = new Header[cnt];
+    headers[0] = new Header("Content-Type", contentType);
+    if (extraHdr != null) {
+      headers[1] = extraHdr;
+    }
+    return put(cluster, path, headers, content);
+  }
+
+  /**
+   * Send a PUT request
    * @param path the path or URI
    * @param headers the HTTP headers to include, Content-Type must be
    *     supplied
@@ -442,13 +477,27 @@ public class Client {
 
   /**
    * Send a POST request
-   * @param cluster the cluster definition
    * @param path the path or URI
    * @param contentType the content MIME type
    * @param content the content bytes
+   * @param extraHdr additional Header to send
    * @return a Response object with response detail
    * @throws IOException
    */
+  public Response post(String path, String contentType, byte[] content, Header extraHdr)
+      throws IOException {
+    return post(cluster, path, contentType, content, extraHdr);
+  }
+
+  /**
+   * Send a POST request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param contentType the content MIME type
+   * @param content the content bytes
+   * @return a Response object with response detail
+   * @throws IOException for error
+   */
   public Response post(Cluster cluster, String path, String contentType,
       byte[] content) throws IOException {
     Header[] headers = new Header[1];
@@ -458,6 +507,27 @@
 
   /**
    * Send a POST request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param contentType the content MIME type
+   * @param content the content bytes
+   * @param extraHdr additional Header to send
+   * @return a Response object with response detail
+   * @throws IOException for error
+   */
+  public Response post(Cluster cluster, String path, String contentType,
+      byte[] content, Header extraHdr) throws IOException {
+    int cnt = extraHdr == null ? 1 : 2;
+    Header[] headers = new Header[cnt];
+    headers[0] = new Header("Content-Type", contentType);
+    if (extraHdr != null) {
+      headers[1] = extraHdr;
+    }
+    return post(cluster, path, headers, content);
+  }
+
+  /**
+   * Send a POST request
    * @param path the path or URI
    * @param headers the HTTP headers to include, Content-Type must be
    *     supplied
@@ -506,11 +576,22 @@ public class Client {
 
   /**
    * Send a DELETE request
-   * @param cluster the cluster definition
    * @param path the path or URI
+   * @param extraHdr additional Header to send
    * @return a Response object with response detail
    * @throws IOException
    */
+  public Response delete(String path, Header extraHdr) throws IOException {
+    return delete(cluster, path, extraHdr);
+  }
+
+  /**
+   * Send a DELETE request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @return a Response object with response detail
+   * @throws IOException for error
+   */
   public Response delete(Cluster cluster, String path) throws IOException {
     DeleteMethod method = new DeleteMethod();
     try {
@@ -522,4 +603,25 @@ public class Client {
       method.releaseConnection();
     }
   }
+
+  /**
+   * Send a DELETE request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param extraHdr additional Header to send
+   * @return a Response object with response detail
+   * @throws IOException for error
+   */
+  public Response delete(Cluster cluster, String path, Header extraHdr) throws IOException {
+    DeleteMethod method = new DeleteMethod();
+    try {
+      Header[] headers = { extraHdr };
+      int code = execute(cluster, method, headers, path);
+      headers = method.getResponseHeaders();
+      byte[] content = method.getResponseBody();
+      return new Response(code, headers, content);
+    } finally {
+      method.releaseConnection();
+    }
+  }
 }
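
For reference, the new overloads can be exercised as below. This is a hedged sketch, not part of the patch: the host, port, table path, and payload are invented, and it assumes a REST server running with hbase.rest.csrf.enabled=true and the default header name. Passing null as extraHdr falls back to sending only Content-Type, so one call site can serve both CSRF and non-CSRF deployments.

    import org.apache.commons.httpclient.Header;
    import org.apache.hadoop.hbase.rest.client.Client;
    import org.apache.hadoop.hbase.rest.client.Cluster;
    import org.apache.hadoop.hbase.rest.client.Response;

    public class ClientHeaderSketch {
      public static void main(String[] args) throws Exception {
        Client client = new Client(new Cluster().add("localhost", 8080));
        // The server-side filter only checks for the header's presence,
        // so an empty value is sufficient.
        Header csrf = new Header("X-XSRF-HEADER", "");
        byte[] body = "<TableSchema name=\"t1\"/>".getBytes("UTF-8");
        Response put = client.put("/t1/schema", "text/xml", body, csrf);
        Response del = client.delete("/t1/schema", csrf);
        System.out.println(put.getCode() + " " + del.getCode());
        client.shutdown();
      }
    }
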
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
index 628b17c..17aecf3 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
@@ -75,6 +75,7 @@ public class HBaseRESTTestingUtility {
       filter = filter.trim();
       context.addFilter(Class.forName(filter), "/*", 0);
     }
+    RESTServer.addCSRFFilter(context, conf);
     HttpServerUtil.constrainHttpMethods(context);
     LOG.info("Loaded filter classes :" + filterClasses);
     // start the server
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
index 17bb733..d005445 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
@@ -21,10 +21,15 @@ package org.apache.hadoop.hbase.rest;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
 
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 
+import org.apache.commons.httpclient.Header;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
@@ -45,8 +50,11 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
 @Category({RestTests.class, MediumTests.class})
+@RunWith(Parameterized.class)
 public class TestSchemaResource {
   private static String TABLE1 = "TestSchemaResource1";
   private static String TABLE2 = "TestSchemaResource2";
@@ -58,10 +66,27 @@ public class TestSchemaResource {
   private static JAXBContext context;
   private static Configuration conf;
   private static TestTableSchemaModel testTableSchemaModel;
+  private static Header extraHdr = null;
+
+  private static boolean csrfEnabled = true;
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> data() {
+    List<Object[]> params = new ArrayList<>();
+    params.add(new Object[] {Boolean.TRUE});
+    params.add(new Object[] {Boolean.FALSE});
+    return params;
+  }
+
+  public TestSchemaResource(Boolean csrf) {
+    csrfEnabled = csrf;
+  }
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
     conf = TEST_UTIL.getConfiguration();
+    conf.setBoolean(RESTServer.REST_CSRF_ENABLED_KEY, csrfEnabled);
+    extraHdr = new Header(RESTServer.REST_CSRF_CUSTOM_HEADER_DEFAULT, "");
     TEST_UTIL.startMiniCluster();
     REST_TEST_UTIL.startServletContainer(conf);
     client = new Client(new Cluster().add("localhost",
@@ -102,12 +127,18 @@
     // create the table
     model = testTableSchemaModel.buildTestModel(TABLE1);
     testTableSchemaModel.checkModel(model, TABLE1);
-    response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model));
+    if (csrfEnabled) {
+      // test put operation is forbidden without custom header
+      response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model));
+      assertEquals(400, response.getCode());
+    }
+
+    response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model), extraHdr);
     assertEquals(response.getCode(), 201);
 
     // recall the same put operation but in read-only mode
     conf.set("hbase.rest.readonly", "true");
-    response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model));
+    response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model), extraHdr);
     assertEquals(response.getCode(), 403);
 
     // retrieve the schema and validate it
@@ -124,15 +155,21 @@
     model = testTableSchemaModel.fromJSON(Bytes.toString(response.getBody()));
     testTableSchemaModel.checkModel(model, TABLE1);
 
+    if (csrfEnabled) {
+      // test delete schema operation is forbidden without custom header
+      response = client.delete(schemaPath);
+      assertEquals(400, response.getCode());
+    }
+
     // test delete schema operation is forbidden in read-only mode
-    response = client.delete(schemaPath);
+    response = client.delete(schemaPath, extraHdr);
     assertEquals(response.getCode(), 403);
 
     // return read-only setting back to default
     conf.set("hbase.rest.readonly", "false");
 
     // delete the table and make sure HBase concurs
-    response = client.delete(schemaPath);
+    response = client.delete(schemaPath, extraHdr);
     assertEquals(response.getCode(), 200);
     assertFalse(admin.tableExists(TableName.valueOf(TABLE1)));
   }
@@ -149,14 +186,20 @@
     // create the table
     model = testTableSchemaModel.buildTestModel(TABLE2);
     testTableSchemaModel.checkModel(model, TABLE2);
+
+    if (csrfEnabled) {
+      // test put operation is forbidden without custom header
+      response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput());
+      assertEquals(400, response.getCode());
+    }
     response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF,
-      model.createProtobufOutput());
+      model.createProtobufOutput(), extraHdr);
     assertEquals(response.getCode(), 201);
 
     // recall the same put operation but in read-only mode
     conf.set("hbase.rest.readonly", "true");
     response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF,
-      model.createProtobufOutput());
+      model.createProtobufOutput(), extraHdr);
     assertEquals(response.getCode(), 403);
 
     // retrieve the schema and validate it
@@ -175,15 +218,21 @@
     model.getObjectFromMessage(response.getBody());
     testTableSchemaModel.checkModel(model, TABLE2);
 
+    if (csrfEnabled) {
+      // test delete schema operation is forbidden without custom header
+      response = client.delete(schemaPath);
+      assertEquals(400, response.getCode());
+    }
+
     // test delete schema operation is forbidden in read-only mode
-    response = client.delete(schemaPath);
+    response = client.delete(schemaPath, extraHdr);
     assertEquals(response.getCode(), 403);
 
     // return read-only setting back to default
     conf.set("hbase.rest.readonly", "false");
 
     // delete the table and make sure HBase concurs
-    response = client.delete(schemaPath);
+    response = client.delete(schemaPath, extraHdr);
     assertEquals(response.getCode(), 200);
     assertFalse(admin.tableExists(TableName.valueOf(TABLE2)));
   }
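
Condensing what the parameterized test above establishes: with the filter enabled, reads pass with or without the header, while mutations are rejected at the filter itself (400) unless the header is present. A sketch under the same assumptions as the client example earlier (invented path, port, and payload):

    import org.apache.commons.httpclient.Header;
    import org.apache.hadoop.hbase.rest.client.Client;
    import org.apache.hadoop.hbase.rest.client.Cluster;
    import org.apache.hadoop.hbase.rest.client.Response;

    public class CsrfContractSketch {
      public static void main(String[] args) throws Exception {
        Client client = new Client(new Cluster().add("localhost", 8080));
        byte[] body = "<TableSchema name=\"t1\"/>".getBytes("UTF-8");
        // Mutation with the header: reaches the schema resource, expect 201.
        System.out.println(client.put("/t1/schema", "text/xml", body,
            new Header("X-XSRF-HEADER", "")).getCode());
        // Mutation without the header: rejected by the filter, expect 400.
        System.out.println(client.put("/t1/schema", "text/xml", body).getCode());
        // Read without the header: GET is in methods-to-ignore, expect 200.
        System.out.println(client.get("/t1/schema").getCode());
        client.shutdown();
      }
    }
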
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
new file mode 100644
index 0000000..3e9335c
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest.filter;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This filter provides protection against cross site request forgery (CSRF)
+ * attacks for REST APIs. Enabling this filter on an endpoint results in the
+ * requirement of all clients to send a particular (configurable) HTTP header
+ * with every request. In the absence of this header the filter will reject
+ * the attempt as a bad request.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class RestCsrfPreventionFilter implements Filter {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RestCsrfPreventionFilter.class);
+
+  public static final String CUSTOM_HEADER_PARAM = "custom-header";
+  public static final String CUSTOM_METHODS_TO_IGNORE_PARAM =
+      "methods-to-ignore";
+  static final String HEADER_DEFAULT = "X-XSRF-HEADER";
+  static final String METHODS_TO_IGNORE_DEFAULT = "GET,OPTIONS,HEAD,TRACE";
+  private String headerName = HEADER_DEFAULT;
+  private Set<String> methodsToIgnore = null;
+
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+    String customHeader = filterConfig.getInitParameter(CUSTOM_HEADER_PARAM);
+    if (customHeader != null) {
+      headerName = customHeader;
+    }
+    String customMethodsToIgnore =
+        filterConfig.getInitParameter(CUSTOM_METHODS_TO_IGNORE_PARAM);
+    if (customMethodsToIgnore != null) {
+      parseMethodsToIgnore(customMethodsToIgnore);
+    } else {
+      parseMethodsToIgnore(METHODS_TO_IGNORE_DEFAULT);
+    }
+    LOG.info("Adding cross-site request forgery (CSRF) protection, "
+        + "headerName = {}, methodsToIgnore = {}", headerName, methodsToIgnore);
+  }
+
+  void parseMethodsToIgnore(String mti) {
+    String[] methods = mti.split(",");
+    methodsToIgnore = new HashSet<>();
+    for (int i = 0; i < methods.length; i++) {
+      methodsToIgnore.add(methods[i]);
+    }
+  }
+
+  /**
+   * Returns the configured custom header name.
+   *
+   * @return custom header name
+   */
+  public String getHeaderName() {
+    return headerName;
+  }
+
+  /**
+   * Returns whether or not the request is allowed.
+   *
+   * @param method HTTP Method
+   * @param header value of HTTP header defined by {@link #getHeaderName()}
+   * @return true if the request is allowed, otherwise false
+   */
+  public boolean isRequestAllowed(String method, String header) {
+    return methodsToIgnore.contains(method) || header != null;
+  }
+
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response,
+      FilterChain chain) throws IOException, ServletException {
+    HttpServletRequest httpRequest = (HttpServletRequest)request;
+    if (isRequestAllowed(httpRequest.getMethod(),
+        httpRequest.getHeader(headerName))) {
+      chain.doFilter(request, response);
+    } else {
+      ((HttpServletResponse)response).sendError(
+          HttpServletResponse.SC_BAD_REQUEST,
+          "Missing Required Header for Vulnerability Protection");
+    }
+  }
+
+  @Override
+  public void destroy() {
+  }
+
+  /**
+   * Constructs a mapping of configuration properties to be used for filter
+   * initialization. The mapping includes all properties that start with the
+   * specified configuration prefix. Property names in the mapping are trimmed
+   * to remove the configuration prefix.
+   *
+   * @param conf configuration to read
+   * @param confPrefix configuration prefix
+   * @return mapping of configuration properties to be used for filter
+   *     initialization
+   */
+  public static Map<String, String> getFilterParams(Configuration conf,
+      String confPrefix) {
+    Map<String, String> filterConfigMap = new HashMap<>();
+    for (Map.Entry<String, String> entry : conf) {
+      String name = entry.getKey();
+      if (name.startsWith(confPrefix)) {
+        String value = conf.get(name);
+        name = name.substring(confPrefix.length());
+        filterConfigMap.put(name, value);
+      }
+    }
+    return filterConfigMap;
+  }
+}
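
To close the loop between RESTServer.addCSRFFilter and this filter: getFilterParams strips the "hbase.rest-csrf." prefix, so a site property such as hbase.rest-csrf.custom-header arrives at the filter as its custom-header init parameter, and isRequestAllowed then reduces to methodsToIgnore.contains(method) || header != null, presence only, value ignored. A small sketch with an invented header value:

    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter;

    public class FilterParamsSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.rest-csrf.custom-header", "X-CSRF-TOKEN"); // invented value
        // getFilterParams strips the prefix, so the filter sees "custom-header",
        // exactly the CUSTOM_HEADER_PARAM init parameter it looks up in init().
        Map<String, String> params =
            RestCsrfPreventionFilter.getFilterParams(conf, "hbase.rest-csrf.");
        System.out.println(params); // expected: {custom-header=X-CSRF-TOKEN}
      }
    }
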