diff --git a/pom.xml b/pom.xml
index 61152ca..ca9a02b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1203,6 +1203,11 @@
        <artifactId>jetty-util</artifactId>
        <version>${jetty.version}</version>
      </dependency>
+      <dependency>
+        <groupId>org.mortbay.jetty</groupId>
+        <artifactId>jetty-sslengine</artifactId>
+        <version>${jetty.version}</version>
+      </dependency>
      <dependency>
        <groupId>org.mortbay.jetty</groupId>
        <artifactId>jsp-2.1</artifactId>
@@ -1996,7 +2001,7 @@
-    <hadoop.version>0.23.3</hadoop.version>
+    <hadoop.version>0.23.7</hadoop.version>
     <slf4j.version>1.6.1</slf4j.version>
diff --git a/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureRpcEngine.java b/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureRpcEngine.java
index fd505d7..8c30d08 100644
--- a/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureRpcEngine.java
+++ b/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureRpcEngine.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
import org.apache.hadoop.hbase.util.Objects;
import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import java.io.IOException;
@@ -77,6 +79,23 @@ public class SecureRpcEngine implements RpcEngine {
return this.conf;
}
+ private static String getLazyProxyUser(User ticket) throws IOException {
+ String proxyUser = null;
+ UserGroupInformation realConnTicket = ticket.getUGI().getRealUser();
+ UserGroupInformation realCurrTicket = User.getCurrent().getUGI().getRealUser();
+ User currUser = User.getCurrent();
+ //As an optimization so as to avoid sending an extra param
+ //Add when a connection that has a proxy user wants to change it
+ //Add when a connection that has no proxy wants to use one
+ if(realConnTicket != null &&
+ realConnTicket.equals(realCurrTicket) &&
+ !ticket.getName().equals(currUser.getName()) ||
+ realConnTicket == null && realCurrTicket != null) {
+ proxyUser = currUser.getName();
+ }
+ return proxyUser;
+ }
+
private static class Invoker implements InvocationHandler {
private Class<? extends VersionedProtocol> protocol;
private InetSocketAddress address;
@@ -101,8 +120,9 @@ public class SecureRpcEngine implements RpcEngine {
if (logDebug) {
startTime = System.currentTimeMillis();
}
+ String proxyUser = getLazyProxyUser(ticket);
HbaseObjectWritable value = (HbaseObjectWritable)
- client.call(new Invocation(method, protocol, args), address,
+ client.call(new Invocation(method, protocol, args, proxyUser), address,
protocol, ticket, rpcTimeout);
if (logDebug) {
long callTime = System.currentTimeMillis() - startTime;
@@ -162,10 +182,10 @@ public class SecureRpcEngine implements RpcEngine {
if (this.client == null) {
throw new IOException("Client must be initialized by calling setConf(Configuration)");
}
-
+ String proxyUser = getLazyProxyUser(ticket);
Invocation[] invocations = new Invocation[params.length];
for (int i = 0; i < params.length; i++) {
- invocations[i] = new Invocation(method, protocol, params[i]);
+ invocations[i] = new Invocation(method, protocol, params[i], proxyUser);
}
Writable[] wrappedValues =
@@ -288,6 +308,19 @@ public class SecureRpcEngine implements RpcEngine {
throw new IOException("Could not find requested method, the usual " +
"cause is a version mismatch between client and server.");
}
+ if (call.getProxyUser() != null) {
+ UserGroupInformation proxyUgi =
+ UserGroupInformation.createProxyUser(
+ call.getProxyUser(),
+ RequestContext.getRequestUser().getUGI().getRealUser());
+ ProxyUsers.authorize(proxyUgi,
+ RequestContext.get().getRemoteAddress().getHostAddress(),
+ conf);
+ RequestContext.set(User.create(proxyUgi),
+ RequestContext.get().getRemoteAddress(),
+ RequestContext.get().getProtocol());
+ LOG.debug("Resetting request context with new proxy user: " + proxyUgi.getUserName());
+ }
if (verbose) log("Call: " + call);
Method method =
@@ -346,4 +379,4 @@ public class SecureRpcEngine implements RpcEngine {
v = v.substring(0, 55)+"...";
LOG.info(v);
}
-}
\ No newline at end of file
+}
diff --git a/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java b/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java
index 1006570..ac28026 100644
--- a/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java
+++ b/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java
@@ -19,6 +19,8 @@
*/
package org.apache.hadoop.hbase.ipc;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HbaseObjectWritable;
@@ -33,6 +35,8 @@ import java.lang.reflect.Method;
/** A method invocation, including the method name and its parameters.*/
public class Invocation extends VersionedWritable implements Configurable {
+ protected static final Log LOG =
+ LogFactory.getLog(Invocation.class);
protected String methodName;
@SuppressWarnings("rawtypes")
protected Class[] parameterClasses;
@@ -40,20 +44,27 @@ public class Invocation extends VersionedWritable implements Configurable {
protected Configuration conf;
private long clientVersion;
private int clientMethodsHash;
+ protected String proxyUser;
private static byte RPC_VERSION = 1;
+ private static int PROXY_BIT = 0;
+ public final static int BYPASS_CHECK_VERSION = 0x7F;
public Invocation() {}
public Invocation(Method method,
- Class<? extends VersionedProtocol> declaringClass, Object[] parameters) {
+ Class<? extends VersionedProtocol> declaringClass, Object[] parameters, String proxyUser) {
+ this.proxyUser = proxyUser;
this.methodName = method.getName();
this.parameterClasses = method.getParameterTypes();
this.parameters = parameters;
- if (declaringClass.equals(VersionedProtocol.class)) {
+ if (VersionedProtocol.class.isAssignableFrom(declaringClass)) {
//VersionedProtocol is exempted from version check.
- clientVersion = 0;
+ clientVersion = BYPASS_CHECK_VERSION;
clientMethodsHash = 0;
+ if(proxyUser != null) {
+ clientMethodsHash |= 1 << PROXY_BIT;
+ }
} else {
try {
Field versionField = declaringClass.getField("VERSION");
@@ -69,6 +80,11 @@ public class Invocation extends VersionedWritable implements Configurable {
}
}
+ public Invocation(Method method,
+ Class<? extends VersionedProtocol> declaringClass, Object[] parameters) {
+ this(method, declaringClass, parameters, null);
+ }
+
/** @return The name of the method invoked. */
public String getMethodName() { return methodName; }
@@ -127,6 +143,10 @@ public class Invocation extends VersionedWritable implements Configurable {
this.conf);
parameterClasses[i] = objectWritable.getDeclaredClass();
}
+ if(clientVersion == BYPASS_CHECK_VERSION &&
+ (clientMethodsHash & 1 << PROXY_BIT) != 0) {
+ proxyUser = in.readUTF();
+ }
}
public void write(DataOutput out) throws IOException {
@@ -139,6 +159,10 @@ public class Invocation extends VersionedWritable implements Configurable {
HbaseObjectWritable.writeObject(out, parameters[i], parameterClasses[i],
conf);
}
+ if(clientVersion == BYPASS_CHECK_VERSION &&
+ (clientMethodsHash & 1 << PROXY_BIT) != 0) {
+ out.writeUTF(proxyUser);
+ }
}
@Override
@@ -170,4 +194,8 @@ public class Invocation extends VersionedWritable implements Configurable {
public byte getVersion() {
return RPC_VERSION;
}
+
+ public String getProxyUser() {
+ return proxyUser;
+ }
}
diff --git a/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java b/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java
index a93fa49..768b33f 100644
--- a/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java
+++ b/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java
@@ -294,7 +294,7 @@ class WritableRpcEngine implements RpcEngine {
//Verify protocol version.
//Bypass the version check for VersionedProtocol
- if (!method.getDeclaringClass().equals(VersionedProtocol.class)) {
+ if (call.getProtocolVersion() != Invocation.BYPASS_CHECK_VERSION) {
long clientVersion = call.getProtocolVersion();
ProtocolSignature serverInfo = ((VersionedProtocol) instance)
.getProtocolSignature(protocol.getCanonicalName(), call
diff --git a/src/main/java/org/apache/hadoop/hbase/rest/Constants.java b/src/main/java/org/apache/hadoop/hbase/rest/Constants.java
index 21d76fe..f40eca9 100644
--- a/src/main/java/org/apache/hadoop/hbase/rest/Constants.java
+++ b/src/main/java/org/apache/hadoop/hbase/rest/Constants.java
@@ -39,4 +39,5 @@ public interface Constants {
public static final String MIMETYPE_JSON = "application/json";
public static final String CRLF = "\r\n";
+ public static final String FILTER_CLASSES = "hbase.rest.filter.classes";
}
diff --git a/src/main/java/org/apache/hadoop/hbase/rest/DoAsServletContainer.java b/src/main/java/org/apache/hadoop/hbase/rest/DoAsServletContainer.java
new file mode 100644
index 0000000..3316af4
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/hbase/rest/DoAsServletContainer.java
@@ -0,0 +1,64 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest;
+
+import com.sun.jersey.spi.container.servlet.ServletContainer;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.rest.filter.AuthFilter;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+
+public class DoAsServletContainer extends ServletContainer {
+ Log LOG = LogFactory.getLog(DoAsServletContainer.class);
+
+ @Override
+ public void service(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
+ try {
+ String proxyUser = (String)request.getAttribute(AuthFilter.ATTR_USER);
+ UserGroupInformation ugi =
+ UserGroupInformation.createProxyUser(proxyUser,
+ User.getCurrent().getUGI());
+ LOG.debug("Doing as "+proxyUser);
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ callService(request, response);
+ return null;
+ }
+ });
+ } catch (Exception e) {
+ LOG.debug("Service call failed.", e);
+ throw new IllegalStateException(e);
+ }
+ }
+
+ public void callService(HttpServletRequest request, HttpServletResponse response) throws
+ ServletException, IOException {
+ super.service(request, response);
+ }
+}
diff --git a/src/main/java/org/apache/hadoop/hbase/rest/Main.java b/src/main/java/org/apache/hadoop/hbase/rest/Main.java
index ca3edc5..f73dd6d 100644
--- a/src/main/java/org/apache/hadoop/hbase/rest/Main.java
+++ b/src/main/java/org/apache/hadoop/hbase/rest/Main.java
@@ -30,6 +30,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.rest.filter.AuthFilter;
import org.apache.hadoop.hbase.rest.filter.GzipFilter;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.InfoServer;
@@ -45,6 +46,8 @@ import java.util.Map.Entry;
import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.nio.SelectChannelConnector;
+import org.mortbay.jetty.security.SslSelectChannelConnector;
+import org.mortbay.jetty.security.SslSocketConnector;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.ServletHolder;
import org.mortbay.thread.QueuedThreadPool;
@@ -81,8 +84,10 @@ public class Main implements Constants {
VersionInfo.logVersion();
Configuration conf = HBaseConfiguration.create();
+ boolean secureREST = User.isSecurityEnabled() && User.isHBaseSecurityEnabled(conf);
+
// login the server principal (if using secure Hadoop)
- if (User.isSecurityEnabled() && User.isHBaseSecurityEnabled(conf)) {
+ if (secureREST) {
String machineName = Strings.domainNamePointerToHostName(
DNS.getDefaultHost(conf.get("hbase.rest.dns.interface", "default"),
conf.get("hbase.rest.dns.nameserver", "default")));
@@ -146,6 +151,9 @@ public class Main implements Constants {
// set up the Jersey servlet container for Jetty
ServletHolder sh = new ServletHolder(ServletContainer.class);
+ if (secureREST) {
+ sh = new ServletHolder(DoAsServletContainer.class);
+ }
sh.setInitParameter(
"com.sun.jersey.config.property.resourceConfigClass",
ResourceConfig.class.getCanonicalName());
@@ -171,6 +179,15 @@ public class Main implements Constants {
Server server = new Server();
Connector connector = new SelectChannelConnector();
+ if(conf.getBoolean("hbase.rest.ssl.enabled", false)) {
+ connector = new SslSelectChannelConnector();
+ String keystore = conf.get("hbase.rest.ssl.keystore.store");
+ String password = conf.get("hbase.rest.ssl.keystore.password");
+ String keyPassword = conf.get("hbase.rest.ssl.keystore.keypassword", password);
+ ((SslSelectChannelConnector)connector).setKeystore(keystore);
+ ((SslSelectChannelConnector)connector).setPassword(password);
+ ((SslSelectChannelConnector)connector).setKeyPassword(keyPassword);
+ }
connector.setPort(servlet.getConfiguration().getInt("hbase.rest.port", 8080));
connector.setHost(servlet.getConfiguration().get("hbase.rest.host", "0.0.0.0"));
@@ -196,6 +213,18 @@ public class Main implements Constants {
context.addServlet(shPojoMap, "/status/cluster");
context.addServlet(sh, "/*");
context.addFilter(GzipFilter.class, "/*", 0);
+ if (secureREST) {
+ context.addFilter(AuthFilter.class, "/*", 0);
+ }
+
+ //Load filters from configuration.
+ String[] filterClasses = servlet.getConfiguration().getStrings(FILTER_CLASSES);
+ if(filterClasses != null) {
+ for(String filter : filterClasses){
+ filter = filter.trim();
+ context.addFilter(Class.forName(filter), "/*", 0);
+ }
+ }
// Put up info server.
int port = conf.getInt("hbase.rest.info.port", 8085);
diff --git a/src/main/java/org/apache/hadoop/hbase/rest/SecurityException.java b/src/main/java/org/apache/hadoop/hbase/rest/SecurityException.java
new file mode 100644
index 0000000..c5c7c9d
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/hbase/rest/SecurityException.java
@@ -0,0 +1,31 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response;
+
+public class SecurityException extends WebApplicationException {
+
+ public SecurityException(String msg) {
+ super(Response.status(Response.Status.UNAUTHORIZED).entity(msg).build());
+ }
+
+}
diff --git a/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
new file mode 100644
index 0000000..3ccd86d
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest.filter;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.util.Strings;
+import org.apache.hadoop.net.DNS;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Authentication filter that extends Hadoop-auth AuthenticationFilter to override
+ * the configuration loading.
+ */
+public class AuthFilter extends AuthenticationFilter {
+ private static final Log LOG = LogFactory.getLog(AuthFilter.class);
+ private static final String HBASE_PREFIX = "hbase.rest.authentication.";
+ public static final String ATTR_USER = "hbase.rest.user";
+
+
+ private HttpServlet optionsServlet;
+
+ public AuthFilter() {
+ optionsServlet = new HttpServlet() {};
+ }
+
+ /**
+ * Initialize the filter.
+ *
+ * @param filterConfig filter configuration.
+ * @throws ServletException thrown if the filter could not be initialized.
+ */
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ super.init(filterConfig);
+ optionsServlet.init();
+ }
+
+ /**
+ * Destroy the filter.
+ */
+ @Override
+ public void destroy() {
+ optionsServlet.destroy();
+ super.destroy();
+ }
+
+ /**
+ * Returns the configuration from hbase configuration to be used by the authentication filter.
+ *
+ * All properties from hbase configuration which name starts with {@link #HBASE_PREFIX} will
+ * be returned. The keys of the returned properties are trimmed from the {@link #HBASE_PREFIX}
+ * prefix, for example the hbase configuration property name 'hbase.rest.authentication.type'
+ * will be just 'type'.
+ *
+ * @param configPrefix configuration prefix, this parameter is ignored by this implementation.
+ * @param filterConfig filter configuration, this parameter is ignored by this implementation.
+ * @return all hbase configuration properties prefixed with {@link #HBASE_PREFIX}, without the
+ * prefix.
+ */
+ @Override
+ protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {
+
+ Properties props = new Properties();
+ Configuration conf = HBaseConfiguration.create();
+ //setting the cookie path to root '/' so it is used for all resources.
+ props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
+ String principalKey = HBASE_PREFIX+KerberosAuthenticationHandler.PRINCIPAL;
+ for (Map.Entry<String, String> entry : conf) {
+ String name = entry.getKey();
+ if (name.startsWith(HBASE_PREFIX)) {
+ String value = conf.get(name);
+ if(name.equals(principalKey)) {
+ try {
+ String machineName = Strings.domainNamePointerToHostName(
+ DNS.getDefaultHost(conf.get("hbase.rest.dns.interface", "default"),
+ conf.get("hbase.rest.dns.nameserver", "default")));
+ value = org.apache.hadoop.security.SecurityUtil.getServerPrincipal(
+ value, machineName);
+ } catch (IOException e) {
+ throw new IllegalStateException("Failed to retrieve server principal", e);
+ }
+ }
+ LOG.debug("Setting property "+name+"="+value);
+ name = name.substring(HBASE_PREFIX.length());
+ props.setProperty(name, value);
+ }
+ }
+
+ return props;
+ }
+
+ /**
+ * Enforces authentication using Hadoop-auth AuthenticationFilter.
+ *
+ * This method is overriden to respond to HTTP OPTIONS requests for authenticated calls, regardless
+ * of the target servlet supporting OPTIONS or not and to inject the authenticated user name as
+ * request attribute for HBase to retrieve the user id.
+ *
+ * @param request http request.
+ * @param response http response.
+ * @param filterChain filter chain.
+ * @throws IOException thrown if an IO error occurs.
+ * @throws ServletException thrown if a servlet error occurs.
+ */
+ @Override
+ public void doFilter(final ServletRequest request, final ServletResponse response, final FilterChain filterChain)
+ throws IOException, ServletException {
+
+ FilterChain filterChainWrapper = new FilterChain() {
+ @Override
+ public void doFilter(final ServletRequest servletRequest, final ServletResponse servletResponse)
+ throws IOException, ServletException {
+ HttpServletRequest httpRequest = (HttpServletRequest) servletRequest;
+ if (httpRequest.getMethod().equals("OPTIONS")) {
+ optionsServlet.service(request, response);
+ }
+ else {
+ httpRequest.setAttribute(ATTR_USER, httpRequest.getRemoteUser());
+ LOG.debug("Authenticated: " + httpRequest.getRemoteUser());
+ filterChain.doFilter(servletRequest, servletResponse);
+ }
+ }
+ };
+
+ super.doFilter(request, response, filterChainWrapper);
+ }
+
+}