diff --git a/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java b/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java
new file mode 100644
index 0000000..2936f11
--- /dev/null
+++ b/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.thrift;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.SaslRpcServer;
+
+/**
+ * Functions that bridge Thrift's SASL transports to Hadoop's SASL callback
+ * handlers and authentication classes.
+ *
+ * This is a 0.23/2.x specific implementation
+ */
+public class HadoopThriftAuthBridge23 extends HadoopThriftAuthBridge20S {
+
+  static Field SASL_PROPS_FIELD;
+  static Class<?> SASL_PROPERTIES_RESOLVER_CLASS;
+  static {
+    SASL_PROPERTIES_RESOLVER_CLASS = null;
+    final String SASL_PROP_RES_CLASSNAME = "org.apache.hadoop.security.SaslPropertiesResolver";
+    try {
+      SASL_PROPERTIES_RESOLVER_CLASS = Class.forName(SASL_PROP_RES_CLASSNAME);
+      // found the class, so this would be hadoop version 2.4 or newer (See
+      // HADOOP-10221, HADOOP-10451)
+    } catch (ClassNotFoundException e) {
+    }
+
+    if (SASL_PROPERTIES_RESOLVER_CLASS == null) {
+      // this must be a pre hadoop 2.4 version
+      try {
+        SASL_PROPS_FIELD = SaslRpcServer.class.getField("SASL_PROPS");
+      } catch (NoSuchFieldException e) {
+        // Older version of hadoop should have had this field
+        throw new IllegalStateException("Error finding hadoop SASL properties resolver class", e);
+      }
+    }
+  }
+
+  /**
+   * Read and return Hadoop SASL configuration which can be configured using
+   * "hadoop.rpc.protection"
+   *
+   * @param conf
+   * @return Hadoop SASL configuration
+   */
+  @SuppressWarnings("unchecked")
+  @Override
+  public Map<String, String> getHadoopSaslProperties(Configuration conf) {
+    if (SASL_PROPS_FIELD != null) {
+      // pre hadoop 2.4 way of finding the sasl property settings
+      // Initialize the SaslRpcServer to ensure QOP parameters are read from
+      // conf
+      SaslRpcServer.init(conf);
+      try {
+        return (Map<String, String>) SASL_PROPS_FIELD.get(null);
+      } catch (Exception e) {
+        throw new IllegalStateException("Error finding hadoop SASL properties", e);
+      }
+    }
+    // 2.4 and later way of finding sasl property
+    try {
+      Method getInstanceMethod = SASL_PROPERTIES_RESOLVER_CLASS.getMethod("getInstance",
+          Configuration.class);
+      Method getDefaultPropertiesMethod = SASL_PROPERTIES_RESOLVER_CLASS.getMethod(
+          "getDefaultProperties");
+      Configurable saslPropertiesResolver = (Configurable) getInstanceMethod.invoke(null, conf);
+      saslPropertiesResolver.setConf(conf);
+      return (Map<String, String>) getDefaultPropertiesMethod.invoke(saslPropertiesResolver);
+    } catch (Exception e) {
+      throw new IllegalStateException("Error finding hadoop SASL properties", e);
+    }
+  }
+
+}
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
index bf9c84f..f354fb7 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.shims;
 
-import java.lang.IllegalArgumentException;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -33,6 +32,7 @@
   private static HadoopShims hadoopShims;
   private static JettyShims jettyShims;
   private static AppenderSkeleton eventCounter;
+  private static HadoopThriftAuthBridge hadoopThriftAuthBridge;
 
   /**
    * The names of the classes for shimming Hadoop for each major version.
@@ -72,6 +72,22 @@
   }
 
   /**
+   * The names of the classes for shimming {@link HadoopThriftAuthBridge}
+   */
+  private static final HashMap<String, String> HADOOP_THRIFT_AUTH_BRIDGE_CLASSES =
+      new HashMap<String, String>();
+
+  static {
+    HADOOP_THRIFT_AUTH_BRIDGE_CLASSES.put("0.20",
+        "org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge");
+    HADOOP_THRIFT_AUTH_BRIDGE_CLASSES.put("0.20S",
+        "org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge20S");
+    HADOOP_THRIFT_AUTH_BRIDGE_CLASSES.put("0.23",
+        "org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge23");
+  }
+
+
+  /**
    * Factory method to get an instance of HadoopShims based on the
    * version of Hadoop on the classpath.
    */
@@ -101,13 +117,12 @@ public static synchronized AppenderSkeleton getEventCounter() {
   }
 
   public static synchronized HadoopThriftAuthBridge getHadoopThriftAuthBridge() {
-    if (getHadoopShims().isSecureShimImpl()) {
-      return createShim("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge20S",
-          HadoopThriftAuthBridge.class);
-    } else {
-      return new HadoopThriftAuthBridge();
-    }
-  }
+    if (hadoopThriftAuthBridge == null) {
+      hadoopThriftAuthBridge = loadShims(HADOOP_THRIFT_AUTH_BRIDGE_CLASSES,
+          HadoopThriftAuthBridge.class);
+    }
+    return hadoopThriftAuthBridge;
+  }
 
   private static <T> T loadShims(Map<String, String> classMap, Class<T> xface) {
     String vers = getMajorVersion();
@@ -115,13 +130,12 @@ public static synchronized HadoopThriftAuthBridge getHadoopThriftAuthBridge() {
     return createShim(className, xface);
   }
 
-  private static <T> T createShim(String className, Class<T> xface) {
+  private static <T> T createShim(String className, Class<T> xface) {
     try {
       Class<?> clazz = Class.forName(className);
       return xface.cast(clazz.newInstance());
     } catch (Exception e) {
-      throw new RuntimeException("Could not load shims in class " +
-          className, e);
+      throw new RuntimeException("Could not load shims in class " + className, e);
     }
   }
 