diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
index dc08893..43d24d8 100644
--- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
+++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
@@ -36,7 +36,7 @@
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import com.google.common.io.Files;
-import org.apache.hive.service.server.HiveServer2;
+import org.apache.hive.service.server.HiveServer2ClientUtils;
/**
* Wrapper around Hadoop's MiniKdc for use in hive tests.
@@ -194,7 +194,7 @@
.withConf(hiveConf)
.withMiniKdc(hivePrincipal, hiveKeytab)
.withAuthenticationType(authType);
- if (HiveServer2.isHTTPTransportMode(hiveConf)) {
+ if (HiveServer2ClientUtils.isHTTPTransportMode(hiveConf)) {
miniHS2Builder.withHTTPTransport();
}
return miniHS2Builder.build();
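
Note on the hunk above: the test now calls HiveServer2ClientUtils.isHTTPTransportMode(...) so hive-minikdc no longer needs the full HiveServer2 server class. The body of the new utility is not part of this diff; below is a hypothetical sketch only, assuming it keeps the existing semantics of HiveServer2.isHTTPTransportMode (a system property overriding hive.server2.transport.mode from HiveConf).

    // Hypothetical sketch; the real HiveServer2ClientUtils is not shown in this diff.
    package org.apache.hive.service.server;

    import org.apache.hadoop.hive.conf.HiveConf;

    public final class HiveServer2ClientUtils {
      public static boolean isHTTPTransportMode(HiveConf hiveConf) {
        // Check a system property override first, then the configured value.
        String transportMode = System.getProperty("hive.server2.transport.mode");
        if (transportMode == null) {
          transportMode = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
        }
        return transportMode != null && transportMode.equalsIgnoreCase("http");
      }
    }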
diff --git jdbc/pom.xml jdbc/pom.xml
index 3d0719b..6ac9007 100644
--- jdbc/pom.xml
+++ jdbc/pom.xml
@@ -42,23 +42,12 @@
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-service</artifactId>
-      <version>${project.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.hive</groupId>
-          <artifactId>hive-exec</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-serde</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-metastore</artifactId>
+      <artifactId>hive-service-client</artifactId>
+      <version>${project.version}</version>
diff --git jdbc/src/java/org/apache/hive/jdbc/EmbeddedCLIServicePortal.java jdbc/src/java/org/apache/hive/jdbc/EmbeddedCLIServicePortal.java
new file mode 100644
index 0000000..c572ecc
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/EmbeddedCLIServicePortal.java
@@ -0,0 +1,42 @@
+package org.apache.hive.jdbc;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.Service;
+import org.apache.hive.service.rpc.thrift.TCLIService;
+import org.apache.hive.service.rpc.thrift.TCLIService.Iface;
+
+public class EmbeddedCLIServicePortal {
+
+ public static Iface get(Map<String, String> hiveConfs) {
+ TCLIService.Iface embeddedClient;
+ try {
+ Class<? extends Iface> clazz =
+ (Class<? extends Iface>) Class.forName("org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService");
+ embeddedClient = clazz.newInstance();
+ ((Service) embeddedClient).init(buildOverlayedConf(hiveConfs));
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException("Please Load hive-service jar to the classpath to enable embedded mode");
+ } catch (Exception e) {
+ throw new RuntimeException("Error initializing embedded mode", e);
+ }
+ return embeddedClient;
+ }
+
+ private static HiveConf buildOverlayedConf(Map<String, String> confOverlay) {
+ HiveConf conf = new HiveConf();
+ if (confOverlay != null && !confOverlay.isEmpty()) {
+ // apply overlay query specific settings, if any
+ for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
+ try {
+ conf.set(confEntry.getKey(), confEntry.getValue());
+ } catch (IllegalArgumentException e) {
+ throw new RuntimeException("Error applying statement specific settings", e);
+ }
+ }
+ }
+ return conf;
+ }
+
+}
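
The portal above is what lets jdbc/pom.xml drop its compile-time dependency on hive-service: the embedded service is looked up reflectively and only has to be present at runtime. A minimal usage sketch mirroring the call site in HiveConnection below; the overlay key is illustrative.

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hive.service.rpc.thrift.TCLIService;

    public class EmbeddedPortalDemo {
      public static void main(String[] args) {
        Map<String, String> confOverlay = new HashMap<>();
        confOverlay.put("hive.exec.scratchdir", "/tmp/hive-embedded"); // illustrative overlay entry
        // Throws RuntimeException unless hive-service is on the runtime classpath.
        TCLIService.Iface client = EmbeddedCLIServicePortal.get(confOverlay);
        System.out.println("embedded client ready: " + client);
      }
    }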
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index bc332c1..d797d40 100644
--- jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -18,7 +18,6 @@
package org.apache.hive.jdbc;
-import org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
@@ -29,14 +28,12 @@
import org.apache.hive.service.rpc.thrift.TSetClientInfoReq;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.common.auth.HiveAuthUtils;
-import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
import org.apache.hive.service.auth.HiveAuthConstants;
import org.apache.hive.service.auth.KerberosSaslHelper;
import org.apache.hive.service.auth.PlainSaslHelper;
import org.apache.hive.service.auth.SaslQOP;
import org.apache.hive.service.cli.session.SessionUtils;
-import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.hive.service.rpc.thrift.TCancelDelegationTokenReq;
import org.apache.hive.service.rpc.thrift.TCancelDelegationTokenResp;
@@ -168,7 +165,7 @@
}
return ZooKeeperHiveClientHelper.getDirectParamsList(params);
}
-
+
public static List<String> getAllUrlStrings(String zookeeperBasedHS2Url) throws Exception {
List<String> jdbcUrls = new ArrayList<>();
List<JdbcConnectionParams> allConnectionParams = getAllUrls(zookeeperBasedHS2Url);
@@ -182,7 +179,7 @@
}
private static String makeDirectJDBCUrlFromConnectionParams(JdbcConnectionParams cp) {
- // Direct JDBC Url format:
+ // Direct JDBC Url format:
// jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
StringBuilder url = new StringBuilder("");
if (cp != null) {
@@ -273,7 +270,9 @@
wmPool = sessConfMap.get(JdbcConnectionParams.WM_POOL);
for (String application : JdbcConnectionParams.APPLICATION) {
wmApp = sessConfMap.get(application);
- if (wmApp != null) break;
+ if (wmApp != null) {
+ break;
+ }
}
// add supported protocols
@@ -289,9 +288,7 @@
supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V10);
if (isEmbeddedMode) {
- EmbeddedThriftBinaryCLIService embeddedClient = new EmbeddedThriftBinaryCLIService();
- embeddedClient.init(null, connParams.getHiveConfs());
- client = embeddedClient;
+ client = EmbeddedCLIServicePortal.get(connParams.getHiveConfs());
connParams.getHiveConfs().clear();
// open client session
openSession();
@@ -1201,7 +1198,9 @@
@Override
public String getClientInfo(String name) throws SQLException {
- if (clientInfo == null) return null;
+ if (clientInfo == null) {
+ return null;
+ }
return clientInfo.getProperty(name);
}
@@ -1505,8 +1504,11 @@
if (!autoCommit) {
LOG.warn("Request to set autoCommit to false; Hive does not support autoCommit=false.");
SQLWarning warning = new SQLWarning("Hive does not support autoCommit=false");
- if (warningChain == null) warningChain = warning;
- else warningChain.setNextWarning(warning);
+ if (warningChain == null) {
+ warningChain = warning;
+ } else {
+ warningChain.setNextWarning(warning);
+ }
}
}
@@ -1559,7 +1561,9 @@
Map<String, String> map = new HashMap<>();
if (clientInfo != null) {
for (Entry<Object, Object> e : clientInfo.entrySet()) {
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-service-rpc</artifactId>
+      <artifactId>hive-service-client</artifactId>
+      <version>${project.version}</version>
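
For context on the embedded-mode change in the HiveConnection hunks above: embedded mode is selected by a JDBC URL with an empty authority, and with this patch the driver reaches the in-process service through EmbeddedCLIServicePortal instead of constructing EmbeddedThriftBinaryCLIService directly. A small, assumed-typical usage sketch:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;

    public class EmbeddedJdbcDemo {
      public static void main(String[] args) throws SQLException {
        // "jdbc:hive2://" with no host/port runs the engine in-process,
        // so hive-service must be on the runtime classpath.
        try (Connection conn = DriverManager.getConnection("jdbc:hive2://", "user", "")) {
          System.out.println("connected: " + !conn.isClosed());
        }
      }
    }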
diff --git service/src/java/org/apache/hive/service/FilterService.java service/src/java/org/apache/hive/service/FilterService.java
deleted file mode 100644
index 3aff1dc..0000000
--- service/src/java/org/apache/hive/service/FilterService.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-
-/**
- * FilterService.
- *
- */
-public class FilterService implements Service {
-
-
- private final Service service;
- private final long startTime = System.currentTimeMillis();
-
- public FilterService(Service service) {
- this.service = service;
- }
-
- @Override
- public void init(HiveConf config) {
- service.init(config);
- }
-
- @Override
- public void start() {
- service.start();
- }
-
- @Override
- public void stop() {
- service.stop();
- }
-
-
- @Override
- public void register(ServiceStateChangeListener listener) {
- service.register(listener);
- }
-
- @Override
- public void unregister(ServiceStateChangeListener listener) {
- service.unregister(listener);
- }
-
- @Override
- public String getName() {
- return service.getName();
- }
-
- @Override
- public HiveConf getHiveConf() {
- return service.getHiveConf();
- }
-
- @Override
- public STATE getServiceState() {
- return service.getServiceState();
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
-}
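
FilterService, removed here along with the rest of the org.apache.hive.service core (presumably relocated to the new hive-service-client module, though the destination is not shown in this section), is a plain decorator: every lifecycle method delegates to the wrapped Service. An illustrative subclass sketch, assuming the same package:

    // Illustrative decorator built on the FilterService shown above.
    public class LoggingService extends FilterService {
      public LoggingService(Service service) {
        super(service);
      }

      @Override
      public void start() {
        System.out.println("starting " + getName() + " (state " + getServiceState() + ")");
        super.start();
      }
    }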
diff --git service/src/java/org/apache/hive/service/Service.java service/src/java/org/apache/hive/service/Service.java
deleted file mode 100644
index f989641..0000000
--- service/src/java/org/apache/hive/service/Service.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-
-/**
- * Service.
- *
- */
-public interface Service {
-
- /**
- * Service states
- */
- public enum STATE {
- /** Constructed but not initialized */
- NOTINITED,
-
- /** Initialized but not started or stopped */
- INITED,
-
- /** started and not stopped */
- STARTED,
-
- /** stopped. No further state transitions are permitted */
- STOPPED
- }
-
- /**
- * Initialize the service.
- *
- * The transition must be from {@link STATE#NOTINITED} to {@link STATE#INITED} unless the
- * operation failed and an exception was raised.
- *
- * @param conf
- * the configuration of the service
- */
- void init(HiveConf conf);
-
-
- /**
- * Start the service.
- *
- * The transition should be from {@link STATE#INITED} to {@link STATE#STARTED} unless the
- * operation failed and an exception was raised.
- */
- void start();
-
- /**
- * Stop the service.
- *
- * This operation must be designed to complete regardless of the initial state
- * of the service, including the state of all its internal fields.
- */
- void stop();
-
- /**
- * Register an instance of the service state change events.
- *
- * @param listener
- * a new listener
- */
- void register(ServiceStateChangeListener listener);
-
- /**
- * Unregister a previously registered instance of the service state change events.
- *
- * @param listener
- * the listener to unregister.
- */
- void unregister(ServiceStateChangeListener listener);
-
- /**
- * Get the name of this service.
- *
- * @return the service name
- */
- String getName();
-
- /**
- * Get the configuration of this service.
- * This is normally not a clone and may be manipulated, though there are no
- * guarantees as to what the consequences of such actions may be
- *
- * @return the current configuration, unless a specific implementation chooses
- * otherwise.
- */
- HiveConf getHiveConf();
-
- /**
- * Get the current service state
- *
- * @return the state of the service
- */
- STATE getServiceState();
-
- /**
- * Get the service start time
- *
- * @return the start time of the service. This will be zero if the service
- * has not yet been started.
- */
- long getStartTime();
-
-}
diff --git service/src/java/org/apache/hive/service/ServiceException.java service/src/java/org/apache/hive/service/ServiceException.java
deleted file mode 100644
index ad6d085..0000000
--- service/src/java/org/apache/hive/service/ServiceException.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service;
-
-/**
- * ServiceException.
- *
- */
-public class ServiceException extends RuntimeException {
-
- public ServiceException(Throwable cause) {
- super(cause);
- }
-
- public ServiceException(String message) {
- super(message);
- }
-
- public ServiceException(String message, Throwable cause) {
- super(message, cause);
- }
-}
diff --git service/src/java/org/apache/hive/service/ServiceStateChangeListener.java service/src/java/org/apache/hive/service/ServiceStateChangeListener.java
deleted file mode 100644
index feee8d6..0000000
--- service/src/java/org/apache/hive/service/ServiceStateChangeListener.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service;
-
-/**
- * ServiceStateChangeListener.
- *
- */
-public interface ServiceStateChangeListener {
-
- /**
- * Callback to notify of a state change. The service will already
- * have changed state before this callback is invoked.
- *
- * This operation is invoked on the thread that initiated the state change,
- * while the service itself is in a synchronized section.
- *
- * <p>Any long-lived operation here will prevent the service state
- * change from completing in a timely manner.
- *
- * <p>If another thread is somehow invoked from the listener, and
- * that thread invokes the methods of the service (including
- * subclass-specific methods), there is a risk of a deadlock.
- *
- *
- *
- * @param service the service that has changed.
- */
- void stateChanged(Service service);
-
-}
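
Since Service exposes register/unregister, a listener is a single-method callback; per the javadoc above it runs synchronously on the thread driving the transition, so it should return quickly. A minimal sketch, assuming a Service instance named service is in scope:

    // Minimal listener; Service.STATE and getName() come from the Service interface above.
    ServiceStateChangeListener listener = changed -> {
      if (changed.getServiceState() == Service.STATE.STOPPED) {
        System.out.println(changed.getName() + " stopped");
      }
    };
    service.register(listener); // 'service' is any Service instance (assumed in scope)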
diff --git service/src/java/org/apache/hive/service/ServiceUtils.java service/src/java/org/apache/hive/service/ServiceUtils.java
deleted file mode 100644
index 49fb5d5..0000000
--- service/src/java/org/apache/hive/service/ServiceUtils.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.slf4j.Logger;
-
-public class ServiceUtils {
-
- /*
- * Get the index separating the user name from domain name (the user's name up
- * to the first '/' or '@').
- *
- * @param userName full user name.
- * @return index of domain match or -1 if not found
- */
- public static int indexOfDomainMatch(String userName) {
- if (userName == null) {
- return -1;
- }
-
- int idx = userName.indexOf('/');
- int idx2 = userName.indexOf('@');
- int endIdx = Math.min(idx, idx2); // Use the earlier match.
- // Unless at least one of '/' or '@' was not found, in
- // which case, use the latter match.
- if (endIdx == -1) {
- endIdx = Math.max(idx, idx2);
- }
- return endIdx;
- }
-
- /**
- * Close the Closeable objects and ignore any {@link IOException} or
- * null pointers. Must only be used for cleanup in exception handlers.
- *
- * @param log the log to record problems to at debug level. Can be null.
- * @param closeables the objects to close
- */
- public static void cleanup(Logger log, java.io.Closeable... closeables) {
- for (java.io.Closeable c : closeables) {
- if (c != null) {
- try {
- c.close();
- } catch(IOException e) {
- if (log != null && log.isDebugEnabled()) {
- log.debug("Exception in closing " + c, e);
- }
- }
- }
- }
- }
-
- public static boolean canProvideProgressLog(HiveConf hiveConf) {
- return ("tez".equals(hiveConf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE)) || "spark"
- .equals(hiveConf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE))) && hiveConf
- .getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_INPLACE_PROGRESS);
- }
-
-}
\ No newline at end of file
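
For reference, how indexOfDomainMatch behaves on typical Kerberos-style names, worked from the min/max logic above:

    ServiceUtils.indexOfDomainMatch("hive/host.example.com@REALM"); // 4: '/' comes first
    ServiceUtils.indexOfDomainMatch("hive@REALM");                  // 4: only '@' present
    ServiceUtils.indexOfDomainMatch("hive");                        // -1: no separator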
diff --git service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
deleted file mode 100644
index ece3c75..0000000
--- service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.auth;
-
-import javax.security.sasl.AuthenticationException;
-
-/**
- * This authentication provider allows any combination of username and password.
- */
-public class AnonymousAuthenticationProviderImpl implements PasswdAuthenticationProvider {
-
- @Override
- public void Authenticate(String user, String password) throws AuthenticationException {
- // no-op authentication
- }
-
-}
diff --git service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
deleted file mode 100644
index e7cabc9..0000000
--- service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth;
-
-import javax.security.sasl.AuthenticationException;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-
-/**
- * This class helps select a {@link PasswdAuthenticationProvider} for a given {@code AuthMethod}.
- */
-public final class AuthenticationProviderFactory {
-
- public enum AuthMethods {
- LDAP("LDAP"),
- PAM("PAM"),
- CUSTOM("CUSTOM"),
- NONE("NONE");
-
- private final String authMethod;
-
- private final HiveConf conf = new HiveConf();
-
- AuthMethods(String authMethod) {
- this.authMethod = authMethod;
- }
-
- public String getAuthMethod() {
- return authMethod;
- }
-
- public HiveConf getConf() {
- return conf;
- }
-
- public static AuthMethods getValidAuthMethod(String authMethodStr)
- throws AuthenticationException {
- for (AuthMethods auth : AuthMethods.values()) {
- if (authMethodStr.equals(auth.getAuthMethod())) {
- return auth;
- }
- }
- throw new AuthenticationException("Not a valid authentication method");
- }
- }
-
- private AuthenticationProviderFactory() {
- }
-
- public static PasswdAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod)
- throws AuthenticationException {
- return getAuthenticationProvider(authMethod, null);
- }
- public static PasswdAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod, HiveConf conf)
- throws AuthenticationException {
- if (authMethod == AuthMethods.LDAP) {
- return new LdapAuthenticationProviderImpl((conf == null) ? AuthMethods.LDAP.getConf() : conf);
- } else if (authMethod == AuthMethods.PAM) {
- return new PamAuthenticationProviderImpl((conf == null) ? AuthMethods.PAM.getConf() : conf);
- } else if (authMethod == AuthMethods.CUSTOM) {
- return new CustomAuthenticationProviderImpl((conf == null) ? AuthMethods.CUSTOM.getConf() : conf);
- } else if (authMethod == AuthMethods.NONE) {
- return new AnonymousAuthenticationProviderImpl();
- } else {
- throw new AuthenticationException("Unsupported authentication method");
- }
- }
-}
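
Typical driving code for the factory above, assuming a HiveConf instance named hiveConf is in scope; getValidAuthMethod rejects unknown strings with an AuthenticationException:

    import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;

    AuthMethods method = AuthMethods.getValidAuthMethod("LDAP");
    PasswdAuthenticationProvider provider =
        AuthenticationProviderFactory.getAuthenticationProvider(method, hiveConf);
    provider.Authenticate("someUser", "somePassword"); // throws AuthenticationException on failure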
diff --git service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
deleted file mode 100644
index 3d7ccd9..0000000
--- service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth;
-
-import java.lang.reflect.InvocationTargetException;
-
-import javax.security.sasl.AuthenticationException;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.util.ReflectionUtils;
-
-/**
- * This authentication provider implements the {@code CUSTOM} authentication. It allows a {@link
- * PasswdAuthenticationProvider} to be specified at configuration time which may additionally
- * implement {@link org.apache.hadoop.conf.Configurable Configurable} to grab Hive's {@link
- * org.apache.hadoop.conf.Configuration Configuration}.
- */
-public class CustomAuthenticationProviderImpl implements PasswdAuthenticationProvider {
-
- private final PasswdAuthenticationProvider customProvider;
-
- @SuppressWarnings("unchecked")
- CustomAuthenticationProviderImpl(HiveConf conf) {
- Class<? extends PasswdAuthenticationProvider> customHandlerClass =
- (Class<? extends PasswdAuthenticationProvider>) conf.getClass(
- HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
- PasswdAuthenticationProvider.class);
- PasswdAuthenticationProvider customProvider;
- try {
- customProvider = customHandlerClass.getConstructor(HiveConf.class).newInstance(conf);
- } catch (NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) {
- customProvider = ReflectionUtils.newInstance(customHandlerClass, conf);
- }
- this.customProvider = customProvider;
- }
-
- @Override
- public void Authenticate(String user, String password) throws AuthenticationException {
- customProvider.Authenticate(user, password);
- }
-
-}
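
A sketch of a provider that the CUSTOM path above would load: the class name goes into hive.server2.custom.authentication.class, and the HiveConf constructor is tried first, falling back to ReflectionUtils instantiation. The password check is obviously illustrative.

    import javax.security.sasl.AuthenticationException;
    import org.apache.hadoop.hive.conf.HiveConf;

    public class StaticPasswordProvider implements PasswdAuthenticationProvider {
      private final HiveConf conf;

      public StaticPasswordProvider(HiveConf conf) { // preferred constructor, tried first
        this.conf = conf;
      }

      @Override
      public void Authenticate(String user, String password) throws AuthenticationException {
        if (!"secret".equals(password)) { // illustrative check only
          throw new AuthenticationException("Invalid credentials for " + user);
        }
      }
    }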
diff --git service/src/java/org/apache/hive/service/auth/HiveAuthConstants.java service/src/java/org/apache/hive/service/auth/HiveAuthConstants.java
deleted file mode 100644
index c2fd3a5..0000000
--- service/src/java/org/apache/hive/service/auth/HiveAuthConstants.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.auth;
-
-public class HiveAuthConstants {
- public enum AuthTypes {
- NOSASL("NOSASL"),
- NONE("NONE"),
- LDAP("LDAP"),
- KERBEROS("KERBEROS"),
- CUSTOM("CUSTOM"),
- PAM("PAM");
-
- private final String authType;
-
- AuthTypes(String authType) {
- this.authType = authType;
- }
-
- public String getAuthName() {
- return authType;
- }
- }
-
- public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
- public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken";
-}
diff --git service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index e07cd7e..fe29b65 100644
--- service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -41,7 +41,7 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hive.service.cli.HiveSQLException;
-import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.transport.TSaslServerTransport;
import org.apache.thrift.transport.TTransportException;
@@ -175,7 +175,7 @@
* @return
* @throws LoginException
*/
- public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException {
+ public TProcessorFactory getAuthProcFactory(TCLIService.Iface service) throws LoginException {
if (isSASLWithKerberizedHadoop()) {
return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
} else {
diff --git service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
deleted file mode 100644
index 7dc11b2..0000000
--- service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.auth;
-
-import java.security.PrivilegedExceptionAction;
-import java.security.SecureRandom;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.StringTokenizer;
-
-import javax.security.auth.Subject;
-
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.http.protocol.BasicHttpContext;
-import org.apache.http.protocol.HttpContext;
-import org.ietf.jgss.GSSContext;
-import org.ietf.jgss.GSSManager;
-import org.ietf.jgss.GSSName;
-import org.ietf.jgss.Oid;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Utility functions for HTTP mode authentication.
- */
-public final class HttpAuthUtils {
- public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
- public static final String AUTHORIZATION = "Authorization";
- public static final String BASIC = "Basic";
- public static final String NEGOTIATE = "Negotiate";
- private static final Logger LOG = LoggerFactory.getLogger(HttpAuthUtils.class);
- private static final String COOKIE_ATTR_SEPARATOR = "&";
- private static final String COOKIE_CLIENT_USER_NAME = "cu";
- private static final String COOKIE_CLIENT_RAND_NUMBER = "rn";
- private static final String COOKIE_KEY_VALUE_SEPARATOR = "=";
- private final static Set<String> COOKIE_ATTRIBUTES =
- new HashSet<String>(Arrays.asList(COOKIE_CLIENT_USER_NAME, COOKIE_CLIENT_RAND_NUMBER));
-
- /**
- * @return Stringified Base64 encoded kerberosAuthHeader on success
- * @throws Exception
- */
- public static String getKerberosServiceTicket(String principal, String host, String serverHttpUrl,
- Subject loggedInSubject) throws Exception {
- String serverPrincipal = HadoopThriftAuthBridge.getBridge().getServerPrincipal(principal, host);
- if (loggedInSubject != null) {
- return Subject.doAs(loggedInSubject, new HttpKerberosClientAction(serverPrincipal, serverHttpUrl));
- } else {
- // JAAS login from ticket cache to setup the client UserGroupInformation
- UserGroupInformation clientUGI = HadoopThriftAuthBridge.getBridge().getCurrentUGIWithConf("kerberos");
- return clientUGI.doAs(new HttpKerberosClientAction(serverPrincipal, serverHttpUrl));
- }
- }
-
- /**
- * Creates and returns a HS2 cookie token.
- * @param clientUserName Client User name.
- * @return An unsigned cookie token generated from input parameters.
- * The final cookie generated is of the following format :
- * cu=<username>&rn=<randomNumber>&s=<cookieSignature>
- */
- public static String createCookieToken(String clientUserName) {
- StringBuilder sb = new StringBuilder();
- sb.append(COOKIE_CLIENT_USER_NAME).append(COOKIE_KEY_VALUE_SEPARATOR).append(clientUserName).
- append(COOKIE_ATTR_SEPARATOR);
- sb.append(COOKIE_CLIENT_RAND_NUMBER).append(COOKIE_KEY_VALUE_SEPARATOR).
- append((new SecureRandom()).nextLong());
- return sb.toString();
- }
-
- /**
- * Parses a cookie token to retrieve client user name.
- * @param tokenStr Token String.
- * @return A valid user name if input is of valid format, else returns null.
- */
- public static String getUserNameFromCookieToken(String tokenStr) {
- Map<String, String> map = splitCookieToken(tokenStr);
-
- if (!map.keySet().equals(COOKIE_ATTRIBUTES)) {
- LOG.error("Invalid token with missing attributes " + tokenStr);
- return null;
- }
- return map.get(COOKIE_CLIENT_USER_NAME);
- }
-
- /**
- * Splits the cookie token into attributes pairs.
- * @param str input token.
- * @return a map with the attribute pairs of the token if the input is valid.
- * Else, returns null.
- */
- private static Map<String, String> splitCookieToken(String tokenStr) {
- Map<String, String> map = new HashMap<String, String>();
- StringTokenizer st = new StringTokenizer(tokenStr, COOKIE_ATTR_SEPARATOR);
-
- while (st.hasMoreTokens()) {
- String part = st.nextToken();
- int separator = part.indexOf(COOKIE_KEY_VALUE_SEPARATOR);
- if (separator == -1) {
- LOG.error("Invalid token string " + tokenStr);
- return null;
- }
- String key = part.substring(0, separator);
- String value = part.substring(separator + 1);
- map.put(key, value);
- }
- return map;
- }
-
-
- private HttpAuthUtils() {
- throw new UnsupportedOperationException("Can't initialize class");
- }
-
- /**
- * We'll create an instance of this class within a doAs block so that the client's TGT credentials
- * can be read from the Subject
- */
- public static class HttpKerberosClientAction implements PrivilegedExceptionAction<String> {
- public static final String HTTP_RESPONSE = "HTTP_RESPONSE";
- public static final String SERVER_HTTP_URL = "SERVER_HTTP_URL";
- private final String serverPrincipal;
- private final String serverHttpUrl;
- private final Base64 base64codec;
- private final HttpContext httpContext;
-
- public HttpKerberosClientAction(String serverPrincipal, String serverHttpUrl) {
- this.serverPrincipal = serverPrincipal;
- this.serverHttpUrl = serverHttpUrl;
- base64codec = new Base64(0);
- httpContext = new BasicHttpContext();
- httpContext.setAttribute(SERVER_HTTP_URL, serverHttpUrl);
- }
-
- @Override
- public String run() throws Exception {
- // This is the Oid for the Kerberos GSS-API mechanism.
- Oid mechOid = new Oid("1.2.840.113554.1.2.2");
- // Oid for kerberos principal name
- Oid krb5PrincipalOid = new Oid("1.2.840.113554.1.2.2.1");
- GSSManager manager = GSSManager.getInstance();
- // GSS name for server
- GSSName serverName = manager.createName(serverPrincipal, krb5PrincipalOid);
- // Create a GSSContext for authentication with the service.
- // We're passing client credentials as null since we want them to be read from the Subject.
- GSSContext gssContext =
- manager.createContext(serverName, mechOid, null, GSSContext.DEFAULT_LIFETIME);
- gssContext.requestMutualAuth(false);
- // Establish context
- byte[] inToken = new byte[0];
- byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length);
- gssContext.dispose();
- // Base64 encoded and stringified token for server
- return new String(base64codec.encode(outToken));
- }
- }
-}
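
Round trip of the unsigned cookie-token helpers above (HiveServer2 signs the token elsewhere before sending it):

    String token = HttpAuthUtils.createCookieToken("alice");
    // token looks like "cu=alice&rn=<some random long>"
    String user = HttpAuthUtils.getUserNameFromCookieToken(token); // "alice"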
diff --git service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java
deleted file mode 100644
index 060e558..0000000
--- service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License. See accompanying LICENSE file.
- */
-
-package org.apache.hive.service.auth;
-
-public class HttpAuthenticationException extends Exception {
-
- private static final long serialVersionUID = 0;
-
- /**
- * @param cause original exception
- */
- public HttpAuthenticationException(Throwable cause) {
- super(cause);
- }
-
- /**
- * @param msg exception message
- */
- public HttpAuthenticationException(String msg) {
- super(msg);
- }
-
- /**
- * @param msg exception message
- * @param cause original exception
- */
- public HttpAuthenticationException(String msg, Throwable cause) {
- super(msg, cause);
- }
-
-}
diff --git service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
deleted file mode 100644
index 14d2008..0000000
--- service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth;
-
-import java.io.IOException;
-import java.util.Map;
-
-import javax.security.sasl.SaslException;
-
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge.Server;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hive.service.cli.thrift.ThriftCLIService;
-import org.apache.hive.service.rpc.thrift.TCLIService;
-import org.apache.hive.service.rpc.thrift.TCLIService.Iface;
-import org.apache.thrift.TProcessor;
-import org.apache.thrift.TProcessorFactory;
-import org.apache.thrift.transport.TSaslClientTransport;
-import org.apache.thrift.transport.TTransport;
-
-public final class KerberosSaslHelper {
-
- public static TProcessorFactory getKerberosProcessorFactory(Server saslServer,
- ThriftCLIService service) {
- return new CLIServiceProcessorFactory(saslServer, service);
- }
-
- public static TTransport getKerberosTransport(String principal, String host,
- TTransport underlyingTransport, Map<String, String> saslProps, boolean assumeSubject)
- throws SaslException {
- try {
- String[] names = principal.split("[/@]");
- if (names.length != 3) {
- throw new IllegalArgumentException("Kerberos principal should have 3 parts: " + principal);
- }
-
- if (assumeSubject) {
- return createSubjectAssumedTransport(principal, host, underlyingTransport, saslProps);
- } else {
- HadoopThriftAuthBridge.Client authBridge =
- HadoopThriftAuthBridge.getBridge().createClientWithConf("kerberos");
- return authBridge.createClientTransport(principal, host, "KERBEROS", null,
- underlyingTransport, saslProps);
- }
- } catch (IOException e) {
- throw new SaslException("Failed to open client transport", e);
- }
- }
-
- /**
- * Helper to wrap the {@code underlyingTransport} into an assumed kerberos principal.
- * The function is used for kerberos based authentication, where {@code kerberosAuthType}
- * is set to {@code fromSubject}. It also performs a substitution of {@code _HOST} with the
- * local host name, if required.
- *
- * @param principal The kerberos principal to assume
- * @param host Host, used to replace the {@code _HOST} with
- * @param underlyingTransport The I/O transport to wrap
- * @param saslProps SASL property map
- * @return The wrapped transport
- * @throws IOException
- */
- public static TTransport createSubjectAssumedTransport(String principal, String host,
- TTransport underlyingTransport, Map<String, String> saslProps) throws IOException {
- String resolvedPrincipal = SecurityUtil.getServerPrincipal(principal, host);
- String[] names = resolvedPrincipal.split("[/@]");
- try {
- TTransport saslTransport =
- new TSaslClientTransport("GSSAPI", null, names[0], names[1], saslProps, null,
- underlyingTransport);
- return new TSubjectAssumingTransport(saslTransport);
- } catch (SaslException se) {
- throw new IOException("Could not instantiate SASL transport", se);
- }
- }
-
- public static TTransport getTokenTransport(String tokenStr, String host,
- TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
- HadoopThriftAuthBridge.Client authBridge =
- HadoopThriftAuthBridge.getBridge().createClientWithConf("kerberos");
-
- try {
- return authBridge.createClientTransport(null, host, "DIGEST", tokenStr, underlyingTransport,
- saslProps);
- } catch (IOException e) {
- throw new SaslException("Failed to open client transport", e);
- }
- }
-
- private KerberosSaslHelper() {
- throw new UnsupportedOperationException("Can't initialize class");
- }
-
- private static class CLIServiceProcessorFactory extends TProcessorFactory {
-
- private final ThriftCLIService service;
- private final Server saslServer;
-
- public CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
- super(null);
- this.service = service;
- this.saslServer = saslServer;
- }
-
- @Override
- public TProcessor getProcessor(TTransport trans) {
- TProcessor sqlProcessor = new TCLIService.Processor<Iface>(service);
- return saslServer.wrapNonAssumingProcessor(sqlProcessor);
- }
- }
-}
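
Client-side usage sketch for getKerberosTransport above; the endpoint and principal are illustrative, and the principal must split into exactly three parts on '/' and '@'. Checked exceptions (SaslException, TTransportException) are elided here:

    TTransport socket = new TSocket("hs2.example.com", 10000); // hypothetical endpoint
    TTransport transport = KerberosSaslHelper.getKerberosTransport(
        "hive/hs2.example.com@EXAMPLE.COM", "hs2.example.com", socket,
        new HashMap<String, String>(), /* assumeSubject */ false);
    transport.open();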
diff --git service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
deleted file mode 100644
index 0f318fb..0000000
--- service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth;
-
-import javax.security.sasl.AuthenticationException;
-import javax.naming.NamingException;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.service.ServiceUtils;
-import org.apache.hive.service.auth.ldap.ChainFilterFactory;
-import org.apache.hive.service.auth.ldap.CustomQueryFilterFactory;
-import org.apache.hive.service.auth.ldap.LdapSearchFactory;
-import org.apache.hive.service.auth.ldap.Filter;
-import org.apache.hive.service.auth.ldap.DirSearch;
-import org.apache.hive.service.auth.ldap.DirSearchFactory;
-import org.apache.hive.service.auth.ldap.FilterFactory;
-import org.apache.hive.service.auth.ldap.GroupFilterFactory;
-import org.apache.hive.service.auth.ldap.LdapUtils;
-import org.apache.hive.service.auth.ldap.UserFilterFactory;
-import org.apache.hive.service.auth.ldap.UserSearchFilterFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class LdapAuthenticationProviderImpl implements PasswdAuthenticationProvider {
-
- private static final Logger LOG = LoggerFactory.getLogger(LdapAuthenticationProviderImpl.class);
-
- private static final List<FilterFactory> FILTER_FACTORIES = ImmutableList.of(
- new CustomQueryFilterFactory(),
- new ChainFilterFactory(new UserSearchFilterFactory(), new UserFilterFactory(),
- new GroupFilterFactory())
- );
-
- private final HiveConf conf;
- private final Filter filter;
- private final DirSearchFactory searchFactory;
-
- public LdapAuthenticationProviderImpl(HiveConf conf) {
- this(conf, new LdapSearchFactory());
- }
-
- @VisibleForTesting
- LdapAuthenticationProviderImpl(HiveConf conf, DirSearchFactory searchFactory) {
- this.conf = conf;
- this.searchFactory = searchFactory;
- filter = resolveFilter(conf);
- }
-
- @Override
- public void Authenticate(String user, String password) throws AuthenticationException {
- DirSearch search = null;
- String bindUser = this.conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_USER);
- String bindPassword = null;
- try {
- char[] rawPassword = this.conf.getPassword(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_PASSWORD.toString());
- if (rawPassword != null) {
- bindPassword = new String(rawPassword);
- }
- } catch (IOException e) {
- bindPassword = null;
- }
- boolean usedBind = bindUser != null && bindPassword != null;
- if (!usedBind) {
- // If no bind user or bind password was specified,
- // we assume the user we are authenticating has the ability to search
- // the LDAP tree, so we use it as the "binding" account.
- // This is the way it worked before bind users were allowed in the LDAP authenticator,
- // so we keep existing systems working.
- bindUser = user;
- bindPassword = password;
- }
- try {
- search = createDirSearch(bindUser, bindPassword);
- applyFilter(search, user);
- if (usedBind) {
- // If we used the bind user, then we need to authenticate again,
- // this time using the full user name we got during the bind process.
- createDirSearch(search.findUserDn(user), password);
- }
- } catch (NamingException e) {
- throw new AuthenticationException("Unable to find the user in the LDAP tree. " + e.getMessage());
- } finally {
- ServiceUtils.cleanup(LOG, search);
- }
- }
-
- private DirSearch createDirSearch(String user, String password) throws AuthenticationException {
- if (StringUtils.isBlank(user)) {
- throw new AuthenticationException("Error validating LDAP user:"
- + " a null or blank user name has been provided");
- }
- if (StringUtils.isBlank(password) || password.getBytes()[0] == 0) {
- throw new AuthenticationException("Error validating LDAP user:"
- + " a null or blank password has been provided");
- }
- List<String> principals = LdapUtils.createCandidatePrincipals(conf, user);
- for (Iterator<String> iterator = principals.iterator(); iterator.hasNext();) {
- String principal = iterator.next();
- try {
- return searchFactory.getInstance(conf, principal, password);
- } catch (AuthenticationException ex) {
- if (!iterator.hasNext()) {
- throw ex;
- }
- }
- }
- throw new AuthenticationException(
- String.format("No candidate principals for %s was found.", user));
- }
-
- private static Filter resolveFilter(HiveConf conf) {
- for (FilterFactory filterProvider : FILTER_FACTORIES) {
- Filter filter = filterProvider.getInstance(conf);
- if (filter != null) {
- return filter;
- }
- }
- return null;
- }
-
- private void applyFilter(DirSearch client, String user) throws AuthenticationException {
- if (filter != null) {
- if (LdapUtils.hasDomain(user)) {
- filter.apply(client, LdapUtils.extractUserName(user));
- } else {
- filter.apply(client, user);
- }
- }
- }
-}
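
The bind-user path above is controlled by two existing HiveConf settings; shown programmatically here for illustration, though they normally live in hive-site.xml. The bind password is resolved via conf.getPassword(), so it can come from a Hadoop credential provider instead of plain text.

    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_USER,
        "cn=reader,dc=example,dc=com"); // illustrative bind DN
    PasswdAuthenticationProvider ldap = new LdapAuthenticationProviderImpl(conf);
    ldap.Authenticate("alice", "alicePassword"); // throws AuthenticationException on failure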
diff --git service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java
deleted file mode 100644
index fff378a..0000000
--- service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth;
-
-import javax.security.sasl.AuthenticationException;
-
-import net.sf.jpam.Pam;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-
-public class PamAuthenticationProviderImpl implements PasswdAuthenticationProvider {
-
- private final String pamServiceNames;
-
- PamAuthenticationProviderImpl(HiveConf conf) {
- pamServiceNames = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES);
- }
-
- @Override
- public void Authenticate(String user, String password) throws AuthenticationException {
-
- if (pamServiceNames == null || pamServiceNames.trim().isEmpty()) {
- throw new AuthenticationException("No PAM services are set.");
- }
-
- String errorMsg = "Error authenticating with the PAM service: ";
- String[] pamServices = pamServiceNames.split(",");
- for (String pamService : pamServices) {
- try {
- Pam pam = new Pam(pamService);
- boolean isAuthenticated = pam.authenticateSuccessful(user, password);
- if (!isAuthenticated) {
- throw new AuthenticationException(errorMsg + pamService);
- }
- } catch(Throwable e) {
- // Catch the exception caused by missing jpam.so which otherwise would
- // crash the thread and cause the client to hang rather than notifying
- // the client nicely
- throw new AuthenticationException(errorMsg + pamService, e);
- }
- }
- }
-}
diff --git service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
deleted file mode 100644
index fdc6857..0000000
--- service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth;
-
-import javax.security.sasl.AuthenticationException;
-
-public interface PasswdAuthenticationProvider {
-
- /**
- * The Authenticate method is called by the HiveServer2 authentication layer
- * to authenticate users for their requests.
- * If a user is to be granted, return nothing/throw nothing.
- * When a user is to be disallowed, throw an appropriate {@link AuthenticationException}.
- *
- * For an example implementation, see {@link LdapAuthenticationProviderImpl}.
- *
- * @param user The username received over the connection request
- * @param password The password received over the connection request
- *
- * @throws AuthenticationException When a user is found to be
- * invalid by the implementation
- */
- void Authenticate(String user, String password) throws AuthenticationException;
-}
diff --git service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
deleted file mode 100644
index 0742311..0000000
--- service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.security.Security;
-import java.util.HashMap;
-
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.callback.NameCallback;
-import javax.security.auth.callback.PasswordCallback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.auth.login.LoginException;
-import javax.security.sasl.AuthenticationException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.SaslException;
-
-import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
-import org.apache.hive.service.auth.PlainSaslServer.SaslPlainProvider;
-import org.apache.hive.service.cli.thrift.ThriftCLIService;
-import org.apache.hive.service.rpc.thrift.TCLIService.Iface;
-import org.apache.thrift.TProcessor;
-import org.apache.thrift.TProcessorFactory;
-import org.apache.thrift.transport.TSaslClientTransport;
-import org.apache.thrift.transport.TSaslServerTransport;
-import org.apache.thrift.transport.TSocket;
-import org.apache.thrift.transport.TTransport;
-import org.apache.thrift.transport.TTransportFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public final class PlainSaslHelper {
- private static final Logger LOG = LoggerFactory.getLogger(PlainSaslHelper.class);
-
- public static TProcessorFactory getPlainProcessorFactory(ThriftCLIService service) {
- return new SQLPlainProcessorFactory(service);
- }
-
- // Register Plain SASL server provider
- static {
- Security.addProvider(new SaslPlainProvider());
- }
-
- public static TTransportFactory getPlainTransportFactory(String authTypeStr)
- throws LoginException {
- TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
- try {
- saslFactory.addServerDefinition("PLAIN", authTypeStr, null, new HashMap<String, String>(),
- new PlainServerCallbackHandler(authTypeStr));
- } catch (AuthenticationException e) {
- throw new LoginException("Error setting callback handler: " + e);
- }
- return saslFactory;
- }
-
- static TTransportFactory getDualPlainTransportFactory(TTransportFactory otherTrans,
- String trustedDomain)
- throws LoginException {
- LOG.info("Created additional transport factory for skipping authentication when client " +
- "connection is from the same domain.");
- return new DualSaslTransportFactory(otherTrans, trustedDomain);
- }
-
- public static TTransport getPlainTransport(String username, String password,
- TTransport underlyingTransport) throws SaslException {
- return new TSaslClientTransport("PLAIN", null, null, null, new HashMap<String, String>(),
- new PlainCallbackHandler(username, password), underlyingTransport);
- }
-
- // Return true if the remote host is from the trusted domain, i.e. host URL has the same
- // suffix as the trusted domain.
- static public boolean isHostFromTrustedDomain(String remoteHost, String trustedDomain) {
- return remoteHost.endsWith(trustedDomain);
- }
-
- private PlainSaslHelper() {
- throw new UnsupportedOperationException("Can't initialize class");
- }
-
- static final class DualSaslTransportFactory extends TTransportFactory {
- TTransportFactory otherFactory;
- TTransportFactory noAuthFactory;
- String trustedDomain;
-
- DualSaslTransportFactory(TTransportFactory otherFactory, String trustedDomain)
- throws LoginException {
- this.noAuthFactory = getPlainTransportFactory(AuthMethods.NONE.toString());
- this.otherFactory = otherFactory;
- this.trustedDomain = trustedDomain;
- }
-
- @Override
- public TTransport getTransport(final TTransport trans) {
- TSocket tSocket = null;
- // Skip authentication only if we can fetch the client IP address and it
- // happens to be from the same domain as the server.
- if (trans instanceof TSocket) {
- tSocket = (TSocket) trans;
- } else if (trans instanceof TSaslServerTransport) {
- TSaslServerTransport saslTrans = (TSaslServerTransport) trans;
- tSocket = (TSocket)(saslTrans.getUnderlyingTransport());
- }
- String remoteHost = tSocket != null ?
- tSocket.getSocket().getInetAddress().getCanonicalHostName() : null;
- if (remoteHost != null && isHostFromTrustedDomain(remoteHost, trustedDomain)) {
- LOG.info("No authentication performed because the connecting host " + remoteHost + " is " +
- "from the trusted domain " + trustedDomain);
- return noAuthFactory.getTransport(trans);
- }
-
- return otherFactory.getTransport(trans);
- }
- }
-
- public static final class PlainServerCallbackHandler implements CallbackHandler {
-
- private final AuthMethods authMethod;
-
- PlainServerCallbackHandler(String authMethodStr) throws AuthenticationException {
- authMethod = AuthMethods.getValidAuthMethod(authMethodStr);
- }
-
- @Override
- public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
- String username = null;
- String password = null;
- AuthorizeCallback ac = null;
-
- for (Callback callback : callbacks) {
- if (callback instanceof NameCallback) {
- NameCallback nc = (NameCallback) callback;
- username = nc.getName();
- } else if (callback instanceof PasswordCallback) {
- PasswordCallback pc = (PasswordCallback) callback;
- password = new String(pc.getPassword());
- } else if (callback instanceof AuthorizeCallback) {
- ac = (AuthorizeCallback) callback;
- } else {
- throw new UnsupportedCallbackException(callback);
- }
- }
- PasswdAuthenticationProvider provider =
- AuthenticationProviderFactory.getAuthenticationProvider(authMethod);
- provider.Authenticate(username, password);
- if (ac != null) {
- ac.setAuthorized(true);
- }
- }
- }
-
- public static class PlainCallbackHandler implements CallbackHandler {
-
- private final String username;
- private final String password;
-
- public PlainCallbackHandler(String username, String password) {
- this.username = username;
- this.password = password;
- }
-
- @Override
- public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
- for (Callback callback : callbacks) {
- if (callback instanceof NameCallback) {
- NameCallback nameCallback = (NameCallback) callback;
- nameCallback.setName(username);
- } else if (callback instanceof PasswordCallback) {
- PasswordCallback passCallback = (PasswordCallback) callback;
- passCallback.setPassword(password.toCharArray());
- } else {
- throw new UnsupportedCallbackException(callback);
- }
- }
- }
- }
-
- private static final class SQLPlainProcessorFactory extends TProcessorFactory {
-
- private final ThriftCLIService service;
-
- SQLPlainProcessorFactory(ThriftCLIService service) {
- super(null);
- this.service = service;
- }
-
- @Override
- public TProcessor getProcessor(TTransport trans) {
- return new TSetIpAddressProcessor<Iface>(service);
- }
- }
-
-}
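For reference, the trusted-domain shortcut in DualSaslTransportFactory above reduces to a plain suffix match on the client's canonical hostname. A minimal sketch of the check's semantics, with made-up hostnames (and assuming the class stays on the classpath after this restructuring):

    import org.apache.hive.service.auth.PlainSaslHelper;

    public class TrustedDomainCheckExample {
      public static void main(String[] args) {
        // Hypothetical hostnames, for illustration only.
        System.out.println(PlainSaslHelper.isHostFromTrustedDomain(
            "node7.corp.example.com", "corp.example.com")); // true
        System.out.println(PlainSaslHelper.isHostFromTrustedDomain(
            "evil.example.org", "corp.example.com"));       // false
      }
    }

Note the check is a raw String.endsWith, so "badcorp.example.com" would also match the trusted suffix "corp.example.com"; where that matters, the configured trusted domain should include the leading dot.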
diff --git service/src/java/org/apache/hive/service/auth/PlainSaslServer.java service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
deleted file mode 100644
index 0c5ccff..0000000
--- service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth;
-
-import java.io.IOException;
-import java.security.Provider;
-import java.util.ArrayDeque;
-import java.util.Deque;
-import java.util.Map;
-
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.callback.NameCallback;
-import javax.security.auth.callback.PasswordCallback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.SaslException;
-import javax.security.sasl.SaslServer;
-import javax.security.sasl.SaslServerFactory;
-
-import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
-
-/**
- * Sun JDK only provides a PLAIN client and no server. This class implements the Plain SASL server
- * conforming to RFC #4616 (http://www.ietf.org/rfc/rfc4616.txt).
- */
-public class PlainSaslServer implements SaslServer {
-
- public static final String PLAIN_METHOD = "PLAIN";
- private String user;
- private final CallbackHandler handler;
-
- PlainSaslServer(CallbackHandler handler, String authMethodStr) throws SaslException {
- this.handler = handler;
- AuthMethods.getValidAuthMethod(authMethodStr);
- }
-
- @Override
- public String getMechanismName() {
- return PLAIN_METHOD;
- }
-
- @Override
- public byte[] evaluateResponse(byte[] response) throws SaslException {
- try {
- // parse the response
- // message = [authzid] UTF8NUL authcid UTF8NUL passwd'
-
- Deque<String> tokenList = new ArrayDeque<String>();
- StringBuilder messageToken = new StringBuilder();
- for (byte b : response) {
- if (b == 0) {
- tokenList.addLast(messageToken.toString());
- messageToken = new StringBuilder();
- } else {
- messageToken.append((char) b);
- }
- }
- tokenList.addLast(messageToken.toString());
-
- // validate response
- if (tokenList.size() < 2 || tokenList.size() > 3) {
- throw new SaslException("Invalid message format");
- }
- String passwd = tokenList.removeLast();
- user = tokenList.removeLast();
- // optional authzid
- String authzId;
- if (tokenList.isEmpty()) {
- authzId = user;
- } else {
- authzId = tokenList.removeLast();
- }
- if (user == null || user.isEmpty()) {
- throw new SaslException("No user name provided");
- }
- if (passwd == null || passwd.isEmpty()) {
- throw new SaslException("No password name provided");
- }
-
- NameCallback nameCallback = new NameCallback("User");
- nameCallback.setName(user);
- PasswordCallback pcCallback = new PasswordCallback("Password", false);
- pcCallback.setPassword(passwd.toCharArray());
- AuthorizeCallback acCallback = new AuthorizeCallback(user, authzId);
-
- Callback[] cbList = {nameCallback, pcCallback, acCallback};
- handler.handle(cbList);
- if (!acCallback.isAuthorized()) {
- throw new SaslException("Authentication failed");
- }
- } catch (IllegalStateException eL) {
- throw new SaslException("Invalid message format", eL);
- } catch (IOException eI) {
- throw new SaslException("Error validating the login", eI);
- } catch (UnsupportedCallbackException eU) {
- throw new SaslException("Error validating the login", eU);
- }
- return null;
- }
-
- @Override
- public boolean isComplete() {
- return user != null;
- }
-
- @Override
- public String getAuthorizationID() {
- return user;
- }
-
- @Override
- public byte[] unwrap(byte[] incoming, int offset, int len) {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public byte[] wrap(byte[] outgoing, int offset, int len) {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public Object getNegotiatedProperty(String propName) {
- return null;
- }
-
- @Override
- public void dispose() {}
-
- public static class SaslPlainServerFactory implements SaslServerFactory {
-
- @Override
- public SaslServer createSaslServer(String mechanism, String protocol, String serverName,
- Map<String, ?> props, CallbackHandler cbh) {
- if (PLAIN_METHOD.equals(mechanism)) {
- try {
- return new PlainSaslServer(cbh, protocol);
- } catch (SaslException e) {
- /* This is to fulfill the contract of the interface which states that an exception shall
- be thrown when a SaslServer cannot be created due to an error but null should be
- returned when a Server can't be created due to the parameters supplied. And the only
- thing PlainSaslServer can fail on is a non-supported authentication mechanism.
- That's why we return null instead of throwing the Exception */
- return null;
- }
- }
- return null;
- }
-
- @Override
- public String[] getMechanismNames(Map<String, ?> props) {
- return new String[] {PLAIN_METHOD};
- }
- }
-
- public static class SaslPlainProvider extends Provider {
-
- public SaslPlainProvider() {
- super("HiveSaslPlain", 1.0, "Hive Plain SASL provider");
- put("SaslServerFactory.PLAIN", SaslPlainServerFactory.class.getName());
- }
- }
-}
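The wire format that evaluateResponse() tokenizes above is the RFC 4616 message "[authzid] UTF8NUL authcid UTF8NUL passwd". A small sketch of what a client sends (credentials are placeholders):

    import java.nio.charset.StandardCharsets;

    public class PlainMessageExample {
      public static void main(String[] args) {
        String authzid = "hive";   // optional authorization id
        String authcid = "hive";   // authentication id, i.e. the user name
        String passwd  = "secret"; // placeholder password
        // [authzid] UTF8NUL authcid UTF8NUL passwd
        byte[] response = (authzid + '\0' + authcid + '\0' + passwd)
            .getBytes(StandardCharsets.UTF_8);
        System.out.println(response.length); // 16 bytes for these values
      }
    }

When the authzid field is omitted entirely (just "authcid NUL passwd"), the server falls back to using the authcid as the authorization id, per the tokenList.isEmpty() branch above.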
diff --git service/src/java/org/apache/hive/service/auth/SaslQOP.java service/src/java/org/apache/hive/service/auth/SaslQOP.java
deleted file mode 100644
index b20ff70..0000000
--- service/src/java/org/apache/hive/service/auth/SaslQOP.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.auth;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Possible values of SASL quality-of-protection value.
- */
-public enum SaslQOP {
- AUTH("auth"), // Authentication only.
- AUTH_INT("auth-int"), // Authentication and integrity checking by using signatures.
- AUTH_CONF("auth-conf"); // Authentication, integrity and confidentiality checking
- // by using signatures and encryption.
-
- public final String saslQop;
-
- private static final Map<String, SaslQOP> STR_TO_ENUM = new HashMap<String, SaslQOP>();
-
- static {
- for (SaslQOP saslQop : values()) {
- STR_TO_ENUM.put(saslQop.toString(), saslQop);
- }
- }
-
- SaslQOP(String saslQop) {
- this.saslQop = saslQop;
- }
-
- public String toString() {
- return saslQop;
- }
-
- public static SaslQOP fromString(String str) {
- if (str != null) {
- str = str.toLowerCase();
- }
- SaslQOP saslQOP = STR_TO_ENUM.get(str);
- if (saslQOP == null) {
- throw new IllegalArgumentException(
- "Unknown auth type: " + str + " Allowed values are: " + STR_TO_ENUM.keySet());
- }
- return saslQOP;
- }
-}
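A short usage sketch of the enum's string round-trip, which is what fromString() above provides:

    import org.apache.hive.service.auth.SaslQOP;

    public class SaslQopExample {
      public static void main(String[] args) {
        // Lookup is case-insensitive thanks to the toLowerCase() in fromString().
        SaslQOP qop = SaslQOP.fromString("AUTH-CONF");
        System.out.println(qop);                      // auth-conf
        System.out.println(qop == SaslQOP.AUTH_CONF); // true
        try {
          SaslQOP.fromString("bogus");
        } catch (IllegalArgumentException e) {
          System.out.println(e.getMessage()); // lists the allowed values
        }
      }
    }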
diff --git service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
deleted file mode 100644
index 8e4659b..0000000
--- service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.auth;
-
-import org.apache.hive.service.rpc.thrift.TCLIService;
-import org.apache.hive.service.rpc.thrift.TCLIService.Iface;
-import org.apache.thrift.TException;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.transport.TSaslClientTransport;
-import org.apache.thrift.transport.TSaslServerTransport;
-import org.apache.thrift.transport.TSocket;
-import org.apache.thrift.transport.TTransport;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * This class is responsible for setting the ipAddress for operations executed via HiveServer2.
- *
- * <p>
- * <ul>
- * <li>IP address is only set for operations that call listeners with a hookContext.</li>
- * <li>IP address is only set if the underlying transport mechanism is socket.</li>
- * </ul>
- * </p>
- *
- * @see org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext
- */
-public class TSetIpAddressProcessor<I extends Iface> extends TCLIService.Processor<Iface> {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(TSetIpAddressProcessor.class.getName());
-
- public TSetIpAddressProcessor(Iface iface) {
- super(iface);
- }
-
- @Override
- public boolean process(final TProtocol in, final TProtocol out) throws TException {
- setIpAddress(in);
- setUserName(in);
- try {
- return super.process(in, out);
- } finally {
- THREAD_LOCAL_USER_NAME.remove();
- THREAD_LOCAL_IP_ADDRESS.remove();
- }
- }
-
- private void setUserName(final TProtocol in) {
- TTransport transport = in.getTransport();
- if (transport instanceof TSaslServerTransport) {
- String userName = ((TSaslServerTransport) transport).getSaslServer().getAuthorizationID();
- THREAD_LOCAL_USER_NAME.set(userName);
- }
- }
-
- protected void setIpAddress(final TProtocol in) {
- TTransport transport = in.getTransport();
- TSocket tSocket = getUnderlyingSocketFromTransport(transport);
- if (tSocket == null) {
- LOGGER.warn("Unknown Transport, cannot determine ipAddress");
- } else {
- THREAD_LOCAL_IP_ADDRESS.set(tSocket.getSocket().getInetAddress().getHostAddress());
- }
- }
-
- private TSocket getUnderlyingSocketFromTransport(TTransport transport) {
- while (transport != null) {
- if (transport instanceof TSaslServerTransport) {
- transport = ((TSaslServerTransport) transport).getUnderlyingTransport();
- }
- if (transport instanceof TSaslClientTransport) {
- transport = ((TSaslClientTransport) transport).getUnderlyingTransport();
- }
- if (transport instanceof TSocket) {
- return (TSocket) transport;
- }
- }
- return null;
- }
-
- private static final ThreadLocal<String> THREAD_LOCAL_IP_ADDRESS = new ThreadLocal<String>() {
- @Override
- protected String initialValue() {
- return null;
- }
- };
-
- private static final ThreadLocal<String> THREAD_LOCAL_USER_NAME = new ThreadLocal<String>() {
- @Override
- protected String initialValue() {
- return null;
- }
- };
-
- public static String getUserIpAddress() {
- return THREAD_LOCAL_IP_ADDRESS.get();
- }
-
- public static String getUserName() {
- return THREAD_LOCAL_USER_NAME.get();
- }
-}
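The two static getters above are the intended read side of the thread locals: anything running on the same worker thread as the Thrift call (session handling, audit hooks) can ask who the caller is. A sketch under that assumption:

    import org.apache.hive.service.auth.TSetIpAddressProcessor;

    public class CallerContextExample {
      // Must run on the worker thread that is processing the Thrift call;
      // on any other thread both getters return null.
      public static String describeCaller() {
        String user = TSetIpAddressProcessor.getUserName();    // null unless a SASL transport is in use
        String ip = TSetIpAddressProcessor.getUserIpAddress(); // null unless the transport is socket-based
        return user + "@" + ip;
      }
    }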
diff --git service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java
deleted file mode 100644
index 1cbab81..0000000
--- service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.auth;
-
-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-
-import javax.security.auth.Subject;
-
-import org.apache.hadoop.hive.thrift.TFilterTransport;
-import org.apache.thrift.transport.TTransport;
-import org.apache.thrift.transport.TTransportException;
-
-/**
- * This is used on the client side, where the API explicitly opens a transport to
- * the server using the Subject.doAs().
- */
-public class TSubjectAssumingTransport extends TFilterTransport {
-
- public TSubjectAssumingTransport(TTransport wrapped) {
- super(wrapped);
- }
-
- @Override
- public void open() throws TTransportException {
- try {
- AccessControlContext context = AccessController.getContext();
- Subject subject = Subject.getSubject(context);
- Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
- public Void run() {
- try {
- wrapped.open();
- } catch (TTransportException tte) {
- // Wrap the transport exception in an RTE, since Subject.doAs() then goes
- // and unwraps this for us out of the doAs block. We then unwrap one
- // more time in our catch clause to get back the TTE. (ugh)
- throw new RuntimeException(tte);
- }
- return null;
- }
- });
- } catch (PrivilegedActionException ioe) {
- throw new RuntimeException("Received an ioe we never threw!", ioe);
- } catch (RuntimeException rte) {
- if (rte.getCause() instanceof TTransportException) {
- throw (TTransportException) rte.getCause();
- } else {
- throw rte;
- }
- }
- }
-
-}
diff --git service/src/java/org/apache/hive/service/auth/ldap/ChainFilterFactory.java service/src/java/org/apache/hive/service/auth/ldap/ChainFilterFactory.java
deleted file mode 100644
index 8d0340f..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/ChainFilterFactory.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import com.google.common.collect.ImmutableList;
-import java.util.ArrayList;
-import java.util.List;
-import javax.security.sasl.AuthenticationException;
-import org.apache.hadoop.hive.conf.HiveConf;
-
-/**
- * A factory that produces a {@link Filter} that is implemented as a chain of other filters.
- * The chain of filters is created by the
- * {@link #getInstance(org.apache.hadoop.hive.conf.HiveConf)}
- * method call. The resulting object filters out all users that don't pass all
- * chained filters. The filters are applied in the order they are passed to the factory
- * constructor.
- */
-public class ChainFilterFactory implements FilterFactory {
-
- private final List<FilterFactory> chainedFactories;
-
- /**
- * Constructs a factory for a chain of filters.
- *
- * @param factories The array of factories that will be used to construct a chain of filters.
- */
- public ChainFilterFactory(FilterFactory... factories) {
- this.chainedFactories = ImmutableList.copyOf(factories);
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public Filter getInstance(HiveConf conf) {
- List<Filter> filters = new ArrayList<>();
- for (FilterFactory factory : chainedFactories) {
- Filter filter = factory.getInstance(conf);
- if (filter != null) {
- filters.add(filter);
- }
- }
-
- return filters.isEmpty() ? null : new ChainFilter(ImmutableList.copyOf(filters));
- }
-
- private static final class ChainFilter implements Filter {
-
- private final List<Filter> chainedFilters;
-
- public ChainFilter(List<Filter> chainedFilters) {
- this.chainedFilters = chainedFilters;
- }
-
- @Override
- public void apply(DirSearch client, String user) throws AuthenticationException {
- for (Filter filter : chainedFilters) {
- filter.apply(client, user);
- }
- }
- }
-}
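Filter factories compose, which is how the LDAP module can apply group and custom-query checks as one unit. A minimal sketch of building such a chain by hand (whether a filter is produced depends entirely on what is configured):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.auth.ldap.ChainFilterFactory;
    import org.apache.hive.service.auth.ldap.CustomQueryFilterFactory;
    import org.apache.hive.service.auth.ldap.Filter;
    import org.apache.hive.service.auth.ldap.FilterFactory;
    import org.apache.hive.service.auth.ldap.GroupFilterFactory;

    public class ChainFilterExample {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        FilterFactory factory =
            new ChainFilterFactory(new GroupFilterFactory(), new CustomQueryFilterFactory());
        // null means no chained factory produced a filter for this configuration,
        // so callers can skip post-bind filtering altogether.
        Filter filter = factory.getInstance(conf);
        System.out.println(filter == null ? "no filtering configured" : "chain active");
      }
    }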
diff --git service/src/java/org/apache/hive/service/auth/ldap/CustomQueryFilterFactory.java service/src/java/org/apache/hive/service/auth/ldap/CustomQueryFilterFactory.java
deleted file mode 100644
index 30ce1a6..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/CustomQueryFilterFactory.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import com.google.common.base.Strings;
-import java.util.List;
-import javax.naming.NamingException;
-import javax.security.sasl.AuthenticationException;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A factory for a {@link Filter} based on a custom query.
- *
- * The produced filter object filters out all users that are not found in the search result
- * of the query provided in Hive configuration.
- * @see org.apache.hadoop.hive.conf.HiveConf.ConfVars#HIVE_SERVER2_PLAIN_LDAP_CUSTOMLDAPQUERY
- */
-public class CustomQueryFilterFactory implements FilterFactory {
-
- /**
- * {@inheritDoc}
- */
- @Override
- public Filter getInstance(HiveConf conf) {
- String customQuery = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_CUSTOMLDAPQUERY);
-
- if (Strings.isNullOrEmpty(customQuery)) {
- return null;
- }
-
- return new CustomQueryFilter(customQuery);
- }
-
- private static final class CustomQueryFilter implements Filter {
-
- private static final Logger LOG = LoggerFactory.getLogger(CustomQueryFilter.class);
-
- private final String query;
-
- public CustomQueryFilter(String query) {
- this.query = query;
- }
-
- @Override
- public void apply(DirSearch client, String user) throws AuthenticationException {
- List<String> resultList;
- try {
- resultList = client.executeCustomQuery(query);
- } catch (NamingException e) {
- throw new AuthenticationException("LDAP Authentication failed for user", e);
- }
- if (resultList != null) {
- for (String matchedDn : resultList) {
- String shortUserName = LdapUtils.getShortName(matchedDn);
- LOG.info("");
- if (shortUserName.equalsIgnoreCase(user) || matchedDn.equalsIgnoreCase(user)) {
- LOG.info("Authentication succeeded based on result set from LDAP query");
- return;
- }
- }
- }
- LOG.info("Authentication failed based on result set from custom LDAP query");
- throw new AuthenticationException("Authentication failed: LDAP query "
- + "from property returned no data");
- }
- }
-}
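The filter above only activates when the custom-query property is set. A sketch (the LDAP filter string is a made-up example):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.auth.ldap.CustomQueryFilterFactory;
    import org.apache.hive.service.auth.ldap.Filter;

    public class CustomQueryConfigExample {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Hypothetical query: pass only members of a specific group.
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_CUSTOMLDAPQUERY,
            "(&(objectClass=person)(memberOf=cn=hive-users,ou=Groups,dc=example,dc=com))");
        Filter filter = new CustomQueryFilterFactory().getInstance(conf);
        System.out.println(filter != null); // true once the query property is set
      }
    }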
diff --git service/src/java/org/apache/hive/service/auth/ldap/DirSearch.java service/src/java/org/apache/hive/service/auth/ldap/DirSearch.java
deleted file mode 100644
index da8cc66..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/DirSearch.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import java.io.Closeable;
-import java.util.List;
-import javax.naming.NamingException;
-
-/**
- * The object used for executing queries on the Directory Service.
- */
-public interface DirSearch extends Closeable {
-
- /**
- * Finds user's distinguished name.
- * @param user username
- * @return DN for the specified username
- * @throws NamingException
- */
- String findUserDn(String user) throws NamingException;
-
- /**
- * Finds group's distinguished name.
- * @param group group name or unique identifier
- * @return DN for the specified group name
- * @throws NamingException
- */
- String findGroupDn(String group) throws NamingException;
-
- /**
- * Verifies that specified user is a member of specified group.
- * @param user user id or distinguished name
- * @param groupDn group's DN
- * @return {@code true} if the user is a member of the group, {@code false} otherwise.
- * @throws NamingException
- */
- boolean isUserMemberOfGroup(String user, String groupDn) throws NamingException;
-
- /**
- * Finds groups that contain the specified user.
- * @param userDn user's distinguished name
- * @return list of groups
- * @throws NamingException
- */
- List<String> findGroupsForUser(String userDn) throws NamingException;
-
- /**
- * Executes an arbitrary query.
- * @param query any query
- * @return list of names in the namespace
- * @throws NamingException
- */
- List<String> executeCustomQuery(String query) throws NamingException;
-}
diff --git service/src/java/org/apache/hive/service/auth/ldap/DirSearchFactory.java service/src/java/org/apache/hive/service/auth/ldap/DirSearchFactory.java
deleted file mode 100644
index 43e3ee0..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/DirSearchFactory.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import javax.security.sasl.AuthenticationException;
-import org.apache.hadoop.hive.conf.HiveConf;
-
-/**
- * A factory for {@code DirSearch}.
- */
-public interface DirSearchFactory {
-
- /**
- * Returns an instance of {@code DirSearch}.
- * @param conf Hive configuration
- * @param user username
- * @param password user password
- * @return instance of {@code DirSearch}
- * @throws AuthenticationException
- */
- DirSearch getInstance(HiveConf conf, String user, String password) throws AuthenticationException;
-}
diff --git service/src/java/org/apache/hive/service/auth/ldap/Filter.java service/src/java/org/apache/hive/service/auth/ldap/Filter.java
deleted file mode 100644
index 96ea3fd..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/Filter.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import javax.security.sasl.AuthenticationException;
-
-/**
- * The object that filters LDAP users.
- *
- * The assumption is that this user was already authenticated by a previous bind operation.
- */
-public interface Filter {
-
- /**
- * Applies this filter to the authenticated user.
- * @param client LDAP client that will be used for execution of LDAP queries.
- * @param user username
- * @throws AuthenticationException
- */
- void apply(DirSearch client, String user) throws AuthenticationException;
-}
diff --git service/src/java/org/apache/hive/service/auth/ldap/FilterFactory.java service/src/java/org/apache/hive/service/auth/ldap/FilterFactory.java
deleted file mode 100644
index aac5c76..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/FilterFactory.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-
-/**
- * Factory for the filter.
- */
-public interface FilterFactory {
-
- /**
- * Returns an instance of the corresponding filter.
- * @param conf Hive properties used to configure the filter.
- * @return the filter or {@code null} if this filter doesn't support provided set of properties
- */
- Filter getInstance(HiveConf conf);
-}
diff --git service/src/java/org/apache/hive/service/auth/ldap/GroupFilterFactory.java service/src/java/org/apache/hive/service/auth/ldap/GroupFilterFactory.java
deleted file mode 100644
index 5470ad7..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/GroupFilterFactory.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Joiner;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import javax.naming.NamingException;
-import javax.security.sasl.AuthenticationException;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A factory for a {@link Filter} based on a list of allowed groups.
- *
- * The produced filter object filters out all users that are not members of at least one of
- * the groups provided in Hive configuration.
- * @see HiveConf.ConfVars#HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER
- */
-public final class GroupFilterFactory implements FilterFactory {
-
- /**
- * {@inheritDoc}
- */
- @Override
- public Filter getInstance(HiveConf conf) {
- Collection<String> groupFilter = conf.getStringCollection(
- HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER.varname);
-
- if (groupFilter.isEmpty()) {
- return null;
- }
-
- if (conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY) == null) {
- return new GroupMembershipKeyFilter(groupFilter);
- } else {
- return new UserMembershipKeyFilter(groupFilter);
- }
- }
-
- @VisibleForTesting
- static final class GroupMembershipKeyFilter implements Filter {
-
- private static final Logger LOG = LoggerFactory.getLogger(GroupMembershipKeyFilter.class);
-
- private final Set<String> groupFilter = new HashSet<>();
-
- GroupMembershipKeyFilter(Collection<String> groupFilter) {
- this.groupFilter.addAll(groupFilter);
- }
-
- @Override
- public void apply(DirSearch ldap, String user) throws AuthenticationException {
- LOG.info("Authenticating user '{}' using {}", user,
- GroupMembershipKeyFilter.class.getSimpleName());
-
- List<String> memberOf = null;
-
- try {
- String userDn = ldap.findUserDn(user);
- memberOf = ldap.findGroupsForUser(userDn);
- LOG.debug("User {} member of : {}", userDn, memberOf);
- } catch (NamingException e) {
- throw new AuthenticationException("LDAP Authentication failed for user", e);
- }
-
- for (String groupDn : memberOf) {
- String shortName = LdapUtils.getShortName(groupDn);
- if (groupFilter.stream().anyMatch(shortName::equalsIgnoreCase)) {
- LOG.debug("GroupMembershipKeyFilter passes: user '{}' is a member of '{}' group",
- user, groupDn);
- LOG.info("Authentication succeeded based on group membership");
- return;
- }
- }
- LOG.info("Authentication failed based on user membership");
- throw new AuthenticationException("Authentication failed: "
- + "User not a member of specified list");
- }
- }
-
- @VisibleForTesting
- static final class UserMembershipKeyFilter implements Filter {
-
- private static final Logger LOG = LoggerFactory.getLogger(UserMembershipKeyFilter.class);
-
- private final Collection<String> groupFilter;
-
- UserMembershipKeyFilter(Collection<String> groupFilter) {
- this.groupFilter = groupFilter;
- }
-
- @Override
- public void apply(DirSearch ldap, String user) throws AuthenticationException {
- LOG.info("Authenticating user '{}' using {}", user,
- UserMembershipKeyFilter.class.getSimpleName());
-
- List<String> groupDns = new ArrayList<>();
- for (String groupId : groupFilter) {
- try {
- String groupDn = ldap.findGroupDn(groupId);
- groupDns.add(groupDn);
- } catch (NamingException e) {
- LOG.warn("Cannot find DN for group", e);
- LOG.debug("Cannot find DN for group " + groupId, e);
- }
- }
-
- if (groupDns.isEmpty()) {
- String msg = String.format("No DN(s) has been found for any of group(s): %s",
- Joiner.on(',').join(groupFilter));
- LOG.debug(msg);
- throw new AuthenticationException("No DN(s) has been found for any of specified group(s)");
- }
-
- for (String groupDn : groupDns) {
- try {
- if (ldap.isUserMemberOfGroup(user, groupDn)) {
- LOG.debug("UserMembershipKeyFilter passes: user '{}' is a member of '{}' group",
- user, groupDn);
- LOG.info("Authentication succeeded based on user membership");
- return;
- }
- } catch (NamingException e) {
- LOG.warn("Cannot match user and group", e);
- if (LOG.isDebugEnabled()) {
- String msg = String.format("Cannot match user '%s' and group '%s'", user, groupDn);
- LOG.debug(msg, e);
- }
- }
- }
- throw new AuthenticationException(String.format(
- "Authentication failed: User '%s' is not a member of listed groups", user));
- }
- }
-}
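Which of the two strategies above is chosen hinges on a single configuration key, so the selection can be sketched directly (group names are placeholders):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.auth.ldap.Filter;
    import org.apache.hive.service.auth.ldap.GroupFilterFactory;

    public class GroupFilterConfigExample {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "admins,analysts");
        // No userMembership key configured, so the factory picks GroupMembershipKeyFilter;
        // setting HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY would flip it to the user variant.
        Filter filter = new GroupFilterFactory().getInstance(conf);
        System.out.println(filter != null); // true: the group list is non-empty
      }
    }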
diff --git service/src/java/org/apache/hive/service/auth/ldap/LdapSearch.java service/src/java/org/apache/hive/service/auth/ldap/LdapSearch.java
deleted file mode 100644
index 9fe4a7c..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/LdapSearch.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import javax.naming.NamingEnumeration;
-import javax.naming.NamingException;
-import javax.naming.directory.DirContext;
-import javax.naming.directory.SearchResult;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Implements search for LDAP.
- */
-public final class LdapSearch implements DirSearch {
-
- private static final Logger LOG = LoggerFactory.getLogger(LdapSearch.class);
-
- private final String baseDn;
- private final List<String> groupBases;
- private final List<String> userBases;
- private final List<String> userPatterns;
-
- private final QueryFactory queries;
-
- private final DirContext ctx;
-
- /**
- * Construct an instance of {@code LdapSearch}.
- * @param conf Hive configuration
- * @param ctx Directory service that will be used for the queries.
- * @throws NamingException
- */
- public LdapSearch(HiveConf conf, DirContext ctx) throws NamingException {
- baseDn = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN);
- userPatterns = LdapUtils.parseDnPatterns(conf,
- HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERDNPATTERN);
- groupBases = LdapUtils.patternsToBaseDns(LdapUtils.parseDnPatterns(conf,
- HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPDNPATTERN));
- userBases = LdapUtils.patternsToBaseDns(userPatterns);
- this.ctx = ctx;
- queries = new QueryFactory(conf);
- }
-
- /**
- * Closes this search object and releases any system resources associated
- * with it. If the search object is already closed then invoking this
- * method has no effect.
- */
- @Override
- public void close() {
- try {
- ctx.close();
- } catch (NamingException e) {
- LOG.warn("Exception when closing LDAP context:", e);
- }
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public String findUserDn(String user) throws NamingException {
- List<String> allLdapNames;
- if (LdapUtils.isDn(user)) {
- String userBaseDn = LdapUtils.extractBaseDn(user);
- String userRdn = LdapUtils.extractFirstRdn(user);
- allLdapNames = execute(Collections.singletonList(userBaseDn),
- queries.findUserDnByRdn(userRdn)).getAllLdapNames();
- } else {
- allLdapNames = findDnByPattern(userPatterns, user);
- if (allLdapNames.isEmpty()) {
- allLdapNames = execute(userBases, queries.findUserDnByName(user)).getAllLdapNames();
- }
- }
-
- if (allLdapNames.size() == 1) {
- return allLdapNames.get(0);
- } else {
- LOG.info("Expected exactly one user result for the user: {}, but got {}. Returning null",
- user, allLdapNames.size());
- LOG.debug("Matched users: {}", allLdapNames);
- return null;
- }
- }
-
- private List<String> findDnByPattern(List<String> patterns, String name) throws NamingException {
- for (String pattern : patterns) {
- String baseDnFromPattern = LdapUtils.extractBaseDn(pattern);
- String rdn = LdapUtils.extractFirstRdn(pattern).replaceAll("%s", name);
- List<String> list = execute(Collections.singletonList(baseDnFromPattern),
- queries.findDnByPattern(rdn)).getAllLdapNames();
- if (!list.isEmpty()) {
- return list;
- }
- }
- return Collections.emptyList();
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public String findGroupDn(String group) throws NamingException {
- return execute(groupBases, queries.findGroupDnById(group)).getSingleLdapName();
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public boolean isUserMemberOfGroup(String user, String groupDn) throws NamingException {
- String userId = LdapUtils.extractUserName(user);
- return execute(userBases, queries.isUserMemberOfGroup(userId, groupDn)).hasSingleResult();
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public List<String> findGroupsForUser(String userDn) throws NamingException {
- String userName = LdapUtils.extractUserName(userDn);
- return execute(groupBases, queries.findGroupsForUser(userName, userDn)).getAllLdapNames();
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public List<String> executeCustomQuery(String query) throws NamingException {
- return execute(Collections.singletonList(baseDn), queries.customQuery(query))
- .getAllLdapNamesAndAttributes();
- }
-
- private SearchResultHandler execute(Collection<String> baseDns, Query query) {
- List<NamingEnumeration<SearchResult>> searchResults = new ArrayList<>();
- LOG.debug("Executing a query: '{}' with base DNs {}.", query.getFilter(), baseDns);
- for (String aBaseDn : baseDns) {
- try {
- NamingEnumeration<SearchResult> searchResult = ctx.search(aBaseDn, query.getFilter(),
- query.getControls());
- if (searchResult != null) {
- searchResults.add(searchResult);
- }
- } catch (NamingException ex) {
- LOG.debug("Exception happened for query '" + query.getFilter() +
- "' with base DN '" + aBaseDn + "'", ex);
- }
- }
- return new SearchResultHandler(searchResults);
- }
-}
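End to end, the search object is obtained through the LdapSearchFactory in the next hunk by binding as the user. A sketch with a placeholder server and credentials:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.auth.ldap.DirSearch;
    import org.apache.hive.service.auth.ldap.LdapSearchFactory;

    public class LdapSearchExample {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL, "ldap://ldap.example.com");
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN, "dc=example,dc=com");
        // The bind itself is the password check; a bad password throws AuthenticationException.
        try (DirSearch search = new LdapSearchFactory()
            .getInstance(conf, "cn=user1,dc=example,dc=com", "placeholder-password")) {
          System.out.println(search.findUserDn("user1"));
        }
      }
    }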
diff --git service/src/java/org/apache/hive/service/auth/ldap/LdapSearchFactory.java service/src/java/org/apache/hive/service/auth/ldap/LdapSearchFactory.java
deleted file mode 100644
index c8ae6e1..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/LdapSearchFactory.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import java.util.Hashtable;
-import javax.naming.Context;
-import javax.naming.NamingException;
-import javax.naming.directory.DirContext;
-import javax.naming.directory.InitialDirContext;
-import javax.security.sasl.AuthenticationException;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A factory for LDAP search objects.
- */
-public final class LdapSearchFactory implements DirSearchFactory {
-
- private static final Logger LOG = LoggerFactory.getLogger(LdapSearchFactory.class);
-
- /**
- * {@inheritDoc}
- */
- @Override
- public DirSearch getInstance(HiveConf conf, String principal, String password)
- throws AuthenticationException {
- try {
- DirContext ctx = createDirContext(conf, principal, password);
- return new LdapSearch(conf, ctx);
- } catch (NamingException e) {
- LOG.debug("Could not connect to the LDAP Server:Authentication failed for {}", principal);
- throw new AuthenticationException("Error validating LDAP user", e);
- }
- }
-
- private static DirContext createDirContext(HiveConf conf, String principal, String password)
- throws NamingException {
- Hashtable<String, Object> env = new Hashtable<String, Object>();
- String ldapUrl = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL);
- env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
- env.put(Context.PROVIDER_URL, ldapUrl);
- env.put(Context.SECURITY_AUTHENTICATION, "simple");
- env.put(Context.SECURITY_CREDENTIALS, password);
- env.put(Context.SECURITY_PRINCIPAL, principal);
- LOG.debug("Connecting using principal {} to ldap url {}", principal, ldapUrl);
- return new InitialDirContext(env);
- }
-}
diff --git service/src/java/org/apache/hive/service/auth/ldap/LdapUtils.java service/src/java/org/apache/hive/service/auth/ldap/LdapUtils.java
deleted file mode 100644
index da14657..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/LdapUtils.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.service.ServiceUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Static utility methods related to LDAP authentication module.
- */
-public final class LdapUtils {
-
- private static final Logger LOG = LoggerFactory.getLogger(LdapUtils.class);
-
- /**
- * Extracts a base DN from the provided distinguished name.
- *
- * Example:
- *
- * "ou=CORP,dc=mycompany,dc=com" is the base DN for "cn=user1,ou=CORP,dc=mycompany,dc=com"
- *
- * @param dn distinguished name
- * @return base DN
- */
- public static String extractBaseDn(String dn) {
- final int indexOfFirstDelimiter = dn.indexOf(",");
- if (indexOfFirstDelimiter > -1) {
- return dn.substring(indexOfFirstDelimiter + 1);
- }
- return null;
- }
-
- /**
- * Extracts the first Relative Distinguished Name (RDN).
- *
- * Example:
- *
- * For DN "cn=user1,ou=CORP,dc=mycompany,dc=com" this method will return "cn=user1"
- * @param dn distinguished name
- * @return first RDN
- */
- public static String extractFirstRdn(String dn) {
- return dn.substring(0, dn.indexOf(","));
- }
-
- /**
-  * Checks whether the provided name is a distinguished name.
-  *
-  * Example: "cn=user1,ou=CORP,dc=mycompany,dc=com" is a DN, while "user1" is not.
-  *
-  * @param name name to be checked
-  * @return true if the provided name is a distinguished name
-  */
- public static boolean isDn(String name) {
- return name.contains("=");
- }
-
- /**
- * Reads and parses DN patterns from Hive configuration.
- *
- * If no patterns are provided in the configuration, then the base DN will be used.
- * @param conf Hive configuration
- * @param var variable to be read
- * @return a list of DN patterns
- * @see HiveConf.ConfVars#HIVE_SERVER2_PLAIN_LDAP_BASEDN
- * @see HiveConf.ConfVars#HIVE_SERVER2_PLAIN_LDAP_GUIDKEY
- * @see HiveConf.ConfVars#HIVE_SERVER2_PLAIN_LDAP_GROUPDNPATTERN
- * @see HiveConf.ConfVars#HIVE_SERVER2_PLAIN_LDAP_USERDNPATTERN
- */
- public static List parseDnPatterns(HiveConf conf, HiveConf.ConfVars var) {
- String patternsString = conf.getVar(var);
- List result = new ArrayList<>();
- if (StringUtils.isBlank(patternsString)) {
- String defaultBaseDn = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN);
- String guidAttr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GUIDKEY);
- if (StringUtils.isNotBlank(defaultBaseDn)) {
- result.add(guidAttr + "=%s," + defaultBaseDn);
- }
- } else {
- String[] patterns = patternsString.split(":");
- for (String pattern : patterns) {
- if (pattern.contains(",") && pattern.contains("=")) {
- result.add(pattern);
- } else {
- LOG.warn("Unexpected format for " + var + "..ignoring " + pattern);
- }
- }
- }
- return result;
- }
-
- private static String patternToBaseDn(String pattern) {
- if (pattern.contains("=%s")) {
- return pattern.split(",", 2)[1];
- }
- return pattern;
- }
-
- /**
- * Converts a collection of Distinguished Name patterns to a collection of base DNs.
- * @param patterns Distinguished Name patterns
- * @return a list of base DNs
- * @see HiveConf.ConfVars#HIVE_SERVER2_PLAIN_LDAP_GROUPDNPATTERN
- * @see HiveConf.ConfVars#HIVE_SERVER2_PLAIN_LDAP_USERDNPATTERN
- */
- public static List patternsToBaseDns(Collection patterns) {
- List result = new ArrayList<>();
- for (String pattern : patterns) {
- result.add(patternToBaseDn(pattern));
- }
- return result;
- }
-
- /**
- * Creates a list of principals to be used for user authentication.
- * @param conf Hive configuration
- * @param user username
- * @return a list of user's principals
- */
- public static List createCandidatePrincipals(HiveConf conf, String user) {
- if (hasDomain(user) || isDn(user)) {
- return Collections.singletonList(user);
- }
-
- String ldapDomain = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN);
- if (StringUtils.isNotBlank(ldapDomain)) {
- return Collections.singletonList(user + "@" + ldapDomain);
- }
-
- List userPatterns = parseDnPatterns(conf,
- HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERDNPATTERN);
- if (userPatterns.isEmpty()) {
- return Collections.singletonList(user);
- }
-
- List candidatePrincipals = new ArrayList<>();
- for (String userPattern : userPatterns) {
- candidatePrincipals.add(userPattern.replaceAll("%s", user));
- }
- return candidatePrincipals;
- }
-
- private LdapUtils() {
- }
-}
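The DN helpers above are pure string manipulation, so their behavior is easiest to pin down with the Javadoc's own example DN:

    import org.apache.hive.service.auth.ldap.LdapUtils;

    public class LdapUtilsExample {
      public static void main(String[] args) {
        String dn = "cn=user1,ou=CORP,dc=mycompany,dc=com";
        System.out.println(LdapUtils.extractBaseDn(dn));   // ou=CORP,dc=mycompany,dc=com
        System.out.println(LdapUtils.extractFirstRdn(dn)); // cn=user1
        System.out.println(LdapUtils.isDn(dn));            // true
        System.out.println(LdapUtils.isDn("user1"));       // false
      }
    }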
diff --git service/src/java/org/apache/hive/service/auth/ldap/Query.java service/src/java/org/apache/hive/service/auth/ldap/Query.java
deleted file mode 100644
index 85ffe44..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/Query.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import com.google.common.base.Preconditions;
-import java.util.ArrayList;
-import java.util.List;
-import javax.naming.directory.SearchControls;
-import org.stringtemplate.v4.ST;
-
-/**
- * The object that encompasses all components of a Directory Service search query.
- *
- * @see LdapSearch
- */
-public final class Query {
-
- private final String filter;
- private final SearchControls controls;
-
- /**
- * Constructs an instance of Directory Service search query.
- * @param filter search filter
- * @param controls search controls
- */
- public Query(String filter, SearchControls controls) {
- this.filter = filter;
- this.controls = controls;
- }
-
- /**
- * Returns search filter.
- * @return search filter
- */
- public String getFilter() {
- return filter;
- }
-
- /**
- * Returns search controls.
- * @return search controls
- */
- public SearchControls getControls() {
- return controls;
- }
-
- /**
- * Creates Query Builder.
- * @return query builder.
- */
- public static QueryBuilder builder() {
- return new QueryBuilder();
- }
-
- /**
- * A builder of the {@link Query}.
- */
- public static final class QueryBuilder {
-
- private ST filterTemplate;
- private final SearchControls controls = new SearchControls();
- private final List<String> returningAttributes = new ArrayList<>();
-
- private QueryBuilder() {
- controls.setSearchScope(SearchControls.SUBTREE_SCOPE);
- controls.setReturningAttributes(new String[0]);
- }
-
- /**
- * Sets search filter template.
- * @param filterTemplate search filter template
- * @return the current instance of the builder
- */
- public QueryBuilder filter(String filterTemplate) {
- this.filterTemplate = new ST(filterTemplate);
- return this;
- }
-
- /**
- * Sets mapping between names in the search filter template and actual values.
- * @param key marker in the search filter template.
- * @param value actual value
- * @return the current instance of the builder
- */
- public QueryBuilder map(String key, String value) {
- filterTemplate.add(key, value);
- return this;
- }
-
- /**
- * Sets mapping between names in the search filter template and actual values.
- * @param key marker in the search filter template.
- * @param values array of values
- * @return the current instance of the builder
- */
- public QueryBuilder map(String key, String[] values) {
- filterTemplate.add(key, values);
- return this;
- }
-
- /**
- * Sets attribute that should be returned in results for the query.
- * @param attributeName attribute name
- * @return the current instance of the builder
- */
- public QueryBuilder returnAttribute(String attributeName) {
- returningAttributes.add(attributeName);
- return this;
- }
-
- /**
- * Sets the maximum number of entries to be returned as a result of the search.
- *
- * 0 indicates no limit: all entries will be returned.
- * @param limit The maximum number of entries that will be returned.
- * @return the current instance of the builder
- */
- public QueryBuilder limit(int limit) {
- controls.setCountLimit(limit);
- return this;
- }
-
- private void validate() {
- Preconditions.checkArgument(filterTemplate != null,
- "filter is required for LDAP search query");
- }
-
- private String createFilter() {
- return filterTemplate.render();
- }
-
- private void updateControls() {
- if (!returningAttributes.isEmpty()) {
- controls.setReturningAttributes(returningAttributes
- .toArray(new String[returningAttributes.size()]));
- }
- }
-
- /**
- * Builds an instance of {@link Query}.
- * @return configured directory service query
- */
- public Query build() {
- validate();
- String filter = createFilter();
- updateControls();
- return new Query(filter, controls);
- }
- }
-}
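The builder pairs a StringTemplate filter with JNDI SearchControls; <name> markers in the filter are bound through map(). A sketch with a hypothetical filter:

    import org.apache.hive.service.auth.ldap.Query;

    public class QueryBuilderExample {
      public static void main(String[] args) {
        Query query = Query.builder()
            .filter("(&(objectClass=person)(uid=<user>))") // <user> is an ST marker, not LDAP syntax
            .map("user", "user1")
            .returnAttribute("memberOf")
            .limit(2) // ask for at most two entries; lets callers detect ambiguous matches
            .build();
        System.out.println(query.getFilter()); // (&(objectClass=person)(uid=user1))
      }
    }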
diff --git service/src/java/org/apache/hive/service/auth/ldap/QueryFactory.java service/src/java/org/apache/hive/service/auth/ldap/QueryFactory.java
deleted file mode 100644
index e06f112..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/QueryFactory.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-import org.apache.hadoop.hive.conf.HiveConf;
-
-/**
- * A factory for common types of directory service search queries.
- */
-final class QueryFactory {
-
- private static final String[] USER_OBJECT_CLASSES = {"person", "user", "inetOrgPerson"};
-
- private final String guidAttr;
- private final String groupClassAttr;
- private final String groupMembershipAttr;
- private final String userMembershipAttr;
-
- /**
- * Constructs the factory based on provided Hive configuration.
- * @param conf Hive configuration
- */
- public QueryFactory(HiveConf conf) {
- guidAttr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GUIDKEY);
- groupClassAttr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPCLASS_KEY);
- groupMembershipAttr = conf.getVar(
- HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPMEMBERSHIP_KEY);
- userMembershipAttr = conf.getVar(
- HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY);
- }
-
- /**
- * Returns a query for finding Group DN based on group unique ID.
- * @param groupId group unique identifier
- * @return an instance of {@link Query}
- */
- public Query findGroupDnById(String groupId) {
- return Query.builder()
- .filter("(&(objectClass=)(=))")
- .map("guidAttr", guidAttr)
- .map("groupClassAttr", groupClassAttr)
- .map("groupID", groupId)
- .limit(2)
- .build();
- }
-
- /**
- * Returns a query for finding user DN based on user RDN.
- * @param userRdn user RDN
- * @return an instance of {@link Query}
- */
- public Query findUserDnByRdn(String userRdn) {
- return Query.builder()
- .filter("(&(|)}>)"
- + "())")
- .limit(2)
- .map("classes", USER_OBJECT_CLASSES)
- .map("userRdn", userRdn)
- .build();
- }
-
- /**
- * Returns a query for finding user DN based on DN pattern.
- *
- * The name of this method was derived from the original implementation of LDAP authentication.
- * This method should be replaced by {@link QueryFactory#findUserDnByRdn(java.lang.String)}.
- *
- * @param rdn user RDN
- * @return an instance of {@link Query}
- */
- public Query findDnByPattern(String rdn) {
- return Query.builder()
- .filter("()")
- .map("rdn", rdn)
- .limit(2)
- .build();
- }
-
- /**
- * Returns a query for finding user DN based on user unique name.
- * @param userName user unique name (uid or sAMAccountName)
- * @return an instance of {@link Query}
- */
- public Query findUserDnByName(String userName) {
- return Query.builder()
- .filter("(&(|)}>)"
- + "(|(uid=)(sAMAccountName=)))")
- .map("classes", USER_OBJECT_CLASSES)
- .map("userName", userName)
- .limit(2)
- .build();
- }
-
- /**
- * Returns a query for finding groups to which the user belongs.
- * @param userName username
- * @param userDn user DN
- * @return an instance of {@link Query}
- */
- public Query findGroupsForUser(String userName, String userDn) {
- return Query.builder()
- .filter("(&(objectClass=)(|(=)"
- + "(=)))")
- .map("groupClassAttr", groupClassAttr)
- .map("groupMembershipAttr", groupMembershipAttr)
- .map("userName", userName)
- .map("userDn", userDn)
- .build();
- }
-
- /**
- * Returns a query for checking whether specified user is a member of specified group.
- *
- * The query requires {@value HiveConf#HIVE_SERVER2_AUTHENTICATION_LDAP_USERMEMBERSHIPKEY_NAME}
- * Hive configuration property to be set.
- *
- * @param userId user unique identifier
- * @param groupDn group DN
- * @return an instance of {@link Query}
- * @see HiveConf.ConfVars#HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY
- * @throws NullPointerException when
- * {@value HiveConf#HIVE_SERVER2_AUTHENTICATION_LDAP_USERMEMBERSHIPKEY_NAME} is not set.
- */
- public Query isUserMemberOfGroup(String userId, String groupDn) {
- Preconditions.checkState(!Strings.isNullOrEmpty(userMembershipAttr),
- "hive.server2.authentication.ldap.userMembershipKey is not configured.");
- return Query.builder()
- .filter("(&(|)}>)" +
- "(=)(=))")
- .map("classes", USER_OBJECT_CLASSES)
- .map("guidAttr", guidAttr)
- .map("userMembershipAttr", userMembershipAttr)
- .map("userId", userId)
- .map("groupDn", groupDn)
- .limit(2)
- .build();
- }
-
- /**
- * Returns a query object created for the custom filter.
- *
- * This query is configured to return a group membership attribute as part of the search result.
- * @param searchFilter custom search filter
- * @return an instance of {@link Query}
- */
- public Query customQuery(String searchFilter) {
- Query.QueryBuilder builder = Query.builder();
- builder.filter(searchFilter);
- if (!Strings.isNullOrEmpty(groupMembershipAttr)) {
- builder.returnAttribute(groupMembershipAttr);
- }
- return builder.build();
- }
-}
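
For illustration (hypothetical configuration and user, not from this patch): with the default USER_OBJECT_CLASSES above, the findUserDnByName template renders to a concrete LDAP filter such as

    (&(|(objectClass=person)(objectClass=user)(objectClass=inetOrgPerson))(|(uid=alice)(sAMAccountName=alice)))

The <classes:{ class |(objectClass=<class>)}> fragment is a StringTemplate iteration that expands to one (objectClass=...) clause per entry of USER_OBJECT_CLASSES.
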
diff --git service/src/java/org/apache/hive/service/auth/ldap/SearchResultHandler.java service/src/java/org/apache/hive/service/auth/ldap/SearchResultHandler.java
deleted file mode 100644
index 5f0f3b6..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/SearchResultHandler.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import javax.naming.NamingEnumeration;
-import javax.naming.NamingException;
-import javax.naming.directory.Attribute;
-import javax.naming.directory.SearchResult;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * The object that handles Directory Service search results.
- * In most cases it converts search results into a list of names in the namespace.
- */
-public final class SearchResultHandler {
-
- private static final Logger LOG = LoggerFactory.getLogger(SearchResultHandler.class);
-
- private final Collection<NamingEnumeration<SearchResult>> searchResults;
-
- /**
- * Constructs a search result handler object for the provided search results.
- * @param searchResults directory service search results
- */
- public SearchResultHandler(Collection<NamingEnumeration<SearchResult>> searchResults) {
- this.searchResults = searchResults;
- }
-
- /**
- * Returns all entries from the search result.
- * @return a list of names in the namespace
- * @throws NamingException
- */
- public List<String> getAllLdapNames() throws NamingException {
- final List<String> result = new ArrayList<>();
- handle(new RecordProcessor() {
- @Override
- public boolean process(SearchResult record) throws NamingException {
- result.add(record.getNameInNamespace());
- return true;
- }
- });
- return result;
- }
-
- /**
- * Checks whether search result contains exactly one entry.
- * @return true if the search result contains a single entry.
- * @throws NamingException
- */
- public boolean hasSingleResult() throws NamingException {
- List<String> allResults = getAllLdapNames();
- return allResults != null && allResults.size() == 1;
- }
-
- /**
- * Returns a single entry from the search result.
- * Throws {@code NamingException} if the search result doesn't contain exactly one entry.
- * @return name in the namespace
- * @throws NamingException
- */
- public String getSingleLdapName() throws NamingException {
- List<String> allLdapNames = getAllLdapNames();
- if (allLdapNames.size() == 1) {
- return allLdapNames.get(0);
- }
- throw new NamingException("Single result was expected");
- }
-
- /**
- * Returns all entries and all attributes for these entries.
- * @return a list that includes all entries and all attributes from these entries.
- * @throws NamingException
- */
- public List<String> getAllLdapNamesAndAttributes() throws NamingException {
- final List<String> result = new ArrayList<>();
- handle(new RecordProcessor() {
- @Override
- public boolean process(SearchResult record) throws NamingException {
- result.add(record.getNameInNamespace());
- NamingEnumeration<? extends Attribute> allAttributes = record.getAttributes().getAll();
- while (allAttributes.hasMore()) {
- Attribute attribute = allAttributes.next();
- addAllAttributeValuesToResult(attribute.getAll());
- }
- return true;
- }
-
- private void addAllAttributeValuesToResult(NamingEnumeration<?> values) throws NamingException {
- while (values.hasMore()) {
- result.add(String.valueOf(values.next()));
- }
- }
-
- });
- return result;
- }
-
- /**
- * Allows for custom processing of the search results.
- * @param processor {@link RecordProcessor} implementation
- * @throws NamingException
- */
- public void handle(RecordProcessor processor) throws NamingException {
- try {
- for (NamingEnumeration<SearchResult> searchResult : searchResults) {
- while (searchResult.hasMore()) {
- if (!processor.process(searchResult.next())) {
- return;
- }
- }
- }
- } finally {
- for (NamingEnumeration<SearchResult> searchResult : searchResults) {
- try {
- searchResult.close();
- } catch (NamingException ex) {
- LOG.warn("Failed to close LDAP search result", ex);
- }
- }
- }
- }
-
- /**
- * An interface used by {@link SearchResultHandler} for processing records of
- * a {@link SearchResult} on a per-record basis.
- *
- * Implementations of this interface perform the actual work of processing each record,
- * but don't need to worry about exception handling, closing underlying data structures,
- * and combining results from several search requests.
- * @see SearchResultHandler
- */
- public interface RecordProcessor {
-
- /**
- * Implementations must implement this method to process each record in {@link SearchResult}.
- * @param record the {@code SearchResult} to process
- * @return {@code true} to continue processing, {@code false} to stop iterating
- * over search results
- * @throws NamingException
- */
- boolean process(SearchResult record) throws NamingException;
- }
-}
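
A short sketch of the extension point above (hypothetical class, not part of this patch): a custom RecordProcessor that stops iterating once it has collected a fixed number of entry names.

    import java.util.ArrayList;
    import java.util.List;
    import javax.naming.NamingException;
    import javax.naming.directory.SearchResult;

    public class FirstNNamesProcessor implements SearchResultHandler.RecordProcessor {
      private final int limit;
      private final List<String> names = new ArrayList<>();

      public FirstNNamesProcessor(int limit) {
        this.limit = limit;
      }

      @Override
      public boolean process(SearchResult record) throws NamingException {
        names.add(record.getNameInNamespace());
        return names.size() < limit; // returning false stops the iteration early
      }

      public List<String> getNames() {
        return names;
      }
    }

Because handle() closes the underlying NamingEnumerations itself, the processor only implements the per-record logic, e.g. handler.handle(new FirstNNamesProcessor(10)).
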
diff --git service/src/java/org/apache/hive/service/auth/ldap/UserFilterFactory.java service/src/java/org/apache/hive/service/auth/ldap/UserFilterFactory.java
deleted file mode 100644
index c3bcfd9..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/UserFilterFactory.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.Set;
-import javax.security.sasl.AuthenticationException;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A factory for a {@link Filter} based on a list of allowed users.
- *
- * The produced filter object filters out all users that are not on the list
- * provided in the Hive configuration.
- * @see HiveConf.ConfVars#HIVE_SERVER2_PLAIN_LDAP_USERFILTER
- */
-public final class UserFilterFactory implements FilterFactory {
-
- /**
- * {@inheritDoc}
- */
- @Override
- public Filter getInstance(HiveConf conf) {
- Collection<String> userFilter = conf.getStringCollection(
- HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERFILTER.varname);
-
- if (userFilter.isEmpty()) {
- return null;
- }
-
- return new UserFilter(userFilter);
- }
-
- private static final class UserFilter implements Filter {
-
- private static final Logger LOG = LoggerFactory.getLogger(UserFilter.class);
-
- private final Set<String> userFilter = new HashSet<>();
-
- UserFilter(Collection<String> userFilter) {
- for (String userFilterItem : userFilter) {
- this.userFilter.add(userFilterItem.toLowerCase());
- }
- }
-
- @Override
- public void apply(DirSearch ldap, String user) throws AuthenticationException {
- LOG.info("Authenticating user '{}' using user filter", user);
- String userName = LdapUtils.extractUserName(user).toLowerCase();
- if (!userFilter.contains(userName)) {
- LOG.info("Authentication failed based on user membership");
- throw new AuthenticationException("Authentication failed: "
- + "User not a member of specified list");
- }
- }
- }
-}
diff --git service/src/java/org/apache/hive/service/auth/ldap/UserSearchFilterFactory.java service/src/java/org/apache/hive/service/auth/ldap/UserSearchFilterFactory.java
deleted file mode 100644
index 9878121..0000000
--- service/src/java/org/apache/hive/service/auth/ldap/UserSearchFilterFactory.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth.ldap;
-
-import java.util.Collection;
-import javax.naming.NamingException;
-import javax.security.sasl.AuthenticationException;
-import org.apache.hadoop.hive.conf.HiveConf;
-
-/**
- * A factory for a {@link Filter} that checks whether the provided user can be found in the directory.
- *
- * The produced filter object filters out all users that are not found in the directory.
- */
-public final class UserSearchFilterFactory implements FilterFactory {
-
- /**
- * {@inheritDoc}
- */
- @Override
- public Filter getInstance(HiveConf conf) {
- Collection<String> groupFilter = conf.getStringCollection(
- HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER.varname);
- Collection<String> userFilter = conf.getStringCollection(
- HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERFILTER.varname);
-
- if (groupFilter.isEmpty() && userFilter.isEmpty()) {
- return null;
- }
-
- return new UserSearchFilter();
- }
-
- private static final class UserSearchFilter implements Filter {
- @Override
- public void apply(DirSearch client, String user) throws AuthenticationException {
- try {
- String userDn = client.findUserDn(user);
-
- // This should not be null because we were allowed to bind with this username;
- // this is a safety check in case we were able to bind anonymously.
- if (userDn == null) {
- throw new AuthenticationException("Authentication failed: User search failed");
- }
- } catch (NamingException e) {
- throw new AuthenticationException("LDAP Authentication failed for user", e);
- }
- }
- }
-}
diff --git service/src/java/org/apache/hive/service/cli/ColumnBasedSet.java service/src/java/org/apache/hive/service/cli/ColumnBasedSet.java
deleted file mode 100644
index 7bdf765..0000000
--- service/src/java/org/apache/hive/service/cli/ColumnBasedSet.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import java.io.ByteArrayInputStream;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.serde2.thrift.ColumnBuffer;
-import org.apache.hadoop.hive.serde2.thrift.Type;
-import org.apache.hive.service.rpc.thrift.TColumn;
-import org.apache.hive.service.rpc.thrift.TRow;
-import org.apache.hive.service.rpc.thrift.TRowSet;
-import org.apache.thrift.TException;
-import org.apache.thrift.protocol.TCompactProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.transport.TIOStreamTransport;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * ColumnBasedSet.
- */
-public class ColumnBasedSet implements RowSet {
-
- private long startOffset;
-
- private final TypeDescriptor[] descriptors; // non-null only for writing (server-side)
- private final List<ColumnBuffer> columns;
- private byte[] blob;
- private boolean isBlobBased = false;
- public static final Logger LOG = LoggerFactory.getLogger(ColumnBasedSet.class);
-
- public ColumnBasedSet(TableSchema schema) {
- descriptors = schema.toTypeDescriptors();
- columns = new ArrayList<ColumnBuffer>();
- for (ColumnDescriptor colDesc : schema.getColumnDescriptors()) {
- columns.add(new ColumnBuffer(colDesc.getType()));
- }
- }
-
- public ColumnBasedSet(TRowSet tRowSet) throws TException {
- descriptors = null;
- columns = new ArrayList<ColumnBuffer>();
- // Use TCompactProtocol to read serialized TColumns
- if (tRowSet.isSetBinaryColumns()) {
- TProtocol protocol =
- new TCompactProtocol(new TIOStreamTransport(new ByteArrayInputStream(
- tRowSet.getBinaryColumns())));
- // Read from the stream using the protocol for each column in final schema
- for (int i = 0; i < tRowSet.getColumnCount(); i++) {
- TColumn tvalue = new TColumn();
- try {
- tvalue.read(protocol);
- } catch (TException e) {
- LOG.error(e.getMessage(), e);
- throw new TException("Error reading column value from the row set blob", e);
- }
- columns.add(new ColumnBuffer(tvalue));
- }
- }
- else {
- if (tRowSet.getColumns() != null) {
- for (TColumn tvalue : tRowSet.getColumns()) {
- columns.add(new ColumnBuffer(tvalue));
- }
- }
- }
- startOffset = tRowSet.getStartRowOffset();
- }
-
- private ColumnBasedSet(TypeDescriptor[] descriptors, List<ColumnBuffer> columns, long startOffset) {
- this.descriptors = descriptors;
- this.columns = columns;
- this.startOffset = startOffset;
- }
-
- public ColumnBasedSet(TableSchema schema, boolean isBlobBased) {
- this(schema);
- this.isBlobBased = isBlobBased;
- }
-
- @Override
- public ColumnBasedSet addRow(Object[] fields) {
- if (isBlobBased) {
- this.blob = (byte[]) fields[0];
- } else {
- for (int i = 0; i < fields.length; i++) {
- TypeDescriptor descriptor = descriptors[i];
- Object field = fields[i];
- if (field != null && descriptor.getType() == Type.DECIMAL_TYPE) {
- int scale = descriptor.getDecimalDigits();
- field = ((HiveDecimal) field).toFormatString(scale);
- }
- columns.get(i).addValue(descriptor.getType(), field);
- }
- }
- return this;
- }
-
- public List<ColumnBuffer> getColumns() {
- return columns;
- }
-
- @Override
- public int numColumns() {
- return columns.size();
- }
-
- @Override
- public int numRows() {
- return columns.isEmpty() ? 0 : columns.get(0).size();
- }
-
- @Override
- public ColumnBasedSet extractSubset(int maxRows) {
- int numRows = Math.min(numRows(), maxRows);
-
- List<ColumnBuffer> subset = new ArrayList<ColumnBuffer>();
- for (int i = 0; i < columns.size(); i++) {
- subset.add(columns.get(i).extractSubset(numRows));
- }
- ColumnBasedSet result = new ColumnBasedSet(descriptors, subset, startOffset);
- startOffset += numRows;
- return result;
- }
-
- @Override
- public long getStartOffset() {
- return startOffset;
- }
-
- @Override
- public void setStartOffset(long startOffset) {
- this.startOffset = startOffset;
- }
-
- public TRowSet toTRowSet() {
- TRowSet tRowSet = new TRowSet(startOffset, new ArrayList<TRow>());
- if (isBlobBased) {
- tRowSet.setColumns(null);
- tRowSet.setBinaryColumns(blob);
- tRowSet.setColumnCount(numColumns());
- } else {
- for (int i = 0; i < columns.size(); i++) {
- tRowSet.addToColumns(columns.get(i).toTColumn());
- }
- }
- return tRowSet;
- }
-
- @Override
- public Iterator<Object[]> iterator() {
- return new Iterator<Object[]>() {
-
- private int index;
- private final Object[] convey = new Object[numColumns()];
-
- @Override
- public boolean hasNext() {
- return index < numRows();
- }
-
- @Override
- public Object[] next() {
- for (int i = 0; i < columns.size(); i++) {
- convey[i] = columns.get(i).get(index);
- }
- index++;
- return convey;
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException("remove");
- }
- };
- }
-
- public Object[] fill(int index, Object[] convey) {
- for (int i = 0; i < columns.size(); i++) {
- convey[i] = columns.get(i).get(index);
- }
- return convey;
- }
-}
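
A server-side usage sketch of the row set above (hypothetical schema and data, not from this patch): rows are appended column-wise, then peeled off in fetch-sized batches for the wire.

    import org.apache.hadoop.hive.serde2.thrift.Type;
    import org.apache.hive.service.rpc.thrift.TRowSet;

    public class ColumnBasedSetExample {
      public static void main(String[] args) {
        TableSchema schema = new TableSchema()
            .addStringColumn("name", "user name")
            .addPrimitiveColumn("age", Type.INT_TYPE, "user age");

        RowSet rows = new ColumnBasedSet(schema);
        rows.addRow(new Object[] {"alice", 30});
        rows.addRow(new Object[] {"bob", null}); // nulls are tracked per column buffer

        RowSet batch = rows.extractSubset(1); // consumes one row and advances the offset
        TRowSet wire = batch.toTRowSet();     // the Thrift payload returned to the client
        assert rows.numRows() == 1 && rows.getStartOffset() == 1;
      }
    }
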
diff --git service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
deleted file mode 100644
index b6e1a68..0000000
--- service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import org.apache.hadoop.hive.serde2.thrift.Type;
-import org.apache.hive.service.rpc.thrift.TColumnDesc;
-
-
-/**
- * ColumnDescriptor.
- *
- */
-public class ColumnDescriptor {
- private final String name;
- private final String comment;
- private final TypeDescriptor type;
- // ordinal position of this column in the schema
- private final int position;
-
- public ColumnDescriptor(String name, String comment, TypeDescriptor type, int position) {
- this.name = name;
- this.comment = comment;
- this.type = type;
- this.position = position;
- }
-
- public ColumnDescriptor(TColumnDesc tColumnDesc) {
- name = tColumnDesc.getColumnName();
- comment = tColumnDesc.getComment();
- type = new TypeDescriptor(tColumnDesc.getTypeDesc());
- position = tColumnDesc.getPosition();
- }
-
- public static ColumnDescriptor newPrimitiveColumnDescriptor(String name, String comment,
- Type type, int position) {
- // Current usage looks like it's only for metadata columns, but if that changes then
- // this method may need to require a type qualifiers argument.
- return new ColumnDescriptor(name, comment, new TypeDescriptor(type), position);
- }
-
- public String getName() {
- return name;
- }
-
- public String getComment() {
- return comment;
- }
-
- public TypeDescriptor getTypeDescriptor() {
- return type;
- }
-
- public int getOrdinalPosition() {
- return position;
- }
-
- public TColumnDesc toTColumnDesc() {
- TColumnDesc tColumnDesc = new TColumnDesc();
- tColumnDesc.setColumnName(name);
- tColumnDesc.setComment(comment);
- tColumnDesc.setTypeDesc(type.toTTypeDesc());
- tColumnDesc.setPosition(position);
- return tColumnDesc;
- }
-
- public Type getType() {
- return type.getType();
- }
-
- public boolean isPrimitive() {
- return type.getType().isPrimitiveType();
- }
-
- public String getTypeName() {
- return type.getTypeName();
- }
-}
diff --git service/src/java/org/apache/hive/service/cli/ColumnValue.java service/src/java/org/apache/hive/service/cli/ColumnValue.java
deleted file mode 100644
index 09ca127..0000000
--- service/src/java/org/apache/hive/service/cli/ColumnValue.java
+++ /dev/null
@@ -1,317 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import java.math.BigDecimal;
-
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.common.type.TimestampTZ;
-import org.apache.hadoop.hive.serde2.thrift.Type;
-import org.apache.hive.service.rpc.thrift.TBoolValue;
-import org.apache.hive.service.rpc.thrift.TByteValue;
-import org.apache.hive.service.rpc.thrift.TColumnValue;
-import org.apache.hive.service.rpc.thrift.TDoubleValue;
-import org.apache.hive.service.rpc.thrift.TI16Value;
-import org.apache.hive.service.rpc.thrift.TI32Value;
-import org.apache.hive.service.rpc.thrift.TI64Value;
-import org.apache.hive.service.rpc.thrift.TStringValue;
-
-/**
- * Column value conversion helpers for protocols before HIVE_CLI_SERVICE_PROTOCOL_V6 (used by RowBasedSet).
- *
- */
-public class ColumnValue {
-
- private static TColumnValue booleanValue(Boolean value) {
- TBoolValue tBoolValue = new TBoolValue();
- if (value != null) {
- tBoolValue.setValue(value);
- }
- return TColumnValue.boolVal(tBoolValue);
- }
-
- private static TColumnValue byteValue(Byte value) {
- TByteValue tByteValue = new TByteValue();
- if (value != null) {
- tByteValue.setValue(value);
- }
- return TColumnValue.byteVal(tByteValue);
- }
-
- private static TColumnValue shortValue(Short value) {
- TI16Value tI16Value = new TI16Value();
- if (value != null) {
- tI16Value.setValue(value);
- }
- return TColumnValue.i16Val(tI16Value);
- }
-
- private static TColumnValue intValue(Integer value) {
- TI32Value tI32Value = new TI32Value();
- if (value != null) {
- tI32Value.setValue(value);
- }
- return TColumnValue.i32Val(tI32Value);
- }
-
- private static TColumnValue longValue(Long value) {
- TI64Value tI64Value = new TI64Value();
- if (value != null) {
- tI64Value.setValue(value);
- }
- return TColumnValue.i64Val(tI64Value);
- }
-
- private static TColumnValue floatValue(Float value) {
- TDoubleValue tDoubleValue = new TDoubleValue();
- if (value != null) {
- tDoubleValue.setValue(value);
- }
- return TColumnValue.doubleVal(tDoubleValue);
- }
-
- private static TColumnValue doubleValue(Double value) {
- TDoubleValue tDoubleValue = new TDoubleValue();
- if (value != null) {
- tDoubleValue.setValue(value);
- }
- return TColumnValue.doubleVal(tDoubleValue);
- }
-
- private static TColumnValue stringValue(String value) {
- TStringValue tStringValue = new TStringValue();
- if (value != null) {
- tStringValue.setValue(value);
- }
- return TColumnValue.stringVal(tStringValue);
- }
-
- private static TColumnValue stringValue(HiveChar value) {
- TStringValue tStringValue = new TStringValue();
- if (value != null) {
- tStringValue.setValue(value.toString());
- }
- return TColumnValue.stringVal(tStringValue);
- }
-
- private static TColumnValue stringValue(HiveVarchar value) {
- TStringValue tStringValue = new TStringValue();
- if (value != null) {
- tStringValue.setValue(value.toString());
- }
- return TColumnValue.stringVal(tStringValue);
- }
-
- private static TColumnValue dateValue(Date value) {
- TStringValue tStringValue = new TStringValue();
- if (value != null) {
- tStringValue.setValue(value.toString());
- }
- return new TColumnValue(TColumnValue.stringVal(tStringValue));
- }
-
- private static TColumnValue timestampValue(Timestamp value) {
- TStringValue tStringValue = new TStringValue();
- if (value != null) {
- tStringValue.setValue(value.toString());
- }
- return TColumnValue.stringVal(tStringValue);
- }
-
- private static TColumnValue timestampTZValue(TimestampTZ value) {
- TStringValue tStringValue = new TStringValue();
- if (value != null) {
- tStringValue.setValue(value.toString());
- }
- return TColumnValue.stringVal(tStringValue);
- }
-
- private static TColumnValue stringValue(HiveDecimal value, TypeDescriptor typeDescriptor) {
- TStringValue tStrValue = new TStringValue();
- if (value != null) {
- int scale = typeDescriptor.getDecimalDigits();
- tStrValue.setValue(value.toFormatString(scale));
- }
- return TColumnValue.stringVal(tStrValue);
- }
-
- private static TColumnValue stringValue(HiveIntervalYearMonth value) {
- TStringValue tStrValue = new TStringValue();
- if (value != null) {
- tStrValue.setValue(value.toString());
- }
- return TColumnValue.stringVal(tStrValue);
- }
-
- private static TColumnValue stringValue(HiveIntervalDayTime value) {
- TStringValue tStrValue = new TStringValue();
- if (value != null) {
- tStrValue.setValue(value.toString());
- }
- return TColumnValue.stringVal(tStrValue);
- }
-
- public static TColumnValue toTColumnValue(TypeDescriptor typeDescriptor, Object value) {
- Type type = typeDescriptor.getType();
-
- switch (type) {
- case BOOLEAN_TYPE:
- return booleanValue((Boolean)value);
- case TINYINT_TYPE:
- return byteValue((Byte)value);
- case SMALLINT_TYPE:
- return shortValue((Short)value);
- case INT_TYPE:
- return intValue((Integer)value);
- case BIGINT_TYPE:
- return longValue((Long)value);
- case FLOAT_TYPE:
- return floatValue((Float)value);
- case DOUBLE_TYPE:
- return doubleValue((Double)value);
- case STRING_TYPE:
- return stringValue((String)value);
- case CHAR_TYPE:
- return stringValue((HiveChar)value);
- case VARCHAR_TYPE:
- return stringValue((HiveVarchar)value);
- case DATE_TYPE:
- return dateValue((Date)value);
- case TIMESTAMP_TYPE:
- return timestampValue((Timestamp)value);
- case TIMESTAMPLOCALTZ_TYPE:
- return timestampTZValue((TimestampTZ) value);
- case INTERVAL_YEAR_MONTH_TYPE:
- return stringValue((HiveIntervalYearMonth) value);
- case INTERVAL_DAY_TIME_TYPE:
- return stringValue((HiveIntervalDayTime) value);
- case DECIMAL_TYPE:
- return stringValue((HiveDecimal)value, typeDescriptor);
- case BINARY_TYPE:
- return stringValue((String)value);
- case ARRAY_TYPE:
- case MAP_TYPE:
- case STRUCT_TYPE:
- case UNION_TYPE:
- case USER_DEFINED_TYPE:
- return stringValue((String)value);
- case NULL_TYPE:
- return stringValue((String)value);
- default:
- return null;
- }
- }
-
- private static Boolean getBooleanValue(TBoolValue tBoolValue) {
- if (tBoolValue.isSetValue()) {
- return tBoolValue.isValue();
- }
- return null;
- }
-
- private static Byte getByteValue(TByteValue tByteValue) {
- if (tByteValue.isSetValue()) {
- return tByteValue.getValue();
- }
- return null;
- }
-
- private static Short getShortValue(TI16Value tI16Value) {
- if (tI16Value.isSetValue()) {
- return tI16Value.getValue();
- }
- return null;
- }
-
- private static Integer getIntegerValue(TI32Value tI32Value) {
- if (tI32Value.isSetValue()) {
- return tI32Value.getValue();
- }
- return null;
- }
-
- private static Long getLongValue(TI64Value tI64Value) {
- if (tI64Value.isSetValue()) {
- return tI64Value.getValue();
- }
- return null;
- }
-
- private static Double getDoubleValue(TDoubleValue tDoubleValue) {
- if (tDoubleValue.isSetValue()) {
- return tDoubleValue.getValue();
- }
- return null;
- }
-
- private static String getStringValue(TStringValue tStringValue) {
- if (tStringValue.isSetValue()) {
- return tStringValue.getValue();
- }
- return null;
- }
-
- private static Timestamp getTimestampValue(TStringValue tStringValue) {
- if (tStringValue.isSetValue()) {
- return Timestamp.valueOf(tStringValue.getValue());
- }
- return null;
- }
-
- private static byte[] getBinaryValue(TStringValue tString) {
- if (tString.isSetValue()) {
- return tString.getValue().getBytes();
- }
- return null;
- }
-
- private static BigDecimal getBigDecimalValue(TStringValue tStringValue) {
- if (tStringValue.isSetValue()) {
- return new BigDecimal(tStringValue.getValue());
- }
- return null;
- }
-
- public static Object toColumnValue(TColumnValue value) {
- TColumnValue._Fields field = value.getSetField();
- switch (field) {
- case BOOL_VAL:
- return getBooleanValue(value.getBoolVal());
- case BYTE_VAL:
- return getByteValue(value.getByteVal());
- case I16_VAL:
- return getShortValue(value.getI16Val());
- case I32_VAL:
- return getIntegerValue(value.getI32Val());
- case I64_VAL:
- return getLongValue(value.getI64Val());
- case DOUBLE_VAL:
- return getDoubleValue(value.getDoubleVal());
- case STRING_VAL:
- return getStringValue(value.getStringVal());
- }
- throw new IllegalArgumentException("never");
- }
-}
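
A round-trip sketch for the conversions above (hypothetical, not from this patch): the server packs a typed Java value into the TColumnValue union, and the client unpacks it by inspecting which union field is set.

    import org.apache.hadoop.hive.serde2.thrift.Type;
    import org.apache.hive.service.rpc.thrift.TColumnValue;

    public class ColumnValueExample {
      public static void main(String[] args) {
        TypeDescriptor intType = new TypeDescriptor(Type.INT_TYPE);

        TColumnValue wire = ColumnValue.toTColumnValue(intType, 42); // packed as i32Val
        assert Integer.valueOf(42).equals(ColumnValue.toColumnValue(wire));

        // A null packs as a union member with no value set and unpacks back to null.
        TColumnValue nullWire = ColumnValue.toTColumnValue(intType, null);
        assert ColumnValue.toColumnValue(nullWire) == null;
      }
    }
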
diff --git service/src/java/org/apache/hive/service/cli/GetInfoType.java service/src/java/org/apache/hive/service/cli/GetInfoType.java
deleted file mode 100644
index 9837834..0000000
--- service/src/java/org/apache/hive/service/cli/GetInfoType.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import org.apache.hive.service.rpc.thrift.TGetInfoType;
-
-/**
- * GetInfoType.
- *
- */
-public enum GetInfoType {
- CLI_MAX_DRIVER_CONNECTIONS(TGetInfoType.CLI_MAX_DRIVER_CONNECTIONS),
- CLI_MAX_CONCURRENT_ACTIVITIES(TGetInfoType.CLI_MAX_CONCURRENT_ACTIVITIES),
- CLI_DATA_SOURCE_NAME(TGetInfoType.CLI_DATA_SOURCE_NAME),
- CLI_FETCH_DIRECTION(TGetInfoType.CLI_FETCH_DIRECTION),
- CLI_SERVER_NAME(TGetInfoType.CLI_SERVER_NAME),
- CLI_SEARCH_PATTERN_ESCAPE(TGetInfoType.CLI_SEARCH_PATTERN_ESCAPE),
- CLI_DBMS_NAME(TGetInfoType.CLI_DBMS_NAME),
- CLI_DBMS_VER(TGetInfoType.CLI_DBMS_VER),
- CLI_ACCESSIBLE_TABLES(TGetInfoType.CLI_ACCESSIBLE_TABLES),
- CLI_ACCESSIBLE_PROCEDURES(TGetInfoType.CLI_ACCESSIBLE_PROCEDURES),
- CLI_CURSOR_COMMIT_BEHAVIOR(TGetInfoType.CLI_CURSOR_COMMIT_BEHAVIOR),
- CLI_DATA_SOURCE_READ_ONLY(TGetInfoType.CLI_DATA_SOURCE_READ_ONLY),
- CLI_DEFAULT_TXN_ISOLATION(TGetInfoType.CLI_DEFAULT_TXN_ISOLATION),
- CLI_IDENTIFIER_CASE(TGetInfoType.CLI_IDENTIFIER_CASE),
- CLI_IDENTIFIER_QUOTE_CHAR(TGetInfoType.CLI_IDENTIFIER_QUOTE_CHAR),
- CLI_MAX_COLUMN_NAME_LEN(TGetInfoType.CLI_MAX_COLUMN_NAME_LEN),
- CLI_MAX_CURSOR_NAME_LEN(TGetInfoType.CLI_MAX_CURSOR_NAME_LEN),
- CLI_MAX_SCHEMA_NAME_LEN(TGetInfoType.CLI_MAX_SCHEMA_NAME_LEN),
- CLI_MAX_CATALOG_NAME_LEN(TGetInfoType.CLI_MAX_CATALOG_NAME_LEN),
- CLI_MAX_TABLE_NAME_LEN(TGetInfoType.CLI_MAX_TABLE_NAME_LEN),
- CLI_SCROLL_CONCURRENCY(TGetInfoType.CLI_SCROLL_CONCURRENCY),
- CLI_TXN_CAPABLE(TGetInfoType.CLI_TXN_CAPABLE),
- CLI_USER_NAME(TGetInfoType.CLI_USER_NAME),
- CLI_TXN_ISOLATION_OPTION(TGetInfoType.CLI_TXN_ISOLATION_OPTION),
- CLI_INTEGRITY(TGetInfoType.CLI_INTEGRITY),
- CLI_GETDATA_EXTENSIONS(TGetInfoType.CLI_GETDATA_EXTENSIONS),
- CLI_NULL_COLLATION(TGetInfoType.CLI_NULL_COLLATION),
- CLI_ALTER_TABLE(TGetInfoType.CLI_ALTER_TABLE),
- CLI_ORDER_BY_COLUMNS_IN_SELECT(TGetInfoType.CLI_ORDER_BY_COLUMNS_IN_SELECT),
- CLI_SPECIAL_CHARACTERS(TGetInfoType.CLI_SPECIAL_CHARACTERS),
- CLI_MAX_COLUMNS_IN_GROUP_BY(TGetInfoType.CLI_MAX_COLUMNS_IN_GROUP_BY),
- CLI_MAX_COLUMNS_IN_INDEX(TGetInfoType.CLI_MAX_COLUMNS_IN_INDEX),
- CLI_MAX_COLUMNS_IN_ORDER_BY(TGetInfoType.CLI_MAX_COLUMNS_IN_ORDER_BY),
- CLI_MAX_COLUMNS_IN_SELECT(TGetInfoType.CLI_MAX_COLUMNS_IN_SELECT),
- CLI_MAX_COLUMNS_IN_TABLE(TGetInfoType.CLI_MAX_COLUMNS_IN_TABLE),
- CLI_MAX_INDEX_SIZE(TGetInfoType.CLI_MAX_INDEX_SIZE),
- CLI_MAX_ROW_SIZE(TGetInfoType.CLI_MAX_ROW_SIZE),
- CLI_MAX_STATEMENT_LEN(TGetInfoType.CLI_MAX_STATEMENT_LEN),
- CLI_MAX_TABLES_IN_SELECT(TGetInfoType.CLI_MAX_TABLES_IN_SELECT),
- CLI_MAX_USER_NAME_LEN(TGetInfoType.CLI_MAX_USER_NAME_LEN),
- CLI_OJ_CAPABILITIES(TGetInfoType.CLI_OJ_CAPABILITIES),
-
- CLI_XOPEN_CLI_YEAR(TGetInfoType.CLI_XOPEN_CLI_YEAR),
- CLI_CURSOR_SENSITIVITY(TGetInfoType.CLI_CURSOR_SENSITIVITY),
- CLI_DESCRIBE_PARAMETER(TGetInfoType.CLI_DESCRIBE_PARAMETER),
- CLI_CATALOG_NAME(TGetInfoType.CLI_CATALOG_NAME),
- CLI_COLLATION_SEQ(TGetInfoType.CLI_COLLATION_SEQ),
- CLI_MAX_IDENTIFIER_LEN(TGetInfoType.CLI_MAX_IDENTIFIER_LEN),
- CLI_ODBC_KEYWORDS(TGetInfoType.CLI_ODBC_KEYWORDS);
-
- private final TGetInfoType tInfoType;
-
- GetInfoType(TGetInfoType tInfoType) {
- this.tInfoType = tInfoType;
- }
-
- public static GetInfoType getGetInfoType(TGetInfoType tGetInfoType) {
- for (GetInfoType infoType : values()) {
- if (tGetInfoType.equals(infoType.tInfoType)) {
- return infoType;
- }
- }
- throw new IllegalArgumentException("Unrecognized Thrift TGetInfoType value: " + tGetInfoType);
- }
-
- public TGetInfoType toTGetInfoType() {
- return tInfoType;
- }
-
-}
diff --git service/src/java/org/apache/hive/service/cli/HiveSQLException.java service/src/java/org/apache/hive/service/cli/HiveSQLException.java
deleted file mode 100644
index 5f9ff43..0000000
--- service/src/java/org/apache/hive/service/cli/HiveSQLException.java
+++ /dev/null
@@ -1,250 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hive.service.rpc.thrift.TStatus;
-import org.apache.hive.service.rpc.thrift.TStatusCode;
-
-/**
- * HiveSQLException.
- *
- */
-public class HiveSQLException extends SQLException {
-
- /**
- *
- */
- private static final long serialVersionUID = -6095254671958748094L;
-
- /**
- *
- */
- public HiveSQLException() {
- super();
- }
-
- /**
- * @param reason
- */
- public HiveSQLException(String reason) {
- super(reason);
- }
-
- /**
- * @param cause
- */
- public HiveSQLException(Throwable cause) {
- super(cause);
- }
-
- /**
- * @param reason
- * @param sqlState
- */
- public HiveSQLException(String reason, String sqlState) {
- super(reason, sqlState);
- }
-
- /**
- * @param reason
- * @param cause
- */
- public HiveSQLException(String reason, Throwable cause) {
- super(reason, cause);
- }
-
- /**
- * @param reason
- * @param sqlState
- * @param vendorCode
- */
- public HiveSQLException(String reason, String sqlState, int vendorCode) {
- super(reason, sqlState, vendorCode);
- }
-
- /**
- * @param reason
- * @param sqlState
- * @param cause
- */
- public HiveSQLException(String reason, String sqlState, Throwable cause) {
- super(reason, sqlState, cause);
- }
-
- /**
- * @param reason
- * @param sqlState
- * @param vendorCode
- * @param cause
- */
- public HiveSQLException(String reason, String sqlState, int vendorCode, Throwable cause) {
- super(reason, sqlState, vendorCode, cause);
- }
-
- public HiveSQLException(TStatus status) {
- // TODO: set correct vendorCode field
- super(status.getErrorMessage(), status.getSqlState(), status.getErrorCode());
- if (status.getInfoMessages() != null) {
- initCause(toCause(status.getInfoMessages()));
- }
- }
-
- /**
- * Converts the current object to a {@link TStatus} object.
- * @return a {@link TStatus} object
- */
- public TStatus toTStatus() {
- // TODO: convert sqlState, etc.
- TStatus tStatus = new TStatus(TStatusCode.ERROR_STATUS);
- tStatus.setSqlState(getSQLState());
- tStatus.setErrorCode(getErrorCode());
- tStatus.setErrorMessage(getMessage());
- tStatus.setInfoMessages(toString(this));
- return tStatus;
- }
-
- /**
- * Converts the specified {@link Exception} object into a {@link TStatus} object
- * @param e a {@link Exception} object
- * @return a {@link TStatus} object
- */
- public static TStatus toTStatus(Exception e) {
- if (e instanceof HiveSQLException) {
- return ((HiveSQLException)e).toTStatus();
- }
- TStatus tStatus = new TStatus(TStatusCode.ERROR_STATUS);
- tStatus.setErrorMessage(e.getMessage());
- tStatus.setInfoMessages(toString(e));
- return tStatus;
- }
-
- /**
- * Converts a {@link Throwable} object into a flattened list of texts including its stack trace
- * and the stack traces of the nested causes.
- * @param ex a {@link Throwable} object
- * @return a flattened list of texts including the {@link Throwable} object's stack trace
- * and the stack traces of the nested causes.
- */
- public static List<String> toString(Throwable ex) {
- return toString(ex, null);
- }
-
- private static List<String> toString(Throwable cause, StackTraceElement[] parent) {
- StackTraceElement[] trace = cause.getStackTrace();
- int m = trace.length - 1;
- if (parent != null) {
- int n = parent.length - 1;
- while (m >= 0 && n >= 0 && trace[m].equals(parent[n])) {
- m--; n--;
- }
- }
- List<String> detail = enroll(cause, trace, m);
- cause = cause.getCause();
- if (cause != null) {
- detail.addAll(toString(cause, trace));
- }
- return detail;
- }
-
- private static List<String> enroll(Throwable ex, StackTraceElement[] trace, int max) {
- List<String> details = new ArrayList<String>();
- StringBuilder builder = new StringBuilder();
- builder.append('*').append(ex.getClass().getName()).append(':');
- builder.append(ex.getMessage()).append(':');
- builder.append(trace.length).append(':').append(max);
- details.add(builder.toString());
- for (int i = 0; i <= max; i++) {
- builder.setLength(0);
- builder.append(trace[i].getClassName()).append(':');
- builder.append(trace[i].getMethodName()).append(':');
- String fileName = trace[i].getFileName();
- builder.append(fileName == null ? "" : fileName).append(':');
- builder.append(trace[i].getLineNumber());
- details.add(builder.toString());
- }
- return details;
- }
-
- /**
- * Converts a flattened list of texts including the stack trace and the stack
- * traces of the nested causes into a {@link Throwable} object.
- * @param details a flattened list of texts including the stack trace and the stack
- * traces of the nested causes
- * @return a {@link Throwable} object
- */
- public static Throwable toCause(List<String> details) {
- return toStackTrace(details, null, 0);
- }
-
- private static Throwable toStackTrace(List<String> details, StackTraceElement[] parent, int index) {
- String detail = details.get(index++);
- if (!detail.startsWith("*")) {
- return null; // should not happen; ignore the remaining details
- }
- int i1 = detail.indexOf(':');
- int i3 = detail.lastIndexOf(':');
- int i2 = detail.substring(0, i3).lastIndexOf(':');
- String exceptionClass = detail.substring(1, i1);
- String exceptionMessage = detail.substring(i1 + 1, i2);
- Throwable ex = newInstance(exceptionClass, exceptionMessage);
-
- int length = Integer.parseInt(detail.substring(i2 + 1, i3));
- int unique = Integer.parseInt(detail.substring(i3 + 1));
-
- int i = 0;
- StackTraceElement[] trace = new StackTraceElement[length];
- for (; i <= unique; i++) {
- detail = details.get(index++);
- int j1 = detail.indexOf(':');
- int j3 = detail.lastIndexOf(':');
- int j2 = detail.substring(0, j3).lastIndexOf(':');
- String className = detail.substring(0, j1);
- String methodName = detail.substring(j1 + 1, j2);
- String fileName = detail.substring(j2 + 1, j3);
- if (fileName.isEmpty()) {
- fileName = null;
- }
- int lineNumber = Integer.parseInt(detail.substring(j3 + 1));
- trace[i] = new StackTraceElement(className, methodName, fileName, lineNumber);
- }
- int common = trace.length - i;
- if (common > 0) {
- System.arraycopy(parent, parent.length - common, trace, trace.length - common, common);
- }
- if (details.size() > index) {
- ex.initCause(toStackTrace(details, trace, index));
- }
- ex.setStackTrace(trace);
- return ex;
- }
-
- private static Throwable newInstance(String className, String message) {
- try {
- return (Throwable)Class.forName(className).getConstructor(String.class).newInstance(message);
- } catch (Exception e) {
- return new RuntimeException(className + ":" + message);
- }
- }
-}
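
A sketch of the stack-trace flattening above (hypothetical, not from this patch): toString(Throwable) encodes each throwable as a header line (class, message, and frame counts) followed by one class:method:file:line entry per stack frame, and toCause(List) rebuilds the exception chain from those strings on the receiving side.

    import java.util.List;

    public class HiveSQLExceptionExample {
      public static void main(String[] args) {
        Exception cause = new IllegalStateException("session expired");
        HiveSQLException ex = new HiveSQLException("query failed", "08S01", cause);

        // This is what toTStatus() ships in TStatus.infoMessages.
        List<String> flattened = HiveSQLException.toString(ex);

        Throwable rebuilt = HiveSQLException.toCause(flattened);
        assert rebuilt instanceof HiveSQLException;
        assert "query failed".equals(rebuilt.getMessage());
        assert rebuilt.getCause() instanceof IllegalStateException;
      }
    }
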
diff --git service/src/java/org/apache/hive/service/cli/RowBasedSet.java service/src/java/org/apache/hive/service/cli/RowBasedSet.java
deleted file mode 100644
index abfb8d5..0000000
--- service/src/java/org/apache/hive/service/cli/RowBasedSet.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.hive.service.rpc.thrift.TColumnValue;
-import org.apache.hive.service.rpc.thrift.TRow;
-import org.apache.hive.service.rpc.thrift.TRowSet;
-
-/**
- * RowBasedSet
- */
-public class RowBasedSet implements RowSet {
-
- private long startOffset;
-
- private final TypeDescriptor[] descriptors; // non-null only for writing (server-side)
- private final RemovableList<TRow> rows;
-
- public RowBasedSet(TableSchema schema) {
- descriptors = schema.toTypeDescriptors();
- rows = new RemovableList<TRow>();
- }
-
- public RowBasedSet(TRowSet tRowSet) {
- descriptors = null;
- rows = new RemovableList<TRow>(tRowSet.getRows());
- startOffset = tRowSet.getStartRowOffset();
- }
-
- private RowBasedSet(TypeDescriptor[] descriptors, List<TRow> rows, long startOffset) {
- this.descriptors = descriptors;
- this.rows = new RemovableList<TRow>(rows);
- this.startOffset = startOffset;
- }
-
- @Override
- public RowBasedSet addRow(Object[] fields) {
- TRow tRow = new TRow();
- for (int i = 0; i < fields.length; i++) {
- tRow.addToColVals(ColumnValue.toTColumnValue(descriptors[i], fields[i]));
- }
- rows.add(tRow);
- return this;
- }
-
- @Override
- public int numColumns() {
- return rows.isEmpty() ? 0 : rows.get(0).getColVals().size();
- }
-
- @Override
- public int numRows() {
- return rows.size();
- }
-
- public RowBasedSet extractSubset(int maxRows) {
- int numRows = Math.min(numRows(), maxRows);
- RowBasedSet result = new RowBasedSet(descriptors, rows.subList(0, numRows), startOffset);
- rows.removeRange(0, numRows);
- startOffset += numRows;
- return result;
- }
-
- public long getStartOffset() {
- return startOffset;
- }
-
- public void setStartOffset(long startOffset) {
- this.startOffset = startOffset;
- }
-
- public int getSize() {
- return rows.size();
- }
-
- public TRowSet toTRowSet() {
- TRowSet tRowSet = new TRowSet();
- tRowSet.setStartRowOffset(startOffset);
- tRowSet.setRows(new ArrayList<TRow>(rows));
- return tRowSet;
- }
-
- @Override
- public Iterator<Object[]> iterator() {
- return new Iterator<Object[]>() {
-
- final Iterator<TRow> iterator = rows.iterator();
- final Object[] convey = new Object[numColumns()];
-
- @Override
- public boolean hasNext() {
- return iterator.hasNext();
- }
-
- @Override
- public Object[] next() {
- TRow row = iterator.next();
- List<TColumnValue> values = row.getColVals();
- for (int i = 0; i < values.size(); i++) {
- convey[i] = ColumnValue.toColumnValue(values.get(i));
- }
- return convey;
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException("remove");
- }
- };
- }
-
- private static class RemovableList<E> extends ArrayList<E> {
- public RemovableList() { super(); }
- public RemovableList(List<E> rows) { super(rows); }
- @Override
- public void removeRange(int fromIndex, int toIndex) {
- super.removeRange(fromIndex, toIndex);
- }
- }
-}
diff --git service/src/java/org/apache/hive/service/cli/RowSet.java service/src/java/org/apache/hive/service/cli/RowSet.java
deleted file mode 100644
index d61775a..0000000
--- service/src/java/org/apache/hive/service/cli/RowSet.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import org.apache.hive.service.rpc.thrift.TRowSet;
-
- public interface RowSet extends Iterable<Object[]> {
-
- RowSet addRow(Object[] fields);
-
- RowSet extractSubset(int maxRows);
-
- int numColumns();
-
- int numRows();
-
- long getStartOffset();
-
- void setStartOffset(long startOffset);
-
- TRowSet toTRowSet();
-}
diff --git service/src/java/org/apache/hive/service/cli/RowSetFactory.java service/src/java/org/apache/hive/service/cli/RowSetFactory.java
deleted file mode 100644
index fdc9ecd..0000000
--- service/src/java/org/apache/hive/service/cli/RowSetFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import org.apache.hive.service.rpc.thrift.TProtocolVersion;
-import org.apache.hive.service.rpc.thrift.TRowSet;
-import org.apache.thrift.TException;
-
-import static org.apache.hive.service.rpc.thrift.TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6;
-
-public class RowSetFactory {
-
- // This call is accessed from server side
- public static RowSet create(TableSchema schema, TProtocolVersion version, boolean isBlobBased) {
- if (version.getValue() >= HIVE_CLI_SERVICE_PROTOCOL_V6.getValue()) {
- return new ColumnBasedSet(schema, isBlobBased);
- }
- return new RowBasedSet(schema);
- }
-
- // This call is accessed from client (jdbc) side
- public static RowSet create(TRowSet results, TProtocolVersion version) throws TException {
- if (version.getValue() >= HIVE_CLI_SERVICE_PROTOCOL_V6.getValue()) {
- return new ColumnBasedSet(results);
- }
- return new RowBasedSet(results);
- }
-}
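
A sketch of the version switch above (hypothetical, not from this patch): the factory returns the columnar encoding for protocol V6 and later, and the row-based encoding for older clients.

    import org.apache.hive.service.rpc.thrift.TProtocolVersion;

    public class RowSetFactoryExample {
      public static void main(String[] args) {
        TableSchema schema = new TableSchema().addStringColumn("msg", "a message");

        RowSet modern = RowSetFactory.create(
            schema, TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6, false);
        assert modern instanceof ColumnBasedSet;

        RowSet legacy = RowSetFactory.create(
            schema, TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V5, false);
        assert legacy instanceof RowBasedSet;
      }
    }
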
diff --git service/src/java/org/apache/hive/service/cli/TableSchema.java service/src/java/org/apache/hive/service/cli/TableSchema.java
deleted file mode 100644
index d881218..0000000
--- service/src/java/org/apache/hive/service/cli/TableSchema.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Schema;
-import org.apache.hadoop.hive.serde2.thrift.Type;
-import org.apache.hive.service.rpc.thrift.TColumnDesc;
-import org.apache.hive.service.rpc.thrift.TTableSchema;
-
-/**
- * TableSchema.
- *
- */
-public class TableSchema {
- private final List<ColumnDescriptor> columns = new ArrayList<ColumnDescriptor>();
-
- public TableSchema() {
- }
-
- public TableSchema(int numColumns) {
- // TODO: remove this constructor
- }
-
- public TableSchema(TTableSchema tTableSchema) {
- for (TColumnDesc tColumnDesc : tTableSchema.getColumns()) {
- columns.add(new ColumnDescriptor(tColumnDesc));
- }
- }
-
- public TableSchema(List<FieldSchema> fieldSchemas) {
- int pos = 1;
- for (FieldSchema field : fieldSchemas) {
- columns.add(new ColumnDescriptor(field.getName(), field.getComment(), new TypeDescriptor(
- field.getType()), pos++));
- }
- }
-
- public TableSchema(Schema schema) {
- this(schema.getFieldSchemas());
- }
-
- public List<ColumnDescriptor> getColumnDescriptors() {
- return new ArrayList<ColumnDescriptor>(columns);
- }
-
- public ColumnDescriptor getColumnDescriptorAt(int pos) {
- return columns.get(pos);
- }
-
- public int getSize() {
- return columns.size();
- }
-
- public void clear() {
- columns.clear();
- }
-
-
- public TTableSchema toTTableSchema() {
- TTableSchema tTableSchema = new TTableSchema();
- for (ColumnDescriptor col : columns) {
- tTableSchema.addToColumns(col.toTColumnDesc());
- }
- return tTableSchema;
- }
-
- public TypeDescriptor[] toTypeDescriptors() {
- TypeDescriptor[] types = new TypeDescriptor[columns.size()];
- for (int i = 0; i < types.length; i++) {
- types[i] = columns.get(i).getTypeDescriptor();
- }
- return types;
- }
-
- public TableSchema addPrimitiveColumn(String columnName, Type columnType, String columnComment) {
- columns.add(ColumnDescriptor.newPrimitiveColumnDescriptor(columnName, columnComment, columnType, columns.size() + 1));
- return this;
- }
-
- public TableSchema addStringColumn(String columnName, String columnComment) {
- columns.add(ColumnDescriptor.newPrimitiveColumnDescriptor(columnName, columnComment, Type.STRING_TYPE, columns.size() + 1));
- return this;
- }
-}
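
TableSchema is shared vocabulary between the server and the JDBC client, hence its move as well. A small sketch of the fluent helpers and the Thrift round trip, built only from methods visible above (class and column names are illustrative):

    import org.apache.hadoop.hive.serde2.thrift.Type;
    import org.apache.hive.service.cli.TableSchema;
    import org.apache.hive.service.rpc.thrift.TTableSchema;

    public class TableSchemaRoundTrip {
      public static void main(String[] args) {
        TableSchema schema = new TableSchema()
            .addStringColumn("name", "user name")
            .addPrimitiveColumn("age", Type.INT_TYPE, "age in years");
        TTableSchema wire = schema.toTTableSchema();   // server -> wire form
        TableSchema decoded = new TableSchema(wire);   // wire form -> client
        System.out.println(decoded.getSize());         // prints 2
      }
    }
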
diff --git service/src/java/org/apache/hive/service/cli/TypeDescriptor.java service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
deleted file mode 100644
index cb17450..0000000
--- service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import java.util.List;
-
-import org.apache.hadoop.hive.serde2.thrift.Type;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hive.service.rpc.thrift.TPrimitiveTypeEntry;
-import org.apache.hive.service.rpc.thrift.TTypeDesc;
-import org.apache.hive.service.rpc.thrift.TTypeEntry;
-
-/**
- * TypeDescriptor.
- *
- */
-public class TypeDescriptor {
-
- private final Type type;
- private String typeName = null;
- private TypeQualifiers typeQualifiers = null;
-
- public TypeDescriptor(Type type) {
- this.type = type;
- }
-
- public TypeDescriptor(TTypeDesc tTypeDesc) {
- List<TTypeEntry> tTypeEntries = tTypeDesc.getTypes();
- TPrimitiveTypeEntry top = tTypeEntries.get(0).getPrimitiveEntry();
- this.type = Type.getType(top.getType());
- if (top.isSetTypeQualifiers()) {
- setTypeQualifiers(TypeQualifiers.fromTTypeQualifiers(top.getTypeQualifiers()));
- }
- }
-
- public TypeDescriptor(String typeName) {
- this.type = Type.getType(typeName);
- if (this.type.isComplexType()) {
- this.typeName = typeName;
- } else if (this.type.isQualifiedType()) {
- PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
- setTypeQualifiers(TypeQualifiers.fromTypeInfo(pti));
- }
- }
-
- public Type getType() {
- return type;
- }
-
- public TTypeDesc toTTypeDesc() {
- TPrimitiveTypeEntry primitiveEntry = new TPrimitiveTypeEntry(type.toTType());
- if (getTypeQualifiers() != null) {
- primitiveEntry.setTypeQualifiers(getTypeQualifiers().toTTypeQualifiers());
- }
- TTypeEntry entry = TTypeEntry.primitiveEntry(primitiveEntry);
-
- TTypeDesc desc = new TTypeDesc();
- desc.addToTypes(entry);
- return desc;
- }
-
- public String getTypeName() {
- if (typeName != null) {
- return typeName;
- } else {
- return type.getName();
- }
- }
-
- public TypeQualifiers getTypeQualifiers() {
- return typeQualifiers;
- }
-
- public void setTypeQualifiers(TypeQualifiers typeQualifiers) {
- this.typeQualifiers = typeQualifiers;
- }
-
- /**
- * The column size for this type.
- * For numeric data this is the maximum precision.
- * For character data this is the length in characters.
- * For datetime types this is the length in characters of the String representation
- * (assuming the maximum allowed precision of the fractional seconds component).
- * For binary data this is the length in bytes.
- * Null is returned for data types where the column size is not applicable.
- */
- public Integer getColumnSize() {
- if (type.isNumericType()) {
- return getPrecision();
- }
- switch (type) {
- case STRING_TYPE:
- case BINARY_TYPE:
- return Integer.MAX_VALUE;
- case CHAR_TYPE:
- case VARCHAR_TYPE:
- return typeQualifiers.getCharacterMaximumLength();
- case DATE_TYPE:
- return 10;
- case TIMESTAMP_TYPE:
- return 29;
- case TIMESTAMPLOCALTZ_TYPE:
- return 31;
- default:
- return null;
- }
- }
-
- /**
- * Maximum precision for numeric types.
- * Returns null for non-numeric types.
- * @return the maximum precision, or null for non-numeric types
- */
- public Integer getPrecision() {
- if (this.type == Type.DECIMAL_TYPE) {
- return typeQualifiers.getPrecision();
- }
- return this.type.getMaxPrecision();
- }
-
- /**
- * The number of fractional digits for this type.
- * Null is returned for data types where this is not applicable.
- */
- public Integer getDecimalDigits() {
- switch (this.type) {
- case BOOLEAN_TYPE:
- case TINYINT_TYPE:
- case SMALLINT_TYPE:
- case INT_TYPE:
- case BIGINT_TYPE:
- return 0;
- case FLOAT_TYPE:
- return 7;
- case DOUBLE_TYPE:
- return 15;
- case DECIMAL_TYPE:
- return typeQualifiers.getScale();
- case TIMESTAMP_TYPE:
- return 9;
- default:
- return null;
- }
- }
-}
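
The JDBC metadata numbers (column size, precision, scale) come straight from these getters, so the class has to live where the driver lives. A sketch of how qualified type names feed them, assuming the usual Hive type-name syntax:

    import org.apache.hive.service.cli.TypeDescriptor;

    public class TypeDescriptorSketch {
      public static void main(String[] args) {
        TypeDescriptor varchar = new TypeDescriptor("varchar(100)");
        System.out.println(varchar.getColumnSize());   // 100, from the parsed length qualifier

        TypeDescriptor dec = new TypeDescriptor("decimal(10,2)");
        System.out.println(dec.getPrecision());        // 10
        System.out.println(dec.getDecimalDigits());    // 2
      }
    }
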
diff --git service/src/java/org/apache/hive/service/cli/TypeQualifiers.java service/src/java/org/apache/hive/service/cli/TypeQualifiers.java
deleted file mode 100644
index 6ba865a..0000000
--- service/src/java/org/apache/hive/service/cli/TypeQualifiers.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
-import org.apache.hive.service.rpc.thrift.TCLIServiceConstants;
-import org.apache.hive.service.rpc.thrift.TTypeQualifierValue;
-import org.apache.hive.service.rpc.thrift.TTypeQualifiers;
-
-/**
- * This class holds type qualifier information for a primitive type,
- * such as char/varchar length or decimal precision/scale.
- */
-public class TypeQualifiers {
- private Integer characterMaximumLength;
- private Integer precision;
- private Integer scale;
-
- public TypeQualifiers() {}
-
- public Integer getCharacterMaximumLength() {
- return characterMaximumLength;
- }
- public void setCharacterMaximumLength(int characterMaximumLength) {
- this.characterMaximumLength = characterMaximumLength;
- }
-
- public TTypeQualifiers toTTypeQualifiers() {
- TTypeQualifiers ret = null;
-
- Map<String, TTypeQualifierValue> qMap = new HashMap<String, TTypeQualifierValue>();
- if (getCharacterMaximumLength() != null) {
- TTypeQualifierValue val = new TTypeQualifierValue();
- val.setI32Value(getCharacterMaximumLength().intValue());
- qMap.put(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH, val);
- }
-
- if (precision != null) {
- TTypeQualifierValue val = new TTypeQualifierValue();
- val.setI32Value(precision.intValue());
- qMap.put(TCLIServiceConstants.PRECISION, val);
- }
-
- if (scale != null) {
- TTypeQualifierValue val = new TTypeQualifierValue();
- val.setI32Value(scale.intValue());
- qMap.put(TCLIServiceConstants.SCALE, val);
- }
-
- if (qMap.size() > 0) {
- ret = new TTypeQualifiers(qMap);
- }
-
- return ret;
- }
-
- public static TypeQualifiers fromTTypeQualifiers(TTypeQualifiers ttq) {
- TypeQualifiers ret = null;
- if (ttq != null) {
- ret = new TypeQualifiers();
- Map<String, TTypeQualifierValue> tqMap = ttq.getQualifiers();
-
- if (tqMap.containsKey(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH)) {
- ret.setCharacterMaximumLength(
- tqMap.get(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH).getI32Value());
- }
-
- if (tqMap.containsKey(TCLIServiceConstants.PRECISION)) {
- ret.setPrecision(tqMap.get(TCLIServiceConstants.PRECISION).getI32Value());
- }
-
- if (tqMap.containsKey(TCLIServiceConstants.SCALE)) {
- ret.setScale(tqMap.get(TCLIServiceConstants.SCALE).getI32Value());
- }
- }
- return ret;
- }
-
- public static TypeQualifiers fromTypeInfo(PrimitiveTypeInfo pti) {
- TypeQualifiers result = null;
- if (pti instanceof VarcharTypeInfo) {
- result = new TypeQualifiers();
- result.setCharacterMaximumLength(((VarcharTypeInfo)pti).getLength());
- } else if (pti instanceof CharTypeInfo) {
- result = new TypeQualifiers();
- result.setCharacterMaximumLength(((CharTypeInfo)pti).getLength());
- } else if (pti instanceof DecimalTypeInfo) {
- result = new TypeQualifiers();
- result.setPrecision(((DecimalTypeInfo)pti).precision());
- result.setScale(((DecimalTypeInfo)pti).scale());
- }
- return result;
- }
-
- public Integer getPrecision() {
- return precision;
- }
-
- public void setPrecision(Integer precision) {
- this.precision = precision;
- }
-
- public Integer getScale() {
- return scale;
- }
-
- public void setScale(Integer scale) {
- this.scale = scale;
- }
-
-}
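
A round-trip sketch tying the three conversion paths above together; precision and scale travel as i32 qualifier values:

    import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
    import org.apache.hive.service.cli.TypeQualifiers;
    import org.apache.hive.service.rpc.thrift.TTypeQualifiers;

    public class TypeQualifiersRoundTrip {
      public static void main(String[] args) {
        TypeQualifiers tq = TypeQualifiers.fromTypeInfo(new DecimalTypeInfo(10, 2));
        TTypeQualifiers wire = tq.toTTypeQualifiers();   // carries PRECISION and SCALE entries
        TypeQualifiers decoded = TypeQualifiers.fromTTypeQualifiers(wire);
        System.out.println(decoded.getPrecision() + "," + decoded.getScale());   // 10,2
      }
    }
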
diff --git service/src/java/org/apache/hive/service/cli/session/SessionUtils.java service/src/java/org/apache/hive/service/cli/session/SessionUtils.java
deleted file mode 100644
index 00d3112..0000000
--- service/src/java/org/apache/hive/service/cli/session/SessionUtils.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.cli.session;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier;
-import org.apache.hadoop.hive.metastore.security.DelegationTokenSelector;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.security.token.TokenSelector;
-
-public class SessionUtils {
- /**
- * Get the string form of the token given a token signature. The signature is used as the value of
- * the "service" field in the token for lookup. Ref: AbstractDelegationTokenSelector in Hadoop. If
- * there exists such a token in the token cache (credential store) of the job, the lookup returns
- * that. This is relevant only when running against a "secure" Hadoop release. The method gets hold
- * of the tokens if they are set up by Hadoop - this should happen on the map/reduce tasks if the
- * client added the tokens into Hadoop's credential store in the front end during job submission.
- * The method selects the Hive delegation token among the set of tokens and returns its string
- * form.
- *
- * @param tokenSignature
- * @return the string form of the token found
- * @throws IOException
- */
- public static String getTokenStrForm(String tokenSignature) throws IOException {
- UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
- TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector();
-
- Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
- tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
- return token != null ? token.encodeToUrlString() : null;
- }
-
- /**
- * Create a delegation token object for the given token string and service. Add the token to given
- * UGI
- *
- * @param ugi
- * @param tokenStr
- * @param tokenService
- * @throws IOException
- */
- public static void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
- throws IOException {
- Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
- ugi.addToken(delegationToken);
- }
-
- /**
- * Add a given service to delegation token string.
- *
- * @param tokenStr
- * @param tokenService
- * @return the URL-encoded token string with the given service set
- * @throws IOException
- */
- public static String addServiceToToken(String tokenStr, String tokenService) throws IOException {
- Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
- return delegationToken.encodeToUrlString();
- }
-
- /**
- * Create a new token using the given string and service
- *
- * @param tokenStr
- * @param tokenService
- * @return the decoded token with its service set
- * @throws IOException
- */
- private static Token<DelegationTokenIdentifier> createToken(String tokenStr, String tokenService)
- throws IOException {
- Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
- delegationToken.decodeFromUrlString(tokenStr);
- delegationToken.setService(new Text(tokenService));
- return delegationToken;
- }
-}
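
SessionUtils is the delegation-token glue the JDBC driver needs on kerberized clusters, which is why it leaves the server module. A sketch of the intended call sequence; the token signature and service strings are illustrative, not fixed constants:

    import java.io.IOException;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hive.service.cli.session.SessionUtils;

    public class TokenAttachSketch {
      static void attachHiveToken(String tokenService) throws IOException {
        // Look up the Hive delegation token Hadoop placed in the credential store, if any.
        String tokenStr = SessionUtils.getTokenStrForm("hiveserver2ClientToken");
        if (tokenStr != null) {
          // Re-register it under the service name the connection will use for lookup.
          SessionUtils.setTokenStr(UserGroupInformation.getCurrentUser(), tokenStr, tokenService);
        }
      }
    }
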
diff --git service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
index 7ab7aee..a7d0de0 100644
--- service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
+++ service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
@@ -18,8 +18,6 @@
package org.apache.hive.service.cli.thrift;
-import java.util.Map;
-
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.ICLIService;
@@ -40,26 +38,6 @@
@Override
public synchronized void init(HiveConf hiveConf) {
- init(hiveConf, null);
- }
-
- public synchronized void init(HiveConf hiveConf, Map<String, String> confOverlay) {
- // A null HiveConf is passed from JDBC driver-side code, since the driver side is supposed to be
- // independent of the conf object. Create a new HiveConf object here in that case.
- if (hiveConf == null) {
- hiveConf = new HiveConf();
- }
- // Set the specific parameters if needed
- if (confOverlay != null && !confOverlay.isEmpty()) {
- // apply overlay query specific settings, if any
- for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
- try {
- hiveConf.set(confEntry.getKey(), confEntry.getValue());
- } catch (IllegalArgumentException e) {
- throw new RuntimeException("Error applying statement specific settings", e);
- }
- }
- }
cliService.init(hiveConf);
cliService.start();
super.init(hiveConf);
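
With the overlay handling gone, embedded-mode callers pre-apply statement-specific settings to the HiveConf before calling init; on the JDBC side that now happens before this class is reached. A sketch under the assumption that the class keeps its historical no-arg constructor (the config key is only an example):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;

    public class EmbeddedInitSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        conf.set("hive.exec.scratchdir", "/tmp/hive-embedded");   // a pre-applied "overlay" setting
        EmbeddedThriftBinaryCLIService service = new EmbeddedThriftBinaryCLIService();
        service.init(conf);   // the single-argument init is now the only entry point
      }
    }
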
diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 8f13fb3..2292a43 100644
--- service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -112,7 +112,7 @@
import org.apache.hive.service.rpc.thrift.TRenewDelegationTokenResp;
import org.apache.hive.service.rpc.thrift.TStatus;
import org.apache.hive.service.rpc.thrift.TStatusCode;
-import org.apache.hive.service.server.HiveServer2;
+import org.apache.hive.service.server.HiveServer2ClientUtils;
import org.apache.thrift.TException;
import org.apache.thrift.server.ServerContext;
import org.apache.thrift.server.TServer;
@@ -182,7 +182,7 @@
// Initialize common server configs needed in both binary & http modes
String portString;
// HTTP mode
- if (HiveServer2.isHTTPTransportMode(hiveConf)) {
+ if (HiveServer2ClientUtils.isHTTPTransportMode(hiveConf)) {
workerKeepAliveTime =
hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME,
TimeUnit.SECONDS);
diff --git service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java
deleted file mode 100644
index 95c1388..0000000
--- service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java
+++ /dev/null
@@ -1,381 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.server;
-
-import static org.apache.hive.service.server.HiveServer2.INSTANCE_URI_CONFIG;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.curator.framework.recipes.cache.PathChildrenCache;
-import org.apache.curator.framework.recipes.leader.LeaderLatch;
-import org.apache.curator.framework.recipes.leader.LeaderLatchListener;
-import org.apache.curator.utils.CloseableUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.llap.registry.ServiceRegistry;
-import org.apache.hadoop.hive.registry.ServiceInstanceSet;
-import org.apache.hadoop.hive.registry.ServiceInstanceStateChangeListener;
-import org.apache.hadoop.hive.registry.impl.ZkRegistryBase;
-import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
-import org.apache.hadoop.registry.client.types.Endpoint;
-import org.apache.hadoop.registry.client.types.ServiceRecord;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hive.service.ServiceException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Preconditions;
-
-public class HS2ActivePassiveHARegistry extends ZkRegistryBase<HiveServer2Instance> implements
- ServiceRegistry<HiveServer2Instance>, HiveServer2HAInstanceSet, HiveServer2.FailoverHandler {
- private static final Logger LOG = LoggerFactory.getLogger(HS2ActivePassiveHARegistry.class);
- static final String ACTIVE_ENDPOINT = "activeEndpoint";
- static final String PASSIVE_ENDPOINT = "passiveEndpoint";
- private static final String SASL_LOGIN_CONTEXT_NAME = "HS2ActivePassiveHAZooKeeperClient";
- private static final String INSTANCE_PREFIX = "instance-";
- private static final String INSTANCE_GROUP = "instances";
- private static final String LEADER_LATCH_PATH = "/_LEADER";
- private LeaderLatch leaderLatch;
- private Map<LeaderLatchListener, ExecutorService> registeredListeners = new HashMap<>();
- private String latchPath;
- private ServiceRecord srv;
- private boolean isClient;
- private final String uniqueId;
-
- // There are 2 paths under which the instances get registered
- // 1) Standard path used by ZkRegistryBase where all instances register themselves (also stores metadata)
- // Secure: /hs2ActivePassiveHA-sasl/instances/instance-0000000000
- // Unsecure: /hs2ActivePassiveHA-unsecure/instances/instance-0000000000
- // 2) Leader latch path used for HS2 HA Active/Passive configuration where all instances register under _LEADER
- // path but only one among them is the leader
- // Secure: /hs2ActivePassiveHA-sasl/_LEADER/xxxx-latch-0000000000
- // Unsecure: /hs2ActivePassiveHA-unsecure/_LEADER/xxxx-latch-0000000000
- static HS2ActivePassiveHARegistry create(Configuration conf, boolean isClient) {
- String zkNameSpace = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_REGISTRY_NAMESPACE);
- Preconditions.checkArgument(!StringUtils.isBlank(zkNameSpace),
- HiveConf.ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_REGISTRY_NAMESPACE.varname + " cannot be null or empty");
- String principal = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
- String keytab = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
- String zkNameSpacePrefix = zkNameSpace + "-";
- return new HS2ActivePassiveHARegistry(null, zkNameSpacePrefix, LEADER_LATCH_PATH, principal, keytab,
- isClient ? null : SASL_LOGIN_CONTEXT_NAME, conf, isClient);
- }
-
- private HS2ActivePassiveHARegistry(final String instanceName, final String zkNamespacePrefix,
- final String leaderLatchPath,
- final String krbPrincipal, final String krbKeytab, final String saslContextName, final Configuration conf,
- final boolean isClient) {
- super(instanceName, conf, null, zkNamespacePrefix, null, INSTANCE_PREFIX, INSTANCE_GROUP,
- saslContextName, krbPrincipal, krbKeytab, null);
- this.isClient = isClient;
- if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_IN_TEST) &&
- conf.get(ZkRegistryBase.UNIQUE_IDENTIFIER) != null) {
- this.uniqueId = conf.get(ZkRegistryBase.UNIQUE_IDENTIFIER);
- } else {
- this.uniqueId = UNIQUE_ID.toString();
- }
- this.latchPath = leaderLatchPath;
- this.leaderLatch = getNewLeaderLatchPath();
- }
-
- @Override
- public void start() throws IOException {
- super.start();
- if (!isClient) {
- this.srv = getNewServiceRecord();
- register();
- registerLeaderLatchListener(new HS2LeaderLatchListener(), null);
- try {
- // all participating instances use the same latch path, and Curator randomly chooses one instance to be leader,
- // which can be verified via leaderLatch.hasLeadership()
- leaderLatch.start();
- } catch (Exception e) {
- throw new IOException(e);
- }
- LOG.info("Registered HS2 with ZK. service record: {}", srv);
- } else {
- populateCache();
- LOG.info("Populating instances cache for client");
- }
- }
-
- @Override
- protected void unregisterInternal() {
- super.unregisterInternal();
- }
-
- @Override
- public String register() throws IOException {
- updateEndpoint(srv, PASSIVE_ENDPOINT);
- return registerServiceRecord(srv, uniqueId);
- }
-
- @Override
- public void unregister() {
- CloseableUtils.closeQuietly(leaderLatch);
- unregisterInternal();
- }
-
- @Override
- public void updateRegistration(Iterable<Map.Entry<String, String>> attributes) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- private void populateCache() throws IOException {
- PathChildrenCache pcc = ensureInstancesCache(0);
- populateCache(pcc, false);
- }
-
- @Override
- public ServiceInstanceSet<HiveServer2Instance> getInstances(final String component, final long clusterReadyTimeoutMs)
- throws IOException {
- throw new IOException("Not supported to get instances by component name");
- }
-
- private void addActiveEndpointToServiceRecord() throws IOException {
- addEndpointToServiceRecord(getNewServiceRecord(), ACTIVE_ENDPOINT);
- }
-
- private void addPassiveEndpointToServiceRecord() throws IOException {
- addEndpointToServiceRecord(getNewServiceRecord(), PASSIVE_ENDPOINT);
- }
-
- private void addEndpointToServiceRecord(
- final ServiceRecord srv, final String endpointName) throws IOException {
- updateEndpoint(srv, endpointName);
- updateServiceRecord(srv, doCheckAcls, true);
- }
-
- private void updateEndpoint(final ServiceRecord srv, final String endpointName) {
- final String instanceUri = srv.get(INSTANCE_URI_CONFIG);
- final String[] tokens = instanceUri.split(":");
- final String hostname = tokens[0];
- final int port = Integer.parseInt(tokens[1]);
- Endpoint urlEndpoint = RegistryTypeUtils.ipcEndpoint(endpointName, new InetSocketAddress(hostname, port));
- srv.addInternalEndpoint(urlEndpoint);
- LOG.info("Added {} endpoint to service record", urlEndpoint);
- }
-
- @Override
- public void stop() {
- CloseableUtils.closeQuietly(leaderLatch);
- super.stop();
- }
-
- @Override
- protected HiveServer2Instance createServiceInstance(final ServiceRecord srv) throws IOException {
- Endpoint activeEndpoint = srv.getInternalEndpoint(HS2ActivePassiveHARegistry.ACTIVE_ENDPOINT);
- return new HiveServer2Instance(srv, activeEndpoint != null ? ACTIVE_ENDPOINT : PASSIVE_ENDPOINT);
- }
-
- @Override
- public synchronized void registerStateChangeListener(
- final ServiceInstanceStateChangeListener<HiveServer2Instance> listener)
- throws IOException {
- super.registerStateChangeListener(listener);
- }
-
- @Override
- public ApplicationId getApplicationId() throws IOException {
- throw new IOException("Not supported until HS2 runs as YARN application");
- }
-
- @Override
- protected String getZkPathUser(final Configuration conf) {
- return currentUser();
- }
-
- private boolean hasLeadership() {
- return leaderLatch.hasLeadership();
- }
-
- @Override
- public void failover() throws Exception {
- if (hasLeadership()) {
- LOG.info("Failover request received for HS2 instance: {}. Restarting leader latch..", uniqueId);
- leaderLatch.close(LeaderLatch.CloseMode.NOTIFY_LEADER);
- leaderLatch = getNewLeaderLatchPath();
- // re-attach all registered listeners
- for (Map.Entry<LeaderLatchListener, ExecutorService> registeredListener : registeredListeners.entrySet()) {
- if (registeredListener.getValue() == null) {
- leaderLatch.addListener(registeredListener.getKey());
- } else {
- leaderLatch.addListener(registeredListener.getKey(), registeredListener.getValue());
- }
- }
- leaderLatch.start();
- LOG.info("Failover complete. Leader latch restarted successfully. New leader: {}",
- leaderLatch.getLeader().getId());
- } else {
- LOG.warn("Failover request received for HS2 instance: {} that is not leader. Skipping..", uniqueId);
- }
- }
-
- /**
- * Returns a new leader latch on the same latch path, retaining the same uniqueId. This is only used when HS2
- * starts up or when a manual failover is triggered (in which case uniqueId remains unchanged, as the instance
- * has not restarted).
- *
- * @return - new leader latch
- */
- private LeaderLatch getNewLeaderLatchPath() {
- return new LeaderLatch(zooKeeperClient, latchPath, uniqueId, LeaderLatch.CloseMode.NOTIFY_LEADER);
- }
-
- private class HS2LeaderLatchListener implements LeaderLatchListener {
-
- // Leadership state changes and listener notifications happen inside a synchronized method in Curator.
- // Do only lightweight actions in the main event-handler thread; time-consuming operations are handled via the
- // separate executor service registered via registerLeaderLatchListener().
- @Override
- public void isLeader() {
- // only leader publishes instance uri as endpoint which will be used by clients to make connections to HS2 via
- // service discovery.
- try {
- if (!hasLeadership()) {
- LOG.info("isLeader notification received but hasLeadership returned false.. awaiting..");
- leaderLatch.await();
- }
- addActiveEndpointToServiceRecord();
- LOG.info("HS2 instance in ACTIVE mode. Service record: {}", srv);
- } catch (Exception e) {
- throw new ServiceException("Unable to add active endpoint to service record", e);
- }
- }
-
- @Override
- public void notLeader() {
- try {
- if (hasLeadership()) {
- LOG.info("notLeader notification received but hasLeadership returned true.. awaiting..");
- leaderLatch.await();
- }
- addPassiveEndpointToServiceRecord();
- LOG.info("HS2 instance lost leadership. Switched to PASSIVE standby mode. Service record: {}", srv);
- } catch (Exception e) {
- throw new ServiceException("Unable to add passive endpoint to service record", e);
- }
- }
- }
-
- @Override
- public HiveServer2Instance getLeader() {
- for (HiveServer2Instance hs2Instance : getAll()) {
- if (hs2Instance.isLeader()) {
- return hs2Instance;
- }
- }
- return null;
- }
-
- @Override
- public Collection<HiveServer2Instance> getAll() {
- return getAllInternal();
- }
-
- @Override
- public HiveServer2Instance getInstance(final String instanceId) {
- for (HiveServer2Instance hs2Instance : getAll()) {
- if (hs2Instance.getWorkerIdentity().equals(instanceId)) {
- return hs2Instance;
- }
- }
- return null;
- }
-
- @Override
- public Set<HiveServer2Instance> getByHost(final String host) {
- return getByHostInternal(host);
- }
-
- @Override
- public int size() {
- return sizeInternal();
- }
-
- /**
- * If leadership-related notifications are desired, use this method to register a leader latch listener.
- *
- * @param latchListener - listener
- * @param executorService - event handler executor service
- */
- void registerLeaderLatchListener(final LeaderLatchListener latchListener, final ExecutorService executorService) {
- registeredListeners.put(latchListener, executorService);
- if (executorService == null) {
- leaderLatch.addListener(latchListener);
- } else {
- leaderLatch.addListener(latchListener, executorService);
- }
- }
-
- private Map<String, String> getConfsToPublish() {
- final Map<String, String> confsToPublish = new HashMap<>();
- // Hostname
- confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname,
- conf.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname));
- // Web port
- confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_PORT.varname,
- conf.get(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_PORT.varname));
- // Hostname:port
- confsToPublish.put(INSTANCE_URI_CONFIG, conf.get(INSTANCE_URI_CONFIG));
- confsToPublish.put(UNIQUE_IDENTIFIER, uniqueId);
- // Transport mode
- confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname,
- conf.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname));
- // Transport specific confs
- if (HiveServer2.isHTTPTransportMode(conf)) {
- confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT.varname,
- conf.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT.varname));
- confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname,
- conf.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname));
- } else {
- confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT.varname,
- conf.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT.varname));
- confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP.varname,
- conf.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP.varname));
- }
- // Auth specific confs
- confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION.varname,
- conf.get(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION.varname));
- if (HiveServer2.isKerberosAuthMode(conf)) {
- confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname,
- conf.get(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname));
- }
- // SSL conf
- confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_USE_SSL.varname,
- conf.get(HiveConf.ConfVars.HIVE_SERVER2_USE_SSL.varname));
- return confsToPublish;
- }
-
- private ServiceRecord getNewServiceRecord() {
- ServiceRecord srv = new ServiceRecord();
- final Map confsToPublish = getConfsToPublish();
- for (Map.Entry<String, String> entry : confsToPublish.entrySet()) {
- srv.set(entry.getKey(), entry.getValue());
- }
- return srv;
- }
-}
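
For reference, the server-side entry point of the registry deleted above was just create() plus start(). A sketch of that sequence; since create() is package-private, the caller is assumed to sit in the same package, and the HA namespace is assumed to be set in the conf:

    package org.apache.hive.service.server;

    import java.io.IOException;
    import org.apache.hadoop.hive.conf.HiveConf;

    public class RegistryStartSketch {
      static HS2ActivePassiveHARegistry startServerSide(HiveConf conf) throws IOException {
        HS2ActivePassiveHARegistry registry = HS2ActivePassiveHARegistry.create(conf, false /* isClient */);
        registry.start();   // registers a PASSIVE endpoint and joins the leader latch election
        return registry;
      }
    }
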
diff --git service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistryClient.java service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistryClient.java
deleted file mode 100644
index 122742e..0000000
--- service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistryClient.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License. See accompanying LICENSE file.
- */
-
-package org.apache.hive.service.server;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.registry.impl.ZkRegistryBase;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Preconditions;
-
-public class HS2ActivePassiveHARegistryClient {
- private static final Logger LOG = LoggerFactory.getLogger(HS2ActivePassiveHARegistryClient.class);
- private static final Map<String, HS2ActivePassiveHARegistry> hs2Registries = new HashMap<>();
-
- /**
- * Helper method to get an HS2ActivePassiveHARegistry instance for reading from the registry. Only used by
- * clients (JDBC) via service discovery to connect to the active HS2 instance in Active/Passive HA configuration.
- *
- * @param conf {@link Configuration} instance which contains service registry information.
- * @return HS2ActivePassiveHARegistry
- */
- public static synchronized HS2ActivePassiveHARegistry getClient(Configuration conf) throws IOException {
- String namespace = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_REGISTRY_NAMESPACE);
- Preconditions.checkArgument(!StringUtils.isBlank(namespace),
- HiveConf.ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_REGISTRY_NAMESPACE.varname + " cannot be null or empty");
- String nsKey = ZkRegistryBase.getRootNamespace(conf, null, namespace + "-");
- HS2ActivePassiveHARegistry registry = hs2Registries.get(nsKey);
- if (registry == null) {
- registry = HS2ActivePassiveHARegistry.create(conf, true);
- registry.start();
- hs2Registries.put(nsKey, registry);
- LOG.info("Added registry client to cache with namespace: {}", nsKey);
- } else {
- LOG.info("Returning cached registry client for namespace: {}", nsKey);
- }
- return registry;
- }
-}
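
And the matching client-side lookup the JDBC driver used for Active/Passive service discovery, built only from methods shown above:

    import java.io.IOException;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.server.HS2ActivePassiveHARegistry;
    import org.apache.hive.service.server.HS2ActivePassiveHARegistryClient;
    import org.apache.hive.service.server.HiveServer2Instance;

    public class LeaderLookupSketch {
      public static void main(String[] args) throws IOException {
        HS2ActivePassiveHARegistry client = HS2ActivePassiveHARegistryClient.getClient(new HiveConf());
        HiveServer2Instance leader = client.getLeader();   // null while no instance holds the latch
        if (leader != null) {
          System.out.println(leader.getHost() + ":" + leader.getRpcPort()
              + " transport=" + leader.getTransportMode());
        }
      }
    }
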
diff --git service/src/java/org/apache/hive/service/server/HiveServer2.java service/src/java/org/apache/hive/service/server/HiveServer2.java
index 8f73c60..5a174cc 100644
--- service/src/java/org/apache/hive/service/server/HiveServer2.java
+++ service/src/java/org/apache/hive/service/server/HiveServer2.java
@@ -51,7 +51,6 @@
import org.apache.curator.framework.recipes.leader.LeaderLatch;
import org.apache.curator.framework.recipes.leader.LeaderLatchListener;
import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.JvmPauseMonitor;
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
@@ -99,6 +98,7 @@
import org.apache.hive.http.security.PamAuthenticator;
import org.apache.hive.service.CompositeService;
import org.apache.hive.service.ServiceException;
+import org.apache.hive.service.auth.HiveAuthConstants;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.session.HiveSession;
@@ -136,7 +136,6 @@
public class HiveServer2 extends CompositeService {
private static CountDownLatch deleteSignal;
private static final Logger LOG = LoggerFactory.getLogger(HiveServer2.class);
- public static final String INSTANCE_URI_CONFIG = "hive.server2.instance.uri";
private static final int SHUTDOWN_TIME = 60;
private CLIService cliService;
private ThriftCLIService thriftCLIService;
@@ -222,7 +221,7 @@
hiveServer2.stop();
}
};
- if (isHTTPTransportMode(hiveConf)) {
+ if (HiveServer2ClientUtils.isHTTPTransportMode(hiveConf)) {
thriftCLIService = new ThriftHttpCLIService(cliService, oomHook);
} else {
thriftCLIService = new ThriftBinaryCLIService(cliService, oomHook);
@@ -294,7 +293,7 @@
serviceUri = getServerInstanceURI();
addConfsToPublish(hiveConf, confsToPublish, serviceUri);
if (activePassiveHA) {
- hiveConf.set(INSTANCE_URI_CONFIG, serviceUri);
+ hiveConf.set(HiveAuthConstants.INSTANCE_URI_CONFIG, serviceUri);
leaderLatchListener = new HS2LeaderLatchListener(this, SessionState.get());
leaderActionsExecutorService = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setDaemon(true)
.setNameFormat("Leader Actions Handler Thread").build());
@@ -450,25 +449,6 @@
return resourcePlan;
}
- public static boolean isHTTPTransportMode(Configuration hiveConf) {
- String transportMode = System.getenv("HIVE_SERVER2_TRANSPORT_MODE");
- if (transportMode == null) {
- transportMode = hiveConf.get(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
- }
- if (transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
- return true;
- }
- return false;
- }
-
- public static boolean isKerberosAuthMode(Configuration hiveConf) {
- String authMode = hiveConf.get(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname);
- if (authMode != null && (authMode.equalsIgnoreCase("KERBEROS"))) {
- return true;
- }
- return false;
- }
-
/**
* ACLProvider for providing appropriate ACLs to CuratorFrameworkFactory
*/
@@ -510,12 +490,12 @@
confsToPublish.put(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname,
hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST));
// Hostname:port
- confsToPublish.put(INSTANCE_URI_CONFIG, serviceUri);
+ confsToPublish.put(HiveAuthConstants.INSTANCE_URI_CONFIG, serviceUri);
// Transport mode
confsToPublish.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname,
hiveConf.getVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE));
// Transport specific confs
- if (isHTTPTransportMode(hiveConf)) {
+ if (HiveServer2ClientUtils.isHTTPTransportMode(hiveConf)) {
confsToPublish.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT.varname,
Integer.toString(hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT)));
confsToPublish.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname,
@@ -529,7 +509,7 @@
// Auth specific confs
confsToPublish.put(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname,
hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION));
- if (isKerberosAuthMode(hiveConf)) {
+ if (HiveServer2ClientUtils.isKerberosAuthMode(hiveConf)) {
confsToPublish.put(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname,
hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL));
}
@@ -568,23 +548,6 @@
return cliService != null ? cliService.getSessionManager().getOpenSessionCount() : 0;
}
- interface FailoverHandler {
- void failover() throws Exception;
- }
-
- public static class FailoverHandlerCallback implements FailoverHandler {
- private HS2ActivePassiveHARegistry hs2HARegistry;
-
- FailoverHandlerCallback(HS2ActivePassiveHARegistry hs2HARegistry) {
- this.hs2HARegistry = hs2HARegistry;
- }
-
- @Override
- public void failover() throws Exception {
- hs2HARegistry.failover();
- }
- }
-
/**
* The watcher class shuts down the server if there are no more active client
* sessions at the time of receiving a 'NodeDeleted' notification from ZooKeeper.
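
The two predicates deleted from HiveServer2 above reappear as HiveServer2ClientUtils, letting the JDBC driver avoid a dependency on the whole server class. The new file is not part of this excerpt, but it presumably mirrors the removed bodies, roughly:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

    public class HiveServer2ClientUtils {
      public static boolean isHTTPTransportMode(Configuration hiveConf) {
        String transportMode = System.getenv("HIVE_SERVER2_TRANSPORT_MODE");
        if (transportMode == null) {
          transportMode = hiveConf.get(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
        }
        return transportMode != null && transportMode.equalsIgnoreCase("http");
      }

      public static boolean isKerberosAuthMode(Configuration hiveConf) {
        String authMode = hiveConf.get(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname);
        return authMode != null && authMode.equalsIgnoreCase("KERBEROS");
      }
    }
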
diff --git service/src/java/org/apache/hive/service/server/HiveServer2HAInstanceSet.java service/src/java/org/apache/hive/service/server/HiveServer2HAInstanceSet.java
deleted file mode 100644
index b31d63c..0000000
--- service/src/java/org/apache/hive/service/server/HiveServer2HAInstanceSet.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.server;
-
-import org.apache.hadoop.hive.registry.ServiceInstanceSet;
-
-public interface HiveServer2HAInstanceSet extends ServiceInstanceSet {
-
- /**
- * In Active/Passive setup, returns current active leader.
- *
- * @return leader instance
- */
- HiveServer2Instance getLeader();
-}
diff --git service/src/java/org/apache/hive/service/server/HiveServer2Instance.java service/src/java/org/apache/hive/service/server/HiveServer2Instance.java
deleted file mode 100644
index 558e809..0000000
--- service/src/java/org/apache/hive/service/server/HiveServer2Instance.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.server;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.registry.impl.ServiceInstanceBase;
-import org.apache.hadoop.registry.client.types.Endpoint;
-import org.apache.hadoop.registry.client.types.ServiceRecord;
-
-import com.google.common.base.Preconditions;
-
-public class HiveServer2Instance extends ServiceInstanceBase {
- private boolean isLeader;
- private String transportMode;
- private String httpEndpoint;
-
- // empty c'tor to make jackson happy
- public HiveServer2Instance() {
-
- }
-
- public HiveServer2Instance(final ServiceRecord srv, final String endPointName) throws IOException {
- super(srv, endPointName);
-
- Endpoint activeEndpoint = srv.getInternalEndpoint(HS2ActivePassiveHARegistry.ACTIVE_ENDPOINT);
- Endpoint passiveEndpoint = srv.getInternalEndpoint(HS2ActivePassiveHARegistry.PASSIVE_ENDPOINT);
- this.isLeader = activeEndpoint != null;
- Preconditions.checkArgument(activeEndpoint == null || passiveEndpoint == null,
- "Incorrect service record. Both active and passive endpoints cannot be non-null!");
- this.transportMode = srv.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
- if (transportMode.equalsIgnoreCase("http")) {
- this.httpEndpoint = srv.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname);
- } else {
- this.httpEndpoint = "";
- }
- }
-
- public boolean isLeader() {
- return isLeader;
- }
-
- public String getTransportMode() {
- return transportMode;
- }
-
- public String getHttpEndpoint() {
- return httpEndpoint;
- }
-
- public void setLeader(final boolean leader) {
- isLeader = leader;
- }
-
- public void setTransportMode(final String transportMode) {
- this.transportMode = transportMode;
- }
-
- public void setHttpEndpoint(final String httpEndpoint) {
- this.httpEndpoint = httpEndpoint;
- }
-
- @Override
- public boolean equals(final Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
-
- HiveServer2Instance other = (HiveServer2Instance) o;
- return super.equals(o) && isLeader == other.isLeader
- && Objects.equals(transportMode, other.transportMode)
- && Objects.equals(httpEndpoint, other.httpEndpoint);
- }
-
- @Override
- public int hashCode() {
- return super.hashCode() + Objects.hashCode(isLeader) + Objects.hashCode(transportMode) + Objects.hashCode(httpEndpoint);
- }
-
- @Override
- public String toString() {
- String result = "instanceId: " + getWorkerIdentity() + " isLeader: " + isLeader + " host: " + getHost() +
- " port: " + getRpcPort() + " transportMode: " + transportMode;
- if (httpEndpoint != null) {
- result += " httpEndpoint: " + httpEndpoint;
- }
- return result;
- }
-}
diff --git service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java
index acb58c8..ba63e04 100644
--- service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java
+++ service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java
@@ -27,7 +27,7 @@
import org.apache.hive.http.HttpConstants;
import org.apache.hive.http.HttpServer;
-import org.apache.hive.service.server.HiveServer2;
+import org.apache.hive.service.server.FailoverHandlerCallback;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -123,7 +123,7 @@
return;
}
- HiveServer2.FailoverHandlerCallback failoverHandler = (HiveServer2.FailoverHandlerCallback) context
+ FailoverHandlerCallback failoverHandler = (FailoverHandlerCallback) context
.getAttribute("hs2.failover.callback");
try {
String msg = "Failover successful!";