diff --git common/src/java/org/apache/hadoop/hive/common/cli/HiveFileProcessor.java common/src/java/org/apache/hadoop/hive/common/cli/HiveFileProcessor.java
new file mode 100644
index 0000000..d746df2
--- /dev/null
+++ common/src/java/org/apache/hadoop/hive/common/cli/HiveFileProcessor.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.cli;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.io.IOUtils;
+
+/**
+ * HiveFileProcessor is used for processing a file consisting of Hive
+ * executable statements.
+ */
+public abstract class HiveFileProcessor implements IHiveFileProcessor {
+
+ public int processFile(String fileName) throws IOException {
+ BufferedReader bufferedReader = null;
+ try {
+ bufferedReader = loadFile(fileName);
+ return processReader(bufferedReader);
+ } finally {
+ IOUtils.closeStream(bufferedReader);
+ }
+ }
+
+ /**
+ * Loads the commands from the given file into a buffered reader.
+ * @param fileName the name of the file to load
+ * @return a BufferedReader over the file contents
+ * @throws IOException if the file cannot be read
+ */
+ protected abstract BufferedReader loadFile(String fileName)
+ throws IOException;
+
+ /**
+ * Reads the commands from the buffered reader and executes them.
+ * @param reader the buffered reader
+ * @return the return code of the execution result
+ * @throws IOException
+ */
+ protected int processReader(BufferedReader reader) throws IOException {
+ String line;
+ StringBuilder qsb = new StringBuilder();
+ while ((line = reader.readLine()) != null) {
+ if (!line.startsWith("--")) {
+ qsb.append(line);
+ }
+ }
+ return processLine(qsb.toString());
+ }
+
+ /**
+ * Splits the given line into individual Hive commands (separated by ';')
+ * and processes each one in turn.
+ * @param line a string containing one or more Hive commands
+ * @return the return code of the execution result
+ */
+ protected int processLine(String line) {
+ int lastRet = 0, ret = 0;
+ String command = "";
+ for (String oneCmd : line.split(";")) {
+ if (StringUtils.indexOf(oneCmd, "\\") != -1) {
+ command += StringUtils.join(oneCmd.split("\\\\"));
+ } else {
+ command += oneCmd;
+ }
+ if (StringUtils.isBlank(command)) {
+ continue;
+ }
+
+ ret = processCmd(command);
+ command = "";
+ lastRet = ret;
+ if (ret != 0) {
+ return ret;
+ }
+ }
+ return lastRet;
+ }
+
+ /**
+ * Processes a single Hive command; subclasses define how the command is executed.
+ * @param cmd a single Hive command
+ * @return the return code of the execution result
+ */
+ protected abstract int processCmd(String cmd);
+}
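
For illustration only (this sketch is not part of the patch): a hypothetical subclass showing how the two abstract hooks plug into processFile, followed by a comment walking through what processReader and processLine do with comment lines, ';'-separated statements, and '\'-continued statements. The class name and script path are made up.

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;

    import org.apache.hadoop.hive.common.cli.HiveFileProcessor;

    // Hypothetical subclass for illustration; it just echoes each parsed command.
    public class EchoFileProcessor extends HiveFileProcessor {

      @Override
      protected BufferedReader loadFile(String fileName) throws IOException {
        // Open the file as plain text, the same way the HS2 processor in this patch does.
        return new BufferedReader(new InputStreamReader(new FileInputStream(fileName)));
      }

      @Override
      protected int processCmd(String cmd) {
        // A real implementation would hand the command to an executor.
        System.out.println("would execute: " + cmd);
        return 0;
      }

      public static void main(String[] args) throws IOException {
        new EchoFileProcessor().processFile("/path/to/script.hql"); // placeholder path
      }
    }

    // Given a file containing:
    //   -- lines starting with "--" are skipped
    //   set a=1;
    //   set d\
    //    =1;
    // processReader concatenates the non-comment lines into "set a=1;set d\ =1;",
    // and processLine then splits on ';' and strips the '\' continuation,
    // so processCmd is called with "set a=1" and "set d =1".
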
diff --git common/src/java/org/apache/hadoop/hive/common/cli/IHiveFileProcessor.java common/src/java/org/apache/hadoop/hive/common/cli/IHiveFileProcessor.java
new file mode 100644
index 0000000..294f993
--- /dev/null
+++ common/src/java/org/apache/hadoop/hive/common/cli/IHiveFileProcessor.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.cli;
+
+import java.io.IOException;
+
+/**
+ * Hive file processor is used for processing an executable Hive file.
+ */
+public interface IHiveFileProcessor {
+ /**
+ * Parse the file and execute the hive commands in it.
+ * @param fileName the name of the file
+ * @return the return code of the execution result
+ * @throws IOException if an I/O error occurs
+ */
+ public int processFile(String fileName) throws IOException;
+}
diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 8bff2a9..912ad07 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -868,6 +868,9 @@
HIVE_LOG4J_FILE("hive.log4j.file", ""),
HIVE_EXEC_LOG4J_FILE("hive.exec.log4j.file", ""),
+ // Hive global init file location
+ HIVE_GLOBAL_INIT_FILE_LOCATION("hive.global.init.file.location", System.getenv("HIVE_CONF_DIR")),
+
// prefix used to auto generated column aliases (this should be started with '_')
HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL("hive.autogen.columnalias.prefix.label", "_c"),
HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME(
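
For reference, a hedged sketch of reading and overriding the new variable programmatically; getVar/setVar are the standard HiveConf accessors, and the /etc/hive/conf path is only a placeholder. When the property is unset, the value falls back to the HIVE_CONF_DIR environment variable, per the default above.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

    public class InitFileLocationExample {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();

        // Read the configured directory; may be null if neither the property
        // nor the HIVE_CONF_DIR environment variable is set.
        String location = conf.getVar(ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION);
        System.out.println("global init file directory: " + location);

        // Point HS2 at a different directory containing a .hiverc file (illustrative path).
        conf.setVar(ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION, "/etc/hive/conf");
      }
    }
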
diff --git conf/hive-default.xml.template conf/hive-default.xml.template
index 4944dfc..34f1147 100644
--- conf/hive-default.xml.template
+++ conf/hive-default.xml.template
@@ -2143,6 +2143,14 @@
+<property>
+  <name>hive.global.init.file.location</name>
+  <value></value>
+  <description>The location of the HS2 global init file (.hiverc).
+    If the property is not set, HS2 will search for the file in $HIVE_CONF_DIR/.
+    If it is set, the value must be a valid path to the directory where the init file is located.</description>
+</property>
+
<property>
  <name>hive.exec.infer.bucket.sort</name>
  <value>false</value>
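
For illustration, a global .hiverc placed in that directory might contain lines like the following (modelled on the statements exercised by the test added at the end of this patch; the property names and jar path are placeholders):

    -- shared initialization for every new HiveServer2 session
    set hiveconf:hive.exec.parallel=true;
    set hivevar:env=prod;
    add jar /path/to/shared-udfs.jar;
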
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index 6a7ee7a..b365c77 100644
--- service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -18,7 +18,11 @@
package org.apache.hive.service.cli.session;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
import java.io.IOException;
+import java.io.InputStreamReader;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -26,6 +30,8 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.cli.HiveFileProcessor;
+import org.apache.hadoop.hive.common.cli.IHiveFileProcessor;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
@@ -82,7 +88,8 @@
private final Set<OperationHandle> opHandleSet = new HashSet<OperationHandle>();
public HiveSessionImpl(TProtocolVersion protocol, String username, String password,
- HiveConf serverhiveConf, Map<String, String> sessionConfMap, String ipAddress) {
+ HiveConf serverhiveConf, Map<String, String> sessionConfMap, String ipAddress,
+ OperationManager operationManager) {
this.username = username;
this.password = password;
this.sessionHandle = new SessionHandle(protocol);
@@ -101,12 +108,65 @@ public HiveSessionImpl(TProtocolVersion protocol, String username, String passwo
sessionState.setIsHiveServerQuery(true);
SessionState.start(sessionState);
+ setOperationManager(operationManager);
+
+ // process the global init file: .hiverc
+ processGlobalInitFile();
+ SessionState.setCurrentSessionState(sessionState);
+
//set conf properties specified by user from client side
if (sessionConfMap != null) {
configureSession(sessionConfMap);
}
}
+ /**
+ * Processes the global .hiverc file on the HiveServer2 side.
+ */
+ private class GlobalHivercFileProcessor extends HiveFileProcessor {
+ @Override
+ protected BufferedReader loadFile(String fileName) throws IOException {
+ FileInputStream initStream = new FileInputStream(fileName);
+ return new BufferedReader(new InputStreamReader(initStream));
+ }
+
+ @Override
+ protected int processCmd(String cmd) {
+ int rc = 0;
+ String trimmedCmd = cmd.trim();
+ try {
+ executeStatementInternal(trimmedCmd, null, false, true);
+ } catch (HiveSQLException e) {
+ rc = -1;
+ LOG.warn("Failed to execute HQL command in global .hiverc file.", e);
+ }
+ return rc;
+ }
+ }
+
+ private void processGlobalInitFile() {
+ IHiveFileProcessor processor = new GlobalHivercFileProcessor();
+
+ try {
+ if (hiveConf.getVar(ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION) != null) {
+ String hiverc = hiveConf.getVar(ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION)
+ + File.separator + SessionManager.HIVERCFILE;
+ if (new File(hiverc).exists()) {
+ LOG.info("Running global init file: " + hiverc);
+ int rc = processor.processFile(hiverc);
+ if (rc != 0) {
+ LOG.warn("Failed on initializing global .hiverc file");
+ }
+ }
+ }
+ } catch (IOException e) {
+ LOG.warn("Failed on initializing global .hiverc file", e);
+ }
+ }
+
private void configureSession(Map<String, String> sessionConfMap) {
for (Map.Entry<String, String> entry : sessionConfMap.entrySet()) {
String key = entry.getKey();
@@ -223,23 +283,23 @@ public GetInfoValue getInfo(GetInfoType getInfoType)
@Override
public OperationHandle executeStatement(String statement, Map<String, String> confOverlay)
throws HiveSQLException {
- return executeStatementInternal(statement, confOverlay, false);
+ return executeStatementInternal(statement, confOverlay, false, false);
}
@Override
public OperationHandle executeStatementAsync(String statement, Map<String, String> confOverlay)
throws HiveSQLException {
- return executeStatementInternal(statement, confOverlay, true);
+ return executeStatementInternal(statement, confOverlay, true, false);
}
private OperationHandle executeStatementInternal(String statement, Map<String, String> confOverlay,
- boolean runAsync)
+ boolean runAsync, boolean initialize)
throws HiveSQLException {
acquire();
OperationManager operationManager = getOperationManager();
ExecuteStatementOperation operation = operationManager
- .newExecuteStatementOperation(getSession(), statement, confOverlay, runAsync);
+ .newExecuteStatementOperation(initialize ? this : getSession(), statement, confOverlay, runAsync);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
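
A note on the new initialize flag: the global .hiverc is processed inside the constructor, before the SessionManager has registered the session (and, in the impersonation case, before the UGI proxy session is set), so executeStatementInternal passes this rather than getSession() to the OperationManager. As a hedged end-to-end sketch of the observable effect (assumes a HiveServer2 on localhost:10000, the standard Hive JDBC driver on the classpath, and a global .hiverc containing "set a=1;"):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class GlobalInitFileSmokeTest {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection conn =
            DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "hive", "");
        try {
          Statement stmt = conn.createStatement();
          // Statements in the global .hiverc should already have taken effect
          // by the time the session is handed back to the client.
          ResultSet rs = stmt.executeQuery("set a");
          while (rs.next()) {
            System.out.println(rs.getString(1)); // expect "a=1" if the init file was processed
          }
          rs.close();
          stmt.close();
        } finally {
          conn.close();
        }
      }
    }
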
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
index e79b129..2a94fba 100644
--- service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
+++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
@@ -28,6 +28,7 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.operation.OperationManager;
import org.apache.hive.service.cli.thrift.TProtocolVersion;
/**
@@ -45,8 +46,8 @@
public HiveSessionImplwithUGI(TProtocolVersion protocol, String username, String password,
HiveConf hiveConf, Map<String, String> sessionConf, String ipAddress,
- String delegationToken) throws HiveSQLException {
- super(protocol, username, password, hiveConf, sessionConf, ipAddress);
+ OperationManager operationManager, String delegationToken) throws HiveSQLException {
+ super(protocol, username, password, hiveConf, sessionConf, ipAddress, operationManager);
setSessionUGI(username);
setDelegationToken(delegationToken);
}
diff --git service/src/java/org/apache/hive/service/cli/session/SessionManager.java service/src/java/org/apache/hive/service/cli/session/SessionManager.java
index 6650c05..c377e85 100644
--- service/src/java/org/apache/hive/service/cli/session/SessionManager.java
+++ service/src/java/org/apache/hive/service/cli/session/SessionManager.java
@@ -47,6 +47,7 @@
public class SessionManager extends CompositeService {
private static final Log LOG = LogFactory.getLog(CompositeService.class);
+ public static final String HIVERCFILE = ".hiverc";
private HiveConf hiveConf;
private final Map<SessionHandle, HiveSession> handleToSession =
new ConcurrentHashMap<SessionHandle, HiveSession>();
@@ -122,15 +123,14 @@ public SessionHandle openSession(TProtocolVersion protocol, String username, Str
HiveSession session;
if (withImpersonation) {
HiveSessionImplwithUGI hiveSessionUgi = new HiveSessionImplwithUGI(protocol, username, password,
- hiveConf, sessionConf, TSetIpAddressProcessor.getUserIpAddress(), delegationToken);
+ hiveConf, sessionConf, TSetIpAddressProcessor.getUserIpAddress(), operationManager, delegationToken);
session = HiveSessionProxy.getProxy(hiveSessionUgi, hiveSessionUgi.getSessionUgi());
hiveSessionUgi.setProxySession(session);
} else {
session = new HiveSessionImpl(protocol, username, password, hiveConf, sessionConf,
- TSetIpAddressProcessor.getUserIpAddress());
+ TSetIpAddressProcessor.getUserIpAddress(), operationManager);
}
session.setSessionManager(this);
- session.setOperationManager(operationManager);
session.open();
handleToSession.put(session.getSessionHandle(), session);
diff --git service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
new file mode 100644
index 0000000..b963897
--- /dev/null
+++ service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.cli.session;
+
+import java.io.File;
+import java.lang.reflect.Field;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.cli.*;
+import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
+import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
+import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestSessionGlobalInitFile extends TestCase {
+
+ private FakeEmbeddedThriftBinaryCLIService service;
+ private ThriftCLIServiceClient client;
+ private File initFile;
+ private String tmpDir;
+
+ /**
+ * This class is almost the same as EmbeddedThriftBinaryCLIService,
+ * except that its constructor takes a HiveConf parameter for test usage.
+ */
+ private class FakeEmbeddedThriftBinaryCLIService extends ThriftBinaryCLIService {
+ public FakeEmbeddedThriftBinaryCLIService(HiveConf hiveConf) {
+ super(new CLIService());
+ isEmbedded = true;
+ cliService.init(hiveConf);
+ cliService.start();
+ }
+
+ public ICLIService getService() {
+ return cliService;
+ }
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+
+ // create and put .hiverc sample file to default directory
+ initFile = File.createTempFile("test", "hive");
+ tmpDir =
+ initFile.getParentFile().getAbsoluteFile() + File.separator
+ + "TestSessionGlobalInitFile";
+ initFile.delete();
+ FileUtils.deleteDirectory(new File(tmpDir));
+
+ initFile =
+ new File(tmpDir + File.separator + SessionManager.HIVERCFILE);
+ initFile.getParentFile().mkdirs();
+ initFile.createNewFile();
+
+ String[] fileContent =
+ new String[] { "-- global init hive file for test", "set a=1;",
+ "set hiveconf:b=1;", "set hivevar:c=1;", "set d\\", " =1;",
+ "add jar " + initFile.getAbsolutePath() };
+ FileUtils.writeLines(initFile, Arrays.asList(fileContent));
+
+ // set up service and client
+ HiveConf hiveConf = new HiveConf();
+ hiveConf.setVar(HiveConf.ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION,
+ initFile.getParentFile().getAbsolutePath());
+ service = new FakeEmbeddedThriftBinaryCLIService(hiveConf);
+ service.init(new HiveConf());
+ client = new ThriftCLIServiceClient(service);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ // restore
+ FileUtils.deleteDirectory(new File(tmpDir));
+ }
+
+ @Test
+ public void testSessionGlobalInitFile() throws Exception {
+ /**
+ * Create a session and fetch the properties set in the global init file.
+ * This verifies that the global .hiverc file is loaded correctly by checking
+ * that the expected properties have been set.
+ */
+ SessionHandle sessionHandle = client.openSession(null, null, null);
+
+ verifyInitProperty("a", "1", sessionHandle);
+ verifyInitProperty("b", "1", sessionHandle);
+ verifyInitProperty("c", "1", sessionHandle);
+ verifyInitProperty("hivevar:c", "1", sessionHandle);
+ verifyInitProperty("d", "1", sessionHandle);
+
+ /**
+ * TODO: client.executeStatement does not support the list resources command
+ * (beeline> list jar)
+ */
+ // Assert.assertEquals("expected uri", api.getAddedResource("jar"));
+
+ client.closeSession(sessionHandle);
+ }
+
+ @Test
+ public void testSessionGlobalInitFileWithUser() throws Exception {
+ // Test the case where the session is opened by a named user (HiveSessionImplwithUGI).
+ SessionHandle sessionHandle = client.openSession("hive", "passward", null);
+ verifyInitProperty("a", "1", sessionHandle);
+ client.closeSession(sessionHandle);
+ }
+
+ @Test
+ public void testSessionGlobalInitFileAndConfOverlay() throws Exception {
+ // Test that the session-specific conf overlay overrides the global init conf.
+ Map<String, String> confOverlay = new HashMap<String, String>();
+ confOverlay.put("a", "2");
+ confOverlay.put("set:hiveconf:b", "2");
+ confOverlay.put("set:hivevar:c", "2");
+
+ SessionHandle sessionHandle = client.openSession(null, null, confOverlay);
+ verifyInitProperty("a", "2", sessionHandle);
+ verifyInitProperty("b", "2", sessionHandle);
+ verifyInitProperty("c", "2", sessionHandle);
+ client.closeSession(sessionHandle);
+
+ sessionHandle = client.openSession("hive", "passward", confOverlay);
+ verifyInitProperty("a", "2", sessionHandle);
+ client.closeSession(sessionHandle);
+ }
+
+ private void verifyInitProperty(String key, String value,
+ SessionHandle sessionHandle) throws Exception {
+ OperationHandle operationHandle =
+ client.executeStatement(sessionHandle, "set " + key, null);
+ RowSet rowSet = client.fetchResults(operationHandle);
+ Assert.assertEquals(1, rowSet.numRows());
+ // we know rowSet has only one element
+ Assert.assertEquals(key + "=" + value, rowSet.iterator().next()[0]);
+ }
+}