diff --git a/build-common.xml b/build-common.xml
index f10f340..300bd61 100644
--- a/build-common.xml
+++ b/build-common.xml
@@ -59,7 +59,7 @@
-
+
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index 4239392..9f959e5 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -32,12 +32,12 @@
import java.util.Set;
import jline.ArgumentCompletor;
+import jline.ArgumentCompletor.AbstractArgumentDelimiter;
+import jline.ArgumentCompletor.ArgumentDelimiter;
import jline.Completor;
import jline.ConsoleReader;
import jline.History;
import jline.SimpleCompletor;
-import jline.ArgumentCompletor.AbstractArgumentDelimiter;
-import jline.ArgumentCompletor.ArgumentDelimiter;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
@@ -714,6 +714,8 @@ public static int run(String[] args) throws Exception {
} catch (FileNotFoundException e) {
System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
return 3;
+ } finally {
+ ss.close();
}
ConsoleReader reader = new ConsoleReader();
@@ -747,26 +749,28 @@ public static int run(String[] args) throws Exception {
String curPrompt = prompt + curDB;
String dbSpaces = spacesForString(curDB);
- while ((line = reader.readLine(curPrompt + "> ")) != null) {
- if (!prefix.equals("")) {
- prefix += '\n';
- }
- if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
- line = prefix + line;
- ret = cli.processLine(line, true);
- prefix = "";
- curDB = getFormattedDb(conf, ss);
- curPrompt = prompt + curDB;
- dbSpaces = dbSpaces.length() == curDB.length() ? dbSpaces : spacesForString(curDB);
- } else {
- prefix = prefix + line;
- curPrompt = prompt2 + dbSpaces;
- continue;
+ try {
+ while ((line = reader.readLine(curPrompt + "> ")) != null) {
+ if (!prefix.equals("")) {
+ prefix += '\n';
+ }
+ if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
+ line = prefix + line;
+ ret = cli.processLine(line, true);
+ prefix = "";
+ curDB = getFormattedDb(conf, ss);
+ curPrompt = prompt + curDB;
+ dbSpaces = dbSpaces.length() == curDB.length() ? dbSpaces : spacesForString(curDB);
+ } else {
+ prefix = prefix + line;
+ curPrompt = prompt2 + dbSpaces;
+ continue;
+ }
}
+ } finally {
+ ss.close();
}
- ss.close();
-
return ret;
}
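
The restructuring above is the core CLI fix: `ss.close()` now runs in `finally` blocks, so per-session state (including the downloaded-resources directory introduced in the `SessionState` changes below) is cleaned up even when the file-processing path or the interactive loop exits abnormally. A minimal sketch of the pattern, with an illustrative `Session` interface standing in for `CliSessionState`:

// Minimal sketch (illustrative names): close() runs in finally so an
// exception anywhere in the read loop cannot leak per-session state.
final class ReplSketch {
  interface Session {
    String readLine();          // null at end of input
    void process(String line);
    void close();               // removes per-session scratch state
  }

  static int run(Session ss) {
    try {
      String line;
      while ((line = ss.readLine()) != null) {
        ss.process(line);       // may throw; close() below still runs
      }
      return 0;
    } finally {
      ss.close();               // guaranteed cleanup, success or failure
    }
  }

  public static void main(String[] args) {
    String[] lines = {"select 1;", "select 2;"};
    int[] next = {0};
    run(new Session() {
      public String readLine() { return next[0] < lines.length ? lines[next[0]++] : null; }
      public void process(String line) { System.out.println("processing: " + line); }
      public void close() { System.out.println("session closed"); }
    });
  }
}
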
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java b/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java
index dfb30e2..3b82d65 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java
@@ -77,11 +77,6 @@
private Hive hive; // currently only used (and init'ed) in getCurrentDbName
- public CliSessionState() {
- super();
- remoteMode = false;
- }
-
public CliSessionState(HiveConf conf) {
super(conf);
remoteMode = false;
@@ -110,8 +105,10 @@ public int getPort() {
return port;
}
+ @Override
public void close() {
try {
+ super.close();
if (remoteMode) {
client.clean();
transport.close();
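
With `close()` now declared on the base `SessionState` (see the `ql` changes below), `CliSessionState.close()` becomes an `@Override` that chains to `super.close()` first, so the resource-directory cleanup happens for both local and remote sessions before the remote client and transport are torn down. A sketch of that chaining pattern, with illustrative class names:

// Sketch of the override-and-chain pattern (illustrative names): the base
// close() removes the per-session resource directory; the subclass adds
// remote-mode teardown on top of it.
class BaseSession {
  public void close() {
    System.out.println("deleting per-session resource directory");
  }
}

class CliSession extends BaseSession {
  private final boolean remoteMode;

  CliSession(boolean remoteMode) { this.remoteMode = remoteMode; }

  @Override
  public void close() {
    super.close();              // base cleanup runs unconditionally
    if (remoteMode) {
      System.out.println("cleaning remote client and closing transport");
    }
  }

  public static void main(String[] args) {
    new CliSession(true).close();
  }
}
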
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 5c1b283..7454f99 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -200,7 +200,8 @@
DYNAMICPARTITIONMAXPARTS("hive.exec.max.dynamic.partitions", 1000),
DYNAMICPARTITIONMAXPARTSPERNODE("hive.exec.max.dynamic.partitions.pernode", 100),
MAXCREATEDFILES("hive.exec.max.created.files", 100000L),
- DOWNLOADED_RESOURCES_DIR("hive.downloaded.resources.dir", "/tmp/"+System.getProperty("user.name")+"/hive_resources"),
+ DOWNLOADED_RESOURCES_DIR("hive.downloaded.resources.dir",
+ "/tmp/${hive.session.id}_resources"),
DEFAULTPARTITIONNAME("hive.exec.default.partition.name", "__HIVE_DEFAULT_PARTITION__"),
DEFAULT_ZOOKEEPER_PARTITION_NAME("hive.lockmgr.zookeeper.default.partition.name", "__HIVE_DEFAULT_ZOOKEEPER_PARTITION__"),
// Whether to show a link to the most failed task + debugging tips
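
The new default leans on Hadoop `Configuration` variable expansion: a `${...}` reference inside a property value is substituted from another configuration (or system) property when the value is read, so each session gets a distinct, predictable resources directory instead of a shared per-user one. A small self-contained sketch of that expansion:

import org.apache.hadoop.conf.Configuration;

// Sketch of the ${hive.session.id} substitution the new default relies on:
// Configuration.get() expands ${...} references at read time.
public class ConfExpansionSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.set("hive.session.id", "1234-abcd");
    conf.set("hive.downloaded.resources.dir", "/tmp/${hive.session.id}_resources");
    // Prints /tmp/1234-abcd_resources
    System.out.println(conf.get("hive.downloaded.resources.dir"));
  }
}
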
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 3d43451..2f364fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -24,14 +24,14 @@
import java.io.PrintStream;
import java.net.URI;
import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.UUID;
+import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -57,6 +57,7 @@
* configuration information
*/
public class SessionState {
+ private static final Log LOG = LogFactory.getLog(SessionState.class);
/**
* current configuration.
@@ -179,16 +180,16 @@ public void setIsVerbose(boolean isVerbose) {
this.isVerbose = isVerbose;
}
- public SessionState() {
- this(null);
- }
-
public SessionState(HiveConf conf) {
this.conf = conf;
isSilent = conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT);
ls = new LineageState();
overriddenConfigurations = new HashMap<String, String>();
overriddenConfigurations.putAll(HiveConf.getConfSystemProperties());
+ // if there isn't already a session ID, generate one.
+ if (StringUtils.isEmpty(conf.getVar(HiveConf.ConfVars.HIVESESSIONID))) {
+ conf.setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
+ }
}
public void setCmd(String cmdString) {
@@ -242,12 +243,6 @@ public static SessionState start(SessionState startSs) {
tss.set(startSs);
- if (StringUtils.isEmpty(startSs.getConf().getVar(
- HiveConf.ConfVars.HIVESESSIONID))) {
- startSs.getConf()
- .setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
- }
-
if (startSs.hiveHist == null) {
startSs.hiveHist = new HiveHistory(startSs);
}
@@ -297,15 +292,13 @@ public HiveHistory getHiveHistory() {
return hiveHist;
}
+ /**
+ * Create a session ID that is unique per session.
+ *
+ * @return a random UUID string
+ */
private static String makeSessionId() {
- GregorianCalendar gc = new GregorianCalendar();
- String userid = System.getProperty("user.name");
-
- return userid
- + "_"
- + String.format("%1$4d%2$02d%3$02d%4$02d%5$02d", gc.get(Calendar.YEAR),
- gc.get(Calendar.MONTH) + 1, gc.get(Calendar.DAY_OF_MONTH), gc
- .get(Calendar.HOUR_OF_DAY), gc.get(Calendar.MINUTE));
+ return UUID.randomUUID().toString();
}
/**
@@ -588,35 +581,15 @@ public static boolean canDownloadResource(String value) {
private String downloadResource(String value, boolean convertToUnix) {
if (canDownloadResource(value)) {
getConsole().printInfo("converting to local " + value);
- String location = getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR);
-
+ File resourceDir = new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
String destinationName = new Path(value).getName();
- String prefix = destinationName;
- String postfix = null;
- int index = destinationName.lastIndexOf(".");
- if (index > 0) {
- prefix = destinationName.substring(0, index);
- postfix = destinationName.substring(index);
- }
- if (prefix.length() < 3) {
- prefix += ".tmp"; // prefix should be longer than 3
- }
-
- File resourceDir = new File(location);
- if (resourceDir.exists() && !resourceDir.isDirectory()) {
- throw new RuntimeException("The resource directory is not a directory, " +
- "resourceDir is set to " + resourceDir);
+ File destinationFile = new File(resourceDir, destinationName);
+ if (resourceDir.exists() && !resourceDir.isDirectory()) {
+ throw new RuntimeException("The resource directory is not a directory, resourceDir is set to " + resourceDir);
}
if (!resourceDir.exists() && !resourceDir.mkdirs()) {
throw new RuntimeException("Couldn't create directory " + resourceDir);
}
-
- File destinationFile;
- try {
- destinationFile = File.createTempFile(prefix, postfix, resourceDir);
- } catch (Exception e) {
- throw new RuntimeException("Failed to create temporary file for " + value, e);
- }
try {
FileSystem fs = FileSystem.get(new URI(value), conf);
fs.copyToLocalFile(new Path(value), new Path(destinationFile.getCanonicalPath()));
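
The rewrite above drops `File.createTempFile` (and its prefix/suffix bookkeeping) in favor of a fixed `new File(resourceDir, destinationName)`: since the directory itself is now unique per session, a downloaded file can keep its original name, which is what lets an `ADD FILE` script be invoked by name (as the new `remote_script.q` test does with `python newline.py`). A sketch of the naming change under that assumption (paths are illustrative):

import java.io.File;

// Sketch: with a per-session resourceDir, a downloaded resource keeps its
// original base name instead of a mangled temp-file name, so it can be
// referenced verbatim (e.g. "python newline.py").
public class ResourceNamingSketch {
  static File destinationFor(File resourceDir, String sourcePath) {
    // Take the last path component as the local file name.
    String name = sourcePath.substring(sourcePath.lastIndexOf('/') + 1);
    return new File(resourceDir, name);
  }

  public static void main(String[] args) {
    File dir = new File("/tmp/1234-abcd_resources");
    // Prints /tmp/1234-abcd_resources/newline.py
    System.out.println(destinationFor(dir, "hdfs:///newline.py"));
  }
}
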
@@ -756,4 +729,17 @@ public void addLocalMapRedErrors(String id, List<String> localMapRedErrors) {
public void setLocalMapRedErrors(Map<String, List<String>> localMapRedErrors) {
this.localMapRedErrors = localMapRedErrors;
}
+
+ public void close() {
+ String resourceDirPath = getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR);
+ try {
+ File resourceDir = new File(resourceDirPath);
+ LOG.debug("Removing resource dir " + resourceDir);
+ if (resourceDir.exists()) {
+ FileUtils.deleteDirectory(resourceDir);
+ }
+ } catch (IOException e) {
+ LOG.info("Error removing session resource dir " + resourceDirPath, e);
+ }
+ }
}
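
Because the resources directory is now a single per-session path rather than a shared pool of temp files, `close()` can simply delete the whole tree. A standalone sketch of the cleanup added above, using the same commons-io call (the path below is illustrative):

import java.io.File;
import java.io.IOException;
import org.apache.commons.io.FileUtils;

// Sketch of the close() cleanup: best-effort recursive removal of the
// per-session resource directory, never letting an IO failure propagate
// out of session shutdown.
public class ResourceDirCleanupSketch {
  public static void cleanup(String resourceDirPath) {
    File resourceDir = new File(resourceDirPath);
    try {
      if (resourceDir.exists()) {
        FileUtils.deleteDirectory(resourceDir); // commons-io recursive delete
      }
    } catch (IOException e) {
      // Log and continue: failing to remove scratch space should not
      // abort session close.
      System.err.println("Error removing " + resourceDirPath + ": " + e);
    }
  }

  public static void main(String[] args) {
    cleanup("/tmp/example-session_resources");
  }
}
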
diff --git a/ql/src/test/queries/clientpositive/remote_script.q b/ql/src/test/queries/clientpositive/remote_script.q
new file mode 100644
index 0000000..926601c
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/remote_script.q
@@ -0,0 +1,13 @@
+dfs -put ../data/scripts/newline.py /newline.py;
+add file hdfs:///newline.py;
+set hive.transform.escape.input=true;
+
+create table tmp_tmp(key string, value string) stored as rcfile;
+insert overwrite table tmp_tmp
+SELECT TRANSFORM(key, value) USING
+'python newline.py' AS key, value FROM src limit 6;
+
+select * from tmp_tmp ORDER BY key ASC, value ASC;
+
+dfs -rmr /newline.py;
+drop table tmp_tmp;
diff --git a/ql/src/test/results/clientpositive/remote_script.q.out b/ql/src/test/results/clientpositive/remote_script.q.out
new file mode 100644
index 0000000..8806b2b
--- /dev/null
+++ b/ql/src/test/results/clientpositive/remote_script.q.out
@@ -0,0 +1,50 @@
+PREHOOK: query: create table tmp_tmp(key string, value string) stored as rcfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tmp_tmp(key string, value string) stored as rcfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_tmp
+PREHOOK: query: insert overwrite table tmp_tmp
+SELECT TRANSFORM(key, value) USING
+'python newline.py' AS key, value FROM src limit 6
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@tmp_tmp
+POSTHOOK: query: insert overwrite table tmp_tmp
+SELECT TRANSFORM(key, value) USING
+'python newline.py' AS key, value FROM src limit 6
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@tmp_tmp
+POSTHOOK: Lineage: tmp_tmp.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tmp_tmp.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select * from tmp_tmp ORDER BY key ASC, value ASC
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tmp_tmp
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tmp_tmp ORDER BY key ASC, value ASC
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tmp_tmp
+#### A masked pattern was here ####
+POSTHOOK: Lineage: tmp_tmp.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tmp_tmp.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+1 2 NULL
+1 2 NULL
+1 NULL
+2 NULL
+1 NULL
+2 NULL
+1 NULL
+2 NULL
+1 NULL
+2 NULL
+#### A masked pattern was here ####
+PREHOOK: query: drop table tmp_tmp
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tmp_tmp
+PREHOOK: Output: default@tmp_tmp
+POSTHOOK: query: drop table tmp_tmp
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tmp_tmp
+POSTHOOK: Output: default@tmp_tmp
+POSTHOOK: Lineage: tmp_tmp.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tmp_tmp.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index 3a6231c..19513df 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -24,7 +24,10 @@
import java.util.Map;
import java.util.Set;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -64,6 +67,8 @@
private static final String FETCH_WORK_SERDE_CLASS =
"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe";
+ private static final Log LOG = LogFactory.getLog(HiveSessionImpl.class);
+
private SessionManager sessionManager;
private OperationManager operationManager;
@@ -79,7 +84,9 @@ public HiveSessionImpl(String username, String password, Map<String, String> ses
hiveConf.set(entry.getKey(), entry.getValue());
}
}
-
+ // set an explicit session ID to control the downloaded-resources directory name
+ hiveConf.set(ConfVars.HIVESESSIONID.varname,
+ sessionHandle.getHandleIdentifier().toString());
sessionState = new SessionState(hiveConf);
}
@@ -301,6 +308,7 @@ public void close() throws HiveSQLException {
hiveHist.closeStream();
}
} finally {
+ sessionState.close();
release();
}
}
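
On the HiveServer2 side, pinning `hive.session.id` to the session handle's UUID before constructing `SessionState` makes the `${hive.session.id}`-based directory unique per connection, and the new `sessionState.close()` in the `finally` block deletes it when the session ends. A sketch of that wiring (property names match the patch; the surrounding class is illustrative):

import java.util.UUID;
import org.apache.hadoop.conf.Configuration;

// Sketch: fix hive.session.id per session so the expanded resources dir
// is isolated per connection and deletable wholesale on close().
public class SessionIdWiringSketch {
  static Configuration confForSession(UUID handleId) {
    Configuration conf = new Configuration(false);
    // Default from HiveConf above, repeated so the sketch is self-contained.
    conf.set("hive.downloaded.resources.dir", "/tmp/${hive.session.id}_resources");
    // Pin the session ID to the handle's UUID, as HiveSessionImpl now does.
    conf.set("hive.session.id", handleId.toString());
    return conf;
  }

  public static void main(String[] args) {
    Configuration conf = confForSession(UUID.randomUUID());
    // Each session resolves to its own directory.
    System.out.println(conf.get("hive.downloaded.resources.dir"));
  }
}
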