diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index 2e7adaee6b..325a82d462 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -61,6 +61,7 @@
 import org.apache.hadoop.hive.common.cli.ShellCmdExecutor;
 import org.apache.hadoop.hive.common.io.CachingPrintStream;
 import org.apache.hadoop.hive.common.io.FetchConverter;
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.Validator;
@@ -722,8 +723,8 @@ public int run(String[] args) throws Exception {
     CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
     ss.in = System.in;
     try {
-      ss.out = new PrintStream(System.out, true, "UTF-8");
-      ss.info = new PrintStream(System.err, true, "UTF-8");
+      ss.out = new SessionStream(System.out, true, "UTF-8");
+      ss.info = new SessionStream(System.err, true, "UTF-8");
       ss.err = new CachingPrintStream(System.err, true, "UTF-8");
     } catch (UnsupportedEncodingException e) {
       return 3;
diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
index 8419c331d8..4a2bae5c7c 100644
--- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
+++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
@@ -47,6 +47,7 @@
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -110,10 +111,10 @@ public void testThatCliDriverDoesNotStripComments() throws Exception {
 
     // Capture stdout and stderr
     ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
-    PrintStream out = new PrintStream(dataOut);
+    SessionStream out = new SessionStream(dataOut);
     System.setOut(out);
     ByteArrayOutputStream dataErr = new ByteArrayOutputStream();
-    PrintStream err = new PrintStream(dataErr);
+    SessionStream err = new SessionStream(dataErr);
     System.setErr(err);
 
     CliSessionState ss = new CliSessionState(new HiveConf());
@@ -175,7 +176,7 @@ private PrintStream headerPrintingTestDriver(Schema mockSchema) {
     when(proc.getSchema()).thenReturn(mockSchema);
 
     CliSessionState mockSS = mock(CliSessionState.class);
-    PrintStream mockOut = mock(PrintStream.class);
+    SessionStream mockOut = mock(SessionStream.class);
 
     mockSS.out = mockOut;
 
@@ -245,8 +246,8 @@ public void testRun() throws Exception {
   public void testQuit() throws Exception {
 
     CliSessionState ss = new CliSessionState(new HiveConf());
-    ss.err = System.err;
-    ss.out = System.out;
+    ss.err = new SessionStream(System.err);
+    ss.out = new SessionStream(System.out);
 
     try {
       CliSessionState.start(ss);
@@ -276,7 +277,7 @@ public void testProcessSelectDatabase() throws Exception {
     CliSessionState sessinState = new CliSessionState(new HiveConf());
     CliSessionState.start(sessinState);
     ByteArrayOutputStream data = new ByteArrayOutputStream();
-    sessinState.err = new PrintStream(data);
+    sessinState.err = new SessionStream(data);
     sessinState.database = "database";
 
     CliDriver driver = new CliDriver();
@@ -311,8 +312,8 @@ public void testprocessInitFiles() throws Exception {
 
     ByteArrayOutputStream data = new ByteArrayOutputStream();
 
-    sessionState.err = new PrintStream(data);
-    sessionState.out = System.out;
+    sessionState.err = new SessionStream(data);
+    sessionState.out = new SessionStream(System.out);
     try {
       CliSessionState.start(sessionState);
       CliDriver cliDriver = new CliDriver();
diff --git a/common/src/java/org/apache/hadoop/hive/common/io/CachingPrintStream.java b/common/src/java/org/apache/hadoop/hive/common/io/CachingPrintStream.java
index a7c6e6ebff..70dab87dff 100644
--- a/common/src/java/org/apache/hadoop/hive/common/io/CachingPrintStream.java
+++ b/common/src/java/org/apache/hadoop/hive/common/io/CachingPrintStream.java
@@ -20,13 +20,12 @@
 
 import java.io.FileNotFoundException;
 import java.io.OutputStream;
-import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.List;
 
 // A printStream that stores messages logged to it in a list.
-public class CachingPrintStream extends PrintStream {
+public class CachingPrintStream extends SessionStream {
 
   List output = new ArrayList();
 
diff --git a/common/src/java/org/apache/hadoop/hive/common/io/FetchConverter.java b/common/src/java/org/apache/hadoop/hive/common/io/FetchConverter.java
index 01eef60e79..62b3d2d286 100644
--- a/common/src/java/org/apache/hadoop/hive/common/io/FetchConverter.java
+++ b/common/src/java/org/apache/hadoop/hive/common/io/FetchConverter.java
@@ -19,10 +19,9 @@
 package org.apache.hadoop.hive.common.io;
 
 import java.io.OutputStream;
-import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
 
-public abstract class FetchConverter extends PrintStream {
+public abstract class FetchConverter extends SessionStream {
 
   protected volatile boolean queryfound;
   protected volatile boolean fetchStarted;
diff --git a/common/src/java/org/apache/hadoop/hive/common/io/SessionStream.java b/common/src/java/org/apache/hadoop/hive/common/io/SessionStream.java
new file mode 100644
index 0000000000..a186a6addd
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/io/SessionStream.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.io;
+
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
+
+/**
+ * The Session uses this stream instead of PrintStream to prevent closing of System out and System err.
+ * Ref: HIVE-21033
+ */
+public class SessionStream extends PrintStream {
+
+  public SessionStream(OutputStream out) {
+    super(out);
+  }
+
+  public SessionStream(OutputStream out, boolean autoFlush, String encoding) throws UnsupportedEncodingException {
+    super(out, autoFlush, encoding);
+  }
+
+  @Override
+  public void close() {
+    if (out != System.out && out != System.err) {
+      //Don't close if system out or system err
+      super.close();
+    }
+  }
+}
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
index d7a9bb094d..7a5896442b 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
@@ -23,7 +23,6 @@
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
@@ -38,6 +37,7 @@
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.Parser;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -70,8 +70,8 @@ public static void main(String[] args) {
     CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
     ss.in = System.in;
     try {
-      ss.out = new PrintStream(System.out, true, "UTF-8");
-      ss.err = new PrintStream(System.err, true, "UTF-8");
+      ss.out = new SessionStream(System.out, true, "UTF-8");
+      ss.err = new SessionStream(System.err, true, "UTF-8");
     } catch (UnsupportedEncodingException e) {
       System.exit(1);
     }
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index 872d7afd23..5f5c9f761a 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -43,6 +43,7 @@
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.DriverFactory;
@@ -184,7 +185,7 @@ public void setup() throws Exception {
 
     driver = DriverFactory.newDriver(hiveConf);
 
-    SessionState.get().out = System.out;
+    SessionState.get().out = new SessionStream(System.out);
 
     createTable(BASIC_TABLE, "a int, b string");
     createTableInSpecifiedPath(ENCRYPTED_TABLE, "a int, b string",
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index 9b50fd4f30..5fd0ef9161 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -19,7 +19,6 @@
 
 package org.apache.hadoop.hive.ql.history;
 
-import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
 import java.lang.reflect.Proxy;
 import java.util.LinkedList;
@@ -32,6 +31,7 @@
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -135,8 +135,8 @@ public void testSimpleQuery() {
       CliSessionState ss = new CliSessionState(hconf);
       ss.in = System.in;
       try {
-        ss.out = new PrintStream(System.out, true, "UTF-8");
-        ss.err = new PrintStream(System.err, true, "UTF-8");
+        ss.out = new SessionStream(System.out, true, "UTF-8");
+        ss.err = new SessionStream(System.err, true, "UTF-8");
       } catch (UnsupportedEncodingException e) {
         System.exit(3);
       }
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java
index 8f1abb64b5..fa359f9f8f 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java
@@ -23,6 +23,7 @@
 
 import org.apache.hadoop.hive.cli.CliDriver;
 import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
@@ -67,8 +68,8 @@ public static void beforeTest() throws Exception {
 
     // once SessionState for thread is set, CliDriver picks conf from it
    CliSessionState ss = new CliSessionState(conf);
-    ss.err = System.err;
-    ss.out = System.out;
+    ss.err = new SessionStream(System.err);
+    ss.out = new SessionStream(System.out);
     SessionState.start(ss);
     TestCLIAuthzSessionContext.driver = new CliDriver();
   }
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 886f85e5b3..08c33e9595 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -74,10 +74,7 @@
 import org.apache.hadoop.hive.cli.CliDriver;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.cli.control.AbstractCliConfig;
-import org.apache.hadoop.hive.common.io.CachingPrintStream;
-import org.apache.hadoop.hive.common.io.DigestPrintStream;
-import org.apache.hadoop.hive.common.io.SortAndDigestPrintStream;
-import org.apache.hadoop.hive.common.io.SortPrintStream;
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.llap.LlapItUtils;
@@ -1169,7 +1166,7 @@ private void setSessionOutputs(String fileName, CliSessionState ss, File outf) t
     } else if (qSortNHashQuerySet.contains(fileName)) {
       ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
     } else {
-      ss.out = new PrintStream(fo, true, "UTF-8");
+      ss.out = new SessionStream(fo, true, "UTF-8");
     }
     ss.err = new CachingPrintStream(fo, true, "UTF-8");
     ss.setIsSilent(true);
@@ -1203,8 +1200,8 @@ private CliSessionState startSessionState(boolean canReuseSession) throws IOExce
 
     CliSessionState ss = new CliSessionState(conf);
     ss.in = System.in;
-    ss.out = System.out;
-    ss.err = System.out;
+    ss.out = new SessionStream(System.out);
+    ss.err = new SessionStream(System.out);
 
     SessionState oldSs = SessionState.get();
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index e406060642..de5cd8b992 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -55,6 +55,7 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.common.log.ProgressMonitor;
 import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
@@ -179,9 +180,9 @@
    * Streams to read/write from.
    */
   public InputStream in;
-  public PrintStream out;
-  public PrintStream info;
-  public PrintStream err;
+  public SessionStream out;
+  public SessionStream info;
+  public SessionStream err;
   /**
    * Standard output from any child process(es).
    */
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java
index e85cf2d58d..b8d79bd475 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java
@@ -19,12 +19,12 @@
 package org.apache.hadoop.hive.ql.processors;
 
 import java.io.ByteArrayOutputStream;
-import java.io.PrintStream;
 import java.lang.reflect.Field;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.SystemVariables;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -55,9 +55,9 @@ public static void before() throws Exception {
   }
 
   @Before
-  public void setupTest() {
+  public void setupTest() throws Exception {
     baos = new ByteArrayOutputStream();
-    state.out = new PrintStream(baos);
+    state.out = new SessionStream(baos);
     processor = new SetProcessor();
   }
 
diff --git a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
index 99eaf02be3..ae902789df 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
@@ -24,13 +24,13 @@
 import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.IOException;
-import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.lang3.CharEncoding;
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -50,7 +50,6 @@ public class HiveCommandOperation extends ExecuteStatementOperation {
 
   private final CommandProcessor commandProcessor;
   private TableSchema resultSchema = null;
-  private boolean closeSessionStreams = true; // Only close file based streams, not System.out and System.err.
 
   /**
    * For processors other than Hive queries (Driver), they output to session.out (a temp file)
@@ -72,20 +71,19 @@ private void setupSessionIO(SessionState sessionState) {
       sessionState.in = null; // hive server's session input stream is not used
       // open a per-session file in auto-flush mode for writing temp results and tmp error output
       sessionState.out =
-          new PrintStream(new FileOutputStream(sessionState.getTmpOutputFile()), true, CharEncoding.UTF_8);
+          new SessionStream(new FileOutputStream(sessionState.getTmpOutputFile()), true, CharEncoding.UTF_8);
       sessionState.err =
-          new PrintStream(new FileOutputStream(sessionState.getTmpErrOutputFile()), true,CharEncoding.UTF_8);
+          new SessionStream(new FileOutputStream(sessionState.getTmpErrOutputFile()), true,CharEncoding.UTF_8);
     } catch (IOException e) {
       LOG.error("Error in creating temp output file ", e);
 
       // Close file streams to avoid resource leaking
       ServiceUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err);
-      closeSessionStreams = false;
 
       try {
         sessionState.in = null;
-        sessionState.out = new PrintStream(System.out, true, CharEncoding.UTF_8);
-        sessionState.err = new PrintStream(System.err, true, CharEncoding.UTF_8);
+        sessionState.out = new SessionStream(System.out, true, CharEncoding.UTF_8);
+        sessionState.err = new SessionStream(System.err, true, CharEncoding.UTF_8);
       } catch (UnsupportedEncodingException ee) {
         LOG.error("Error creating PrintStream", e);
         ee.printStackTrace();
@@ -97,9 +95,7 @@ private void setupSessionIO(SessionState sessionState) {
 
 
   private void tearDownSessionIO() {
-    if (closeSessionStreams) {
-      ServiceUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err);
-    }
+    ServiceUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err);
   }
 
   @Override
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index f975199bf5..429dbcdbcf 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -20,7 +20,6 @@
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
 import java.security.PrivilegedExceptionAction;
 import java.sql.SQLException;
@@ -40,6 +39,7 @@
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.CharEncoding;
 import org.apache.hadoop.hive.common.LogUtils;
+import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
@@ -55,7 +55,6 @@
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -139,9 +138,9 @@ public boolean shouldRunAsync() {
   private void setupSessionIO(SessionState sessionState) {
     try {
       sessionState.in = null; // hive server's session input stream is not used
-      sessionState.out = new PrintStream(System.out, true, CharEncoding.UTF_8);
-      sessionState.info = new PrintStream(System.err, true, CharEncoding.UTF_8);
-      sessionState.err = new PrintStream(System.err, true, CharEncoding.UTF_8);
+      sessionState.out = new SessionStream(System.out, true, CharEncoding.UTF_8);
+      sessionState.info = new SessionStream(System.err, true, CharEncoding.UTF_8);
+      sessionState.err = new SessionStream(System.err, true, CharEncoding.UTF_8);
     } catch (UnsupportedEncodingException e) {
       LOG.error("Error creating PrintStream", e);
       e.printStackTrace();