diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
index 33c4cd8..cbbec36 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
@@ -19,6 +19,7 @@
 
 import java.io.IOException;
 import java.io.OutputStreamWriter;
+import java.io.PrintStream;
 import java.text.DecimalFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -437,7 +438,8 @@ static void printJsonData(Configuration conf,
       String filename) throws IOException, JSONException {
     Path path = new Path(filename);
     Reader reader = OrcFile.createReader(path.getFileSystem(conf), path);
-    OutputStreamWriter out = new OutputStreamWriter(System.out, "UTF-8");
+    PrintStream printStream = System.out;
+    OutputStreamWriter out = new OutputStreamWriter(printStream, "UTF-8");
     RecordReader rows = reader.rows(null);
     Object row = null;
     List<OrcProto.Type> types = reader.getTypes();
@@ -447,6 +449,9 @@
       printObject(writer, row, types, 0);
       out.write("\n");
       out.flush();
+      if (printStream.checkError()) {
+        throw new IOException("Error encountered when writing to stdout.");
+      }
     }
   }
 }
diff --git ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
index e77e24d..68d503e 100644
--- ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
+++ ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
@@ -26,6 +26,8 @@
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
 import java.io.PrintStream;
 import java.sql.Date;
 import java.sql.Timestamp;
@@ -249,6 +251,54 @@ public void testDataDump() throws Exception {
     assertEquals("{\"b\":true,\"bt\":10,\"s\":100,\"i\":1000,\"l\":10000,\"f\":4,\"d\":20,\"de\":\"4.2222\",\"t\":\"2014-11-25 18:09:24\",\"dt\":\"2014-11-25\",\"str\":\"string\",\"c\":\"hello \",\"vc\":\"hello\",\"m\":[{\"_key\":\"k1\",\"_value\":\"v1\"}],\"a\":[100,200],\"st\":{\"i\":10,\"s\":\"foo\"}}", lines[0]);
     assertEquals("{\"b\":false,\"bt\":20,\"s\":200,\"i\":2000,\"l\":20000,\"f\":8,\"d\":40,\"de\":\"2.2222\",\"t\":\"2014-11-25 18:02:44\",\"dt\":\"2014-09-28\",\"str\":\"abcd\",\"c\":\"world \",\"vc\":\"world\",\"m\":[{\"_key\":\"k3\",\"_value\":\"v3\"}],\"a\":[200,300],\"st\":{\"i\":20,\"s\":\"bar\"}}", lines[1]);
   }
+
+  @Test(expected = IOException.class)
+  public void testDataDumpThrowsIOException() throws Exception {
+    PrintStream origOut = System.out;
+    try {
+      ObjectInspector inspector;
+      synchronized (TestOrcFile.class) {
+        inspector = ObjectInspectorFactory.getReflectionObjectInspector
+            (AllTypesRecord.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+      }
+      Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
+          100000, CompressionKind.NONE, 10000, 1000);
+      Map<String, String> m = new HashMap<String, String>(2);
+      m.put("k1", "v1");
+      writer.addRow(new AllTypesRecord(
+          true,
+          (byte) 10,
+          (short) 100,
+          1000,
+          10000L,
+          4.0f,
+          20.0,
+          HiveDecimal.create("4.2222"),
+          new Timestamp(1416967764000L),
+          new Date(1416967764000L),
+          "string",
+          new HiveChar("hello", 5),
+          new HiveVarchar("hello", 10),
+          m,
+          Arrays.asList(100, 200),
+          new AllTypesRecord.Struct(10, "foo")));
+
+      writer.close();
+
+      OutputStream myOut = new OutputStream() {
+        @Override
+        public void write(int b) throws IOException {
+          throw new IOException();
+        }
+      };
+
+      // replace stdout and run command
+      System.setOut(new PrintStream(myOut));
+      FileDump.main(new String[]{testFilePath.toString(), "-d"});
+    } finally {
+      System.setOut(origOut);
+    }
+  }
 
   // Test that if the fraction of rows that have distinct strings is greater than the configured
   // threshold dictionary encoding is turned off. If dictionary encoding is turned off the length