From bb56acd17d92fef6c199160041a7ad86afe5e38e Mon Sep 17 00:00:00 2001
From: Elliott Clark
Date: Tue, 15 Mar 2016 09:52:24 -0700
Subject: [PATCH] HBASE-15460 Fix infer issues in hbase-common

---
 .../hadoop/hbase/io/encoding/EncodedDataBlock.java | 20 ++++--
 .../encoding/HFileBlockDefaultDecodingContext.java | 76 ++++++++++++----------
 .../apache/hadoop/hbase/nio/SingleByteBuff.java    |  6 +-
 .../java/org/apache/hadoop/hbase/util/Base64.java  | 61 +++++++++++++----
 .../org/apache/hadoop/hbase/util/ClassSize.java    |  5 +-
 .../hadoop/hbase/util/DynamicClassLoader.java      | 28 ++++----
 .../java/org/apache/hadoop/hbase/util/JVM.java     | 74 +++++++++++++++------
 .../hbase/util/test/LoadTestKVGenerator.java       |  5 +-
 8 files changed, 182 insertions(+), 93 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
index 1f7f8fa..0fc0cb5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
@@ -164,22 +164,28 @@ public class EncodedDataBlock {
    */
  public static int getCompressedSize(Algorithm algo, Compressor compressor,
      byte[] inputBuffer, int offset, int length) throws IOException {
-    DataOutputStream compressedStream = new DataOutputStream(
-        new IOUtils.NullOutputStream());
-    if (compressor != null) {
-      compressor.reset();
-    }
+
+    // Create streams
+    // Storing them so we can close them
+    final IOUtils.NullOutputStream nullOutputStream = new IOUtils.NullOutputStream();
+    final DataOutputStream compressedStream = new DataOutputStream(nullOutputStream);
     OutputStream compressingStream = null;
+
     try {
-      compressingStream = algo.createCompressionStream(
-          compressedStream, compressor, 0);
+      if (compressor != null) {
+        compressor.reset();
+      }
+
+      compressingStream = algo.createCompressionStream(compressedStream, compressor, 0);

       compressingStream.write(inputBuffer, offset, length);
       compressingStream.flush();

       return compressedStream.size();
     } finally {
+      nullOutputStream.close();
+      compressedStream.close();
       if (compressingStream != null) compressingStream.close();
     }
   }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
index 30382d9..fa57ed7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
@@ -52,47 +52,53 @@ public class HFileBlockDefaultDecodingContext implements
   @Override
   public void prepareDecoding(int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
       ByteBuff blockBufferWithoutHeader, ByteBuff onDiskBlock) throws IOException {
-    InputStream in = new DataInputStream(new ByteBuffInputStream(onDiskBlock));
+    final ByteBuffInputStream byteBuffInputStream = new ByteBuffInputStream(onDiskBlock);
+    InputStream dataInputStream = new DataInputStream(byteBuffInputStream);

-    Encryption.Context cryptoContext = fileContext.getEncryptionContext();
-    if (cryptoContext != Encryption.Context.NONE) {
+    try {
+      Encryption.Context cryptoContext = fileContext.getEncryptionContext();
+      if (cryptoContext != Encryption.Context.NONE) {

-      Cipher cipher = cryptoContext.getCipher();
-      Decryptor decryptor = cipher.getDecryptor();
-      decryptor.setKey(cryptoContext.getKey());
+        Cipher cipher = cryptoContext.getCipher();
+        Decryptor decryptor = cipher.getDecryptor();
+        decryptor.setKey(cryptoContext.getKey());

-      // Encrypted block format:
-      // +--------------------------+
-      // | byte iv length           |
-      // +--------------------------+
-      // | iv data ...              |
-      // +--------------------------+
-      // | encrypted block data ... |
-      // +--------------------------+
+        // Encrypted block format:
+        // +--------------------------+
+        // | byte iv length           |
+        // +--------------------------+
+        // | iv data ...              |
+        // +--------------------------+
+        // | encrypted block data ... |
+        // +--------------------------+

-      int ivLength = in.read();
-      if (ivLength > 0) {
-        byte[] iv = new byte[ivLength];
-        IOUtils.readFully(in, iv);
-        decryptor.setIv(iv);
-        // All encrypted blocks will have a nonzero IV length. If we see an IV
-        // length of zero, this means the encoding context had 0 bytes of
-        // plaintext to encode.
-        decryptor.reset();
-        in = decryptor.createDecryptionStream(in);
+        int ivLength = dataInputStream.read();
+        if (ivLength > 0) {
+          byte[] iv = new byte[ivLength];
+          IOUtils.readFully(dataInputStream, iv);
+          decryptor.setIv(iv);
+          // All encrypted blocks will have a nonzero IV length. If we see an IV
+          // length of zero, this means the encoding context had 0 bytes of
+          // plaintext to encode.
+          decryptor.reset();
+          dataInputStream = decryptor.createDecryptionStream(dataInputStream);
+        }
+        onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
       }
-      onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
-    }

-    Compression.Algorithm compression = fileContext.getCompression();
-    assert blockBufferWithoutHeader.hasArray();
-    if (compression != Compression.Algorithm.NONE) {
-      Compression.decompress(blockBufferWithoutHeader.array(),
-        blockBufferWithoutHeader.arrayOffset(), in, onDiskSizeWithoutHeader,
-        uncompressedSizeWithoutHeader, compression);
-    } else {
-      IOUtils.readFully(in, blockBufferWithoutHeader.array(),
-        blockBufferWithoutHeader.arrayOffset(), onDiskSizeWithoutHeader);
+      Compression.Algorithm compression = fileContext.getCompression();
+      assert blockBufferWithoutHeader.hasArray();
+      if (compression != Compression.Algorithm.NONE) {
+        Compression.decompress(blockBufferWithoutHeader.array(),
+          blockBufferWithoutHeader.arrayOffset(), dataInputStream, onDiskSizeWithoutHeader,
+          uncompressedSizeWithoutHeader, compression);
+      } else {
+        IOUtils.readFully(dataInputStream, blockBufferWithoutHeader.array(),
+          blockBufferWithoutHeader.arrayOffset(), onDiskSizeWithoutHeader);
+      }
+    } finally {
+      byteBuffInputStream.close();
+      dataInputStream.close();
     }
   }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
index 946962b..9ad28dc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
@@ -200,8 +200,10 @@ public class SingleByteBuff extends ByteBuff {
       // create a copy.
       ObjectIntPair<ByteBuffer> pair = new ObjectIntPair<ByteBuffer>();
       src.asSubByteBuffer(srcOffset, length, pair);
-      ByteBufferUtils.copyFromBufferToBuffer(pair.getFirst(), this.buf, pair.getSecond(), offset,
-          length);
+      if (pair.getFirst() != null) {
+        ByteBufferUtils.copyFromBufferToBuffer(pair.getFirst(), this.buf, pair.getSecond(), offset,
+            length);
+      }
     }
     return this;
   }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
index a22133d..b825c0f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
@@ -1092,7 +1092,9 @@ public class Base64 {
    */
   public static byte[] decodeFromFile(String filename) {
     byte[] decodedData = null;
-    Base64InputStream bis = null;
+    BufferedInputStream bufferedInputStream = null;
+    FileInputStream fileInputStream = null;
+    Base64InputStream base64InputStream = null;
     try {
       File file = new File(filename);
       byte[] buffer;
@@ -1107,14 +1109,14 @@ public class Base64 {
       buffer = new byte[(int) file.length()];

       // Open a stream
-
-      bis = new Base64InputStream(new BufferedInputStream(
-          new FileInputStream(file)), DECODE);
+      fileInputStream = new FileInputStream(file);
+      bufferedInputStream = new BufferedInputStream(fileInputStream);
+      base64InputStream = new Base64InputStream(bufferedInputStream, DECODE);

       // Read until done
       int length = 0;
-      for (int numBytes; (numBytes = bis.read(buffer, length, 4096)) >= 0; ) {
+      for (int numBytes; (numBytes = base64InputStream.read(buffer, length, 4096)) >= 0; ) {
         length += numBytes;
       }

@@ -1127,9 +1129,23 @@ public class Base64 {
       LOG.error("Error decoding from file " + filename, e);

     } finally {
-      if (bis != null) {
+      if (fileInputStream != null) {
         try {
-          bis.close();
+          fileInputStream.close();
+        } catch (Exception e) {
+          LOG.error("error closing FileInputStream", e);
+        }
+      }
+      if (bufferedInputStream != null) {
+        try {
+          bufferedInputStream.close();
+        } catch (Exception e) {
+          LOG.error("error closing BufferedInputStream", e);
+        }
+      }
+      if (base64InputStream != null) {
+        try {
+          base64InputStream.close();
         } catch (Exception e) {
           LOG.error("error closing Base64InputStream", e);
         }
@@ -1149,7 +1165,10 @@ public class Base64 {
    */
   public static String encodeFromFile(String filename) {
     String encodedData = null;
-    Base64InputStream bis = null;
+    FileInputStream fileInputStream = null;
+    BufferedInputStream bufferedInputStream = null;
+    Base64InputStream base64InputStream = null;
+
     try {
       File file = new File(filename);

@@ -1159,12 +1178,13 @@ public class Base64 {

       // Open a stream
-      bis = new Base64InputStream(new BufferedInputStream(
-          new FileInputStream(file)), ENCODE);
+      fileInputStream = new FileInputStream(file);
+      bufferedInputStream = new BufferedInputStream(fileInputStream);
+      base64InputStream = new Base64InputStream(bufferedInputStream, ENCODE);

       // Read until done
       int length = 0;
-      for (int numBytes; (numBytes = bis.read(buffer, length, 4096)) >= 0; ) {
+      for (int numBytes; (numBytes = base64InputStream.read(buffer, length, 4096)) >= 0; ) {
         length += numBytes;
       }

@@ -1176,9 +1196,24 @@ public class Base64 {
       LOG.error("Error encoding from file " + filename, e);

     } finally {
-      if (bis != null) {
+      // Can't leak exceptions but still need to clean things up.
+      if (fileInputStream != null) {
         try {
-          bis.close();
+          fileInputStream.close();
+        } catch (Exception e) {
+          LOG.error("error closing FileInputStream", e);
+        }
+      }
+      if (bufferedInputStream != null) {
+        try {
+          bufferedInputStream.close();
+        } catch (Exception e) {
+          LOG.error("error closing BufferedInputStream", e);
+        }
+      }
+      if (base64InputStream != null) {
+        try {
+          base64InputStream.close();
         } catch (Exception e) {
           LOG.error("error closing Base64InputStream", e);
         }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index fdd0fae..3dce955 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -117,7 +117,7 @@ public class ClassSize {
   static {
     final String version = System.getProperty("java.version");
     // Verify String looks like this: 1.6.0_29
-    if (!version.matches("\\d\\.\\d\\..*")) {
+    if (version == null || !version.matches("\\d\\.\\d\\..*")) {
       throw new RuntimeException("Unexpected version format: " + version);
     }
     // Convert char to int
@@ -331,7 +331,8 @@ public class ClassSize {
    * know this too.
    */
   public static boolean is32BitJVM() {
-    return System.getProperty("sun.arch.data.model").equals("32");
+    final String model = System.getProperty("sun.arch.data.model");
+    return model != null && model.equals("32");
   }
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
index 214c917..595cc5b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
@@ -179,19 +179,21 @@ public class DynamicClassLoader extends ClassLoaderBase {
   private synchronized void loadNewJars() {
     // Refresh local jar file lists
-    for (File file: localDir.listFiles()) {
-      String fileName = file.getName();
-      if (jarModifiedTime.containsKey(fileName)) {
-        continue;
-      }
-      if (file.isFile() && fileName.endsWith(".jar")) {
-        jarModifiedTime.put(fileName, Long.valueOf(file.lastModified()));
-        try {
-          URL url = file.toURI().toURL();
-          addURL(url);
-        } catch (MalformedURLException mue) {
-          // This should not happen, just log it
-          LOG.warn("Failed to load new jar " + fileName, mue);
+    if (localDir != null) {
+      for (File file : localDir.listFiles()) {
+        String fileName = file.getName();
+        if (jarModifiedTime.containsKey(fileName)) {
+          continue;
+        }
+        if (file.isFile() && fileName.endsWith(".jar")) {
+          jarModifiedTime.put(fileName, Long.valueOf(file.lastModified()));
+          try {
+            URL url = file.toURI().toURL();
+            addURL(url);
+          } catch (MalformedURLException mue) {
+            // This should not happen, just log it
+            LOG.warn("Failed to load new jar " + fileName, mue);
+          }
         }
       }
     }
   }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
index 2d6065b..d77a977 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
@@ -45,13 +45,19 @@ public class JVM {
   private OperatingSystemMXBean osMbean;

   private static final boolean ibmvendor =
-    System.getProperty("java.vendor").contains("IBM");
-  private static final boolean windows =
-    System.getProperty("os.name").startsWith("Windows");
+      System.getProperty("java.vendor") != null &&
+      System.getProperty("java.vendor").contains("IBM");
+  private static final boolean windows =
+      System.getProperty("os.name") != null &&
+      System.getProperty("os.name").startsWith("Windows");
   private static final boolean linux =
-    System.getProperty("os.name").startsWith("Linux");
+      System.getProperty("os.name") != null &&
+      System.getProperty("os.name").startsWith("Linux");
+  private static final boolean amd64 =
+      System.getProperty("os.arch") != null &&
+      System.getProperty("os.arch").contains("amd64");
+
   private static final String JVMVersion = System.getProperty("java.version");
-  private static final boolean amd64 = System.getProperty("os.arch").contains("amd64");

   /**
    * Constructor. Get the running Operating System instance
@@ -138,8 +144,9 @@ public class JVM {
       ofdc = runUnixMXBeanMethod("getOpenFileDescriptorCount");
       return (ofdc != null ? ofdc.longValue () : -1);
     }
-    InputStream in = null;
-    BufferedReader output = null;
+    InputStream inputStream = null;
+    InputStreamReader inputStreamReader = null;
+    BufferedReader bufferedReader = null;
     try {
       //need to get the PID number of the process first
       RuntimeMXBean rtmbean = ManagementFactory.getRuntimeMXBean();
@@ -150,24 +157,32 @@ public class JVM {
       Process p = Runtime.getRuntime().exec(
       new String[] { "bash", "-c",
           "ls /proc/" + pidhost[0] + "/fdinfo | wc -l" });
-      in = p.getInputStream();
-      output = new BufferedReader(new InputStreamReader(in));
+      inputStream = p.getInputStream();
+      inputStreamReader = new InputStreamReader(inputStream);
+      bufferedReader = new BufferedReader(inputStreamReader);
       String openFileDesCount;
-      if ((openFileDesCount = output.readLine()) != null)
+      if ((openFileDesCount = bufferedReader.readLine()) != null)
         return Long.parseLong(openFileDesCount);
     } catch (IOException ie) {
       LOG.warn("Not able to get the number of open file descriptors", ie);
     } finally {
-      if (output != null) {
+      if (bufferedReader != null) {
         try {
-          output.close();
+          bufferedReader.close();
         } catch (IOException e) {
-          LOG.warn("Not able to close the InputStream", e);
+          LOG.warn("Not able to close the BufferedReader", e);
         }
       }
-      if (in != null){
+      if (inputStreamReader != null){
+        try {
+          inputStreamReader.close();
+        } catch (IOException e) {
+          LOG.warn("Not able to close the InputStreamReader", e);
+        }
+      }
+      if (inputStream != null){
         try {
-          in.close();
+          inputStream.close();
         } catch (IOException e) {
           LOG.warn("Not able to close the InputStream", e);
         }
@@ -210,21 +225,40 @@ public class JVM {
       return 0;
     }

-    BufferedReader input = null;
+    InputStream inputStream = null;
+    InputStreamReader inputStreamReader = null;
+    BufferedReader bufferedReader = null;
+
     try {
       int count = 0;
       Process p = Runtime.getRuntime().exec("ps -e");
-      input = new BufferedReader(new InputStreamReader(p.getInputStream()));
-      while (input.readLine() != null) {
+      inputStream = p.getInputStream();
+      inputStreamReader = new InputStreamReader(inputStream);
+      bufferedReader = new BufferedReader(inputStreamReader);
+      while (bufferedReader.readLine() != null) {
         count++;
       }
       return count - 1; // -1 because there is a headline
     } catch (IOException e) {
       return -1;
     } finally {
-      if (input != null){
+      if (bufferedReader != null) {
         try {
-          input.close();
+          bufferedReader.close();
+        } catch (IOException e) {
+          LOG.warn("Not able to close the BufferedReader", e);
+        }
+      }
+      if (inputStreamReader != null){
+        try {
+          inputStreamReader.close();
+        } catch (IOException e) {
+          LOG.warn("Not able to close the InputStreamReader", e);
+        }
+      }
+      if (inputStream != null){
+        try {
+          inputStream.close();
         } catch (IOException e) {
           LOG.warn("Not able to close the InputStream", e);
         }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
index 9e9b507..4edd270 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
@@ -107,7 +107,10 @@ public class LoadTestKVGenerator {
   private static byte[] getValueForRowColumn(int dataSize, byte[]... seedStrings) {
     long seed = dataSize;
     for (byte[] str : seedStrings) {
-      seed += Bytes.toString(str).hashCode();
+      final String bytesString = Bytes.toString(str);
+      if (bytesString != null) {
+        seed += bytesString.hashCode();
+      }
     }
     Random seededRandom = new Random(seed);
     byte[] randomBytes = new byte[dataSize];
-- 
2.7.2