diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/SimpleAllocator.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/SimpleAllocator.java index d8f59d1..51eb34e 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/SimpleAllocator.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/SimpleAllocator.java @@ -32,9 +32,10 @@ private final boolean isDirect; private static Field cleanerField; static { - ByteBuffer tmp = ByteBuffer.allocateDirect(1); try { - cleanerField = tmp.getClass().getDeclaredField("cleaner"); + // TODO: To make it work for JDK9 use CleanerUtil from https://issues.apache.org/jira/browse/HADOOP-12760 + final Class<?> dbClazz = Class.forName("java.nio.DirectByteBuffer"); + cleanerField = dbClazz.getDeclaredField("cleaner"); cleanerField.setAccessible(true); } catch (Throwable t) { LlapIoImpl.LOG.warn("Cannot initialize DirectByteBuffer cleaner", t); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java index 0ac3ec5..6170310 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.io.orc.encoded; import java.io.IOException; +import java.lang.reflect.Field; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; @@ -43,11 +44,13 @@ import org.apache.orc.impl.StreamName; import org.apache.orc.StripeInformation; import org.apache.orc.impl.BufferChunk; -import org.apache.hadoop.hive.llap.DebugUtils; import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch; import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.PoolFactory; import org.apache.orc.OrcProto; +import sun.misc.Cleaner; + + /** * Encoded reader implementation. 
 * @@ -80,6 +83,17 @@ */ class EncodedReaderImpl implements EncodedReader { public static final Logger LOG = LoggerFactory.getLogger(EncodedReaderImpl.class); + private static Field cleanerField; + static { + try { + // TODO: To make it work for JDK9 use CleanerUtil from https://issues.apache.org/jira/browse/HADOOP-12760 + final Class<?> dbClazz = Class.forName("java.nio.DirectByteBuffer"); + cleanerField = dbClazz.getDeclaredField("cleaner"); + cleanerField.setAccessible(true); + } catch (Throwable t) { + cleanerField = null; + } + } private static final Object POOLS_CREATION_LOCK = new Object(); private static Pools POOLS; private static class Pools { @@ -683,6 +697,18 @@ public DiskRangeList readEncodedStream(long baseOffset, DiskRangeList start, lon copyUncompressedChunk(chunk.originalData, dest); } + if (chunk.originalData.isDirect()) { + if (cleanerField != null) { + try { + ((Cleaner) cleanerField.get(chunk.originalData)).clean(); + } catch (Exception e) { + // leave it for GC to clean up + LOG.warn("Unable to clean direct buffers using Cleaner.", e); + cleanerField = null; + } + } + } + chunk.originalData = null; if (isTracingEnabled) { LOG.trace("Locking " + chunk.getBuffer() + " due to reuse (after decompression)");