diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
new file mode 100644
index 0000000..f2b3cdb
--- /dev/null
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
@@ -0,0 +1,398 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hbase.io.compress;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.FilterOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.compress.CodecPool;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionInputStream;
+import org.apache.hadoop.io.compress.CompressionOutputStream;
+import org.apache.hadoop.io.compress.Compressor;
+import org.apache.hadoop.io.compress.Decompressor;
+import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * Compression-related utilities.
+ * Copied from hadoop-3315 tfile.
+ */
+@InterfaceAudience.Private
+public final class Compression {
+  static final Log LOG = LogFactory.getLog(Compression.class);
+
+  /**
+   * Prevents instantiation of this class.
+   */
+  private Compression() {
+    super();
+  }
+
+  static class FinishOnFlushCompressionStream extends FilterOutputStream {
+    public FinishOnFlushCompressionStream(CompressionOutputStream cout) {
+      super(cout);
+    }
+
+    @Override
+    public void write(byte b[], int off, int len) throws IOException {
+      out.write(b, off, len);
+    }
+
+    @Override
+    public void flush() throws IOException {
+      CompressionOutputStream cout = (CompressionOutputStream) out;
+      cout.finish();
+      cout.flush();
+      cout.resetState();
+    }
+  }
+
+  /**
+   * Returns the classloader to load the Codec class from.
+   * @return the ClassLoader from which the codec class should be loaded
+   */
+  private static ClassLoader getClassLoaderForCodec() {
+    ClassLoader cl = Thread.currentThread().getContextClassLoader();
+    if (cl == null) {
+      cl = Compression.class.getClassLoader();
+    }
+    if (cl == null) {
+      cl = ClassLoader.getSystemClassLoader();
+    }
+    if (cl == null) {
+      throw new RuntimeException("A ClassLoader to load the Codec could not be determined");
+    }
+    return cl;
+  }
+
+  /**
+   * Compression algorithms. The ordinal of these cannot change or else you
+   * risk breaking all existing HFiles out there.
Even the ones that are + * not compressed! (They use the NONE algorithm) + */ + public static enum Algorithm { + LZO("lzo") { + // Use base type to avoid compile-time dependencies. + private transient CompressionCodec lzoCodec; + + @Override + CompressionCodec getCodec(Configuration conf) { + if (lzoCodec == null) { + try { + Class externalCodec = + getClassLoaderForCodec().loadClass("com.hadoop.compression.lzo.LzoCodec"); + lzoCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, + new Configuration(conf)); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + return lzoCodec; + } + }, + GZ("gz") { + private transient GzipCodec codec; + + @Override + DefaultCodec getCodec(Configuration conf) { + if (codec == null) { + codec = new ReusableStreamGzipCodec(); + codec.setConf(new Configuration(conf)); + } + + return codec; + } + }, + + NONE("none") { + @Override + DefaultCodec getCodec(Configuration conf) { + return null; + } + + @Override + public synchronized InputStream createDecompressionStream( + InputStream downStream, Decompressor decompressor, + int downStreamBufferSize) throws IOException { + if (downStreamBufferSize > 0) { + return new BufferedInputStream(downStream, downStreamBufferSize); + } + // else { + // Make sure we bypass FSInputChecker buffer. + // return new BufferedInputStream(downStream, 1024); + // } + // } + return downStream; + } + + @Override + public synchronized OutputStream createCompressionStream( + OutputStream downStream, Compressor compressor, + int downStreamBufferSize) throws IOException { + if (downStreamBufferSize > 0) { + return new BufferedOutputStream(downStream, downStreamBufferSize); + } + + return downStream; + } + }, + SNAPPY("snappy") { + // Use base type to avoid compile-time dependencies. + private transient CompressionCodec snappyCodec; + + @Override + CompressionCodec getCodec(Configuration conf) { + if (snappyCodec == null) { + try { + Class externalCodec = + getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.SnappyCodec"); + snappyCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + return snappyCodec; + } + }, + LZ4("lz4") { + // Use base type to avoid compile-time dependencies. + private transient CompressionCodec lz4Codec; + + @Override + CompressionCodec getCodec(Configuration conf) { + if (lz4Codec == null) { + try { + Class externalCodec = + getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.Lz4Codec"); + lz4Codec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + return lz4Codec; + } + }; + + private final Configuration conf; + private final String compressName; + // data input buffer size to absorb small reads from application. + private static final int DATA_IBUF_SIZE = 1 * 1024; + // data output buffer size to absorb small writes from application. + private static final int DATA_OBUF_SIZE = 4 * 1024; + + Algorithm(String name) { + this.conf = new Configuration(); + this.conf.setBoolean("hadoop.native.lib", true); + this.compressName = name; + } + + abstract CompressionCodec getCodec(Configuration conf); + + public InputStream createDecompressionStream( + InputStream downStream, Decompressor decompressor, + int downStreamBufferSize) throws IOException { + CompressionCodec codec = getCodec(conf); + // Set the internal buffer size to read from down stream. 
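+      // Note: this setInt mutates the Configuration held by the cached codec
+      // instance, so the chosen buffer size also carries over to streams
+      // created later from the same Algorithm constant.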
+      if (downStreamBufferSize > 0) {
+        ((Configurable)codec).getConf().setInt("io.file.buffer.size",
+            downStreamBufferSize);
+      }
+      CompressionInputStream cis =
+          codec.createInputStream(downStream, decompressor);
+      BufferedInputStream bis2 = new BufferedInputStream(cis, DATA_IBUF_SIZE);
+      return bis2;
+
+    }
+
+    public OutputStream createCompressionStream(
+        OutputStream downStream, Compressor compressor, int downStreamBufferSize)
+        throws IOException {
+      OutputStream bos1 = null;
+      if (downStreamBufferSize > 0) {
+        bos1 = new BufferedOutputStream(downStream, downStreamBufferSize);
+      }
+      else {
+        bos1 = downStream;
+      }
+      CompressionOutputStream cos =
+          createPlainCompressionStream(bos1, compressor);
+      BufferedOutputStream bos2 =
+          new BufferedOutputStream(new FinishOnFlushCompressionStream(cos),
+              DATA_OBUF_SIZE);
+      return bos2;
+    }
+
+    /**
+     * Creates a compression stream without any additional wrapping into
+     * buffering streams.
+     */
+    public CompressionOutputStream createPlainCompressionStream(
+        OutputStream downStream, Compressor compressor) throws IOException {
+      CompressionCodec codec = getCodec(conf);
+      ((Configurable)codec).getConf().setInt("io.file.buffer.size", 32 * 1024);
+      return codec.createOutputStream(downStream, compressor);
+    }
+
+    public Compressor getCompressor() {
+      CompressionCodec codec = getCodec(conf);
+      if (codec != null) {
+        Compressor compressor = CodecPool.getCompressor(codec);
+        if (compressor != null) {
+          if (compressor.finished()) {
+            // Somebody returned the compressor to CodecPool but is still
+            // using it.
+            LOG.warn("Compressor obtained from CodecPool is already finished()");
+            // throw new AssertionError(
+            //    "Compressor obtained from CodecPool is already finished()");
+          }
+          compressor.reset();
+        }
+        return compressor;
+      }
+      return null;
+    }
+
+    public void returnCompressor(Compressor compressor) {
+      if (compressor != null) {
+        CodecPool.returnCompressor(compressor);
+      }
+    }
+
+    public Decompressor getDecompressor() {
+      CompressionCodec codec = getCodec(conf);
+      if (codec != null) {
+        Decompressor decompressor = CodecPool.getDecompressor(codec);
+        if (decompressor != null) {
+          if (decompressor.finished()) {
+            // Somebody returned the decompressor to CodecPool but is still
+            // using it.
+            LOG.warn("Decompressor obtained from CodecPool is already finished()");
+            // throw new AssertionError(
+            //    "Decompressor obtained from CodecPool is already finished()");
+          }
+          decompressor.reset();
+        }
+        return decompressor;
+      }
+
+      return null;
+    }
+
+    public void returnDecompressor(Decompressor decompressor) {
+      if (decompressor != null) {
+        CodecPool.returnDecompressor(decompressor);
+      }
+    }
+
+    public String getName() {
+      return compressName;
+    }
+  }
+
+  public static Algorithm getCompressionAlgorithmByName(String compressName) {
+    Algorithm[] algos = Algorithm.class.getEnumConstants();
+
+    for (Algorithm a : algos) {
+      if (a.getName().equals(compressName)) {
+        return a;
+      }
+    }
+
+    throw new IllegalArgumentException(
+        "Unsupported compression algorithm name: " + compressName);
+  }
+
+  /**
+   * Get names of supported compression algorithms.
+   *
+   * @return Array of strings, each of which names a supported compression
+   *         algorithm. Currently the supported algorithms are: lzo, gz, none,
+   *         snappy, and lz4.
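+   *         These are the same names accepted by
+   *         {@link #getCompressionAlgorithmByName(String)}.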
+ */ + public static String[] getSupportedAlgorithms() { + Algorithm[] algos = Algorithm.class.getEnumConstants(); + + String[] ret = new String[algos.length]; + int i = 0; + for (Algorithm a : algos) { + ret[i++] = a.getName(); + } + + return ret; + } + + /** + * Decompresses data from the given stream using the configured compression + * algorithm. It will throw an exception if the dest buffer does not have + * enough space to hold the decompressed data. + * + * @param dest + * the output bytes buffer + * @param destOffset + * start writing position of the output buffer + * @param bufferedBoundedStream + * a stream to read compressed data from, bounded to the exact amount + * of compressed data + * @param compressedSize + * compressed data size, header not included + * @param uncompressedSize + * uncompressed data size, header not included + * @param compressAlgo + * compression algorithm used + * @throws IOException + */ + public static void decompress(byte[] dest, int destOffset, + InputStream bufferedBoundedStream, int compressedSize, + int uncompressedSize, Compression.Algorithm compressAlgo) + throws IOException { + + if (dest.length - destOffset < uncompressedSize) { + throw new IllegalArgumentException( + "Output buffer does not have enough space to hold " + + uncompressedSize + " decompressed bytes, available: " + + (dest.length - destOffset)); + } + + Decompressor decompressor = null; + try { + decompressor = compressAlgo.getDecompressor(); + InputStream is = compressAlgo.createDecompressionStream( + bufferedBoundedStream, decompressor, 0); + + IOUtils.readFully(is, dest, destOffset, uncompressedSize); + is.close(); + } finally { + if (decompressor != null) { + compressAlgo.returnDecompressor(decompressor); + } + } + } + +} diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java new file mode 100644 index 0000000..043635d --- /dev/null +++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */
+package org.apache.hadoop.hbase.io.compress;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Arrays;
+import java.util.zip.GZIPOutputStream;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.compress.CompressionOutputStream;
+import org.apache.hadoop.io.compress.CompressorStream;
+import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.hadoop.io.compress.zlib.ZlibFactory;
+
+/**
+ * Fixes an inefficiency in Hadoop's Gzip codec, allowing compression
+ * streams to be reused.
+ */
+@InterfaceAudience.Private
+public class ReusableStreamGzipCodec extends GzipCodec {
+
+  private static final Log LOG = LogFactory.getLog(ReusableStreamGzipCodec.class);
+
+  /**
+   * A bridge that wraps around a DeflaterOutputStream to make it a
+   * CompressionOutputStream.
+   */
+  protected static class ReusableGzipOutputStream extends CompressorStream {
+
+    private static final int GZIP_HEADER_LENGTH = 10;
+
+    /**
+     * Fixed ten-byte gzip header. See {@link GZIPOutputStream}'s source for
+     * details.
+     */
+    private static final byte[] GZIP_HEADER;
+
+    static {
+      // Capture the fixed ten-byte header hard-coded in GZIPOutputStream.
+      ByteArrayOutputStream baos = new ByteArrayOutputStream();
+      byte[] header = null;
+      GZIPOutputStream gzipStream = null;
+      try {
+        gzipStream = new GZIPOutputStream(baos);
+        gzipStream.finish();
+        header = Arrays.copyOfRange(baos.toByteArray(), 0, GZIP_HEADER_LENGTH);
+      } catch (IOException e) {
+        throw new RuntimeException("Could not create gzip stream", e);
+      } finally {
+        if (gzipStream != null) {
+          try {
+            gzipStream.close();
+          } catch (IOException e) {
+            LOG.error(e);
+          }
+        }
+      }
+      GZIP_HEADER = header;
+    }
+
+    private static class ResetableGZIPOutputStream extends GZIPOutputStream {
+      public ResetableGZIPOutputStream(OutputStream out) throws IOException {
+        super(out);
+      }
+
+      public void resetState() throws IOException {
+        def.reset();
+        crc.reset();
+        out.write(GZIP_HEADER);
+      }
+    }
+
+    public ReusableGzipOutputStream(OutputStream out) throws IOException {
+      super(new ResetableGZIPOutputStream(out));
+    }
+
+    @Override
+    public void close() throws IOException {
+      out.close();
+    }
+
+    @Override
+    public void flush() throws IOException {
+      out.flush();
+    }
+
+    @Override
+    public void write(int b) throws IOException {
+      out.write(b);
+    }
+
+    @Override
+    public void write(byte[] data, int offset, int length) throws IOException {
+      out.write(data, offset, length);
+    }
+
+    @Override
+    public void finish() throws IOException {
+      ((GZIPOutputStream) out).finish();
+    }
+
+    @Override
+    public void resetState() throws IOException {
+      ((ResetableGZIPOutputStream) out).resetState();
+    }
+  }
+
+  @Override
+  public CompressionOutputStream createOutputStream(OutputStream out)
+      throws IOException {
+    if (ZlibFactory.isNativeZlibLoaded(getConf())) {
+      return super.createOutputStream(out);
+    }
+    return new ReusableGzipOutputStream(out);
+  }
+
+}
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
index c659fb8..cedd6be 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
@@ -22,7 +22,7 @@
 import java.nio.ByteBuffer;
 
 import
org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.io.RawComparator; /** diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java index de6256f..a86cc71 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.io.encoding; import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.io.hfile.Compression; +import org.apache.hadoop.hbase.io.compress.Compression; /** * A decoding context that is created by a reader's encoder, and is shared diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java index 45f2749..55ba233 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.io.encoding; import java.io.IOException; +import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.hfile.BlockType; -import org.apache.hadoop.hbase.io.hfile.Compression; /** * An encoding context that is created by a writer's encoder, and is shared diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java deleted file mode 100644 index 06194c5..0000000 --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java +++ /dev/null @@ -1,392 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. 
- */ -package org.apache.hadoop.hbase.io.hfile; - -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.FilterOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.io.compress.CodecPool; -import org.apache.hadoop.io.compress.CompressionCodec; -import org.apache.hadoop.io.compress.CompressionInputStream; -import org.apache.hadoop.io.compress.CompressionOutputStream; -import org.apache.hadoop.io.compress.Compressor; -import org.apache.hadoop.io.compress.Decompressor; -import org.apache.hadoop.io.compress.GzipCodec; -import org.apache.hadoop.io.compress.DefaultCodec; -import org.apache.hadoop.util.ReflectionUtils; - -/** - * Compression related stuff. - * Copied from hadoop-3315 tfile. - */ -@InterfaceAudience.Private -public final class Compression { - static final Log LOG = LogFactory.getLog(Compression.class); - - /** - * Prevent the instantiation of class. - */ - private Compression() { - super(); - } - - static class FinishOnFlushCompressionStream extends FilterOutputStream { - public FinishOnFlushCompressionStream(CompressionOutputStream cout) { - super(cout); - } - - @Override - public void write(byte b[], int off, int len) throws IOException { - out.write(b, off, len); - } - - @Override - public void flush() throws IOException { - CompressionOutputStream cout = (CompressionOutputStream) out; - cout.finish(); - cout.flush(); - cout.resetState(); - } - } - - /** - * Returns the classloader to load the Codec class from. - * @return - */ - private static ClassLoader getClassLoaderForCodec() { - ClassLoader cl = Thread.currentThread().getContextClassLoader(); - if (cl == null) { - cl = Compression.class.getClassLoader(); - } - if (cl == null) { - cl = ClassLoader.getSystemClassLoader(); - } - if (cl == null) { - throw new RuntimeException("A ClassLoader to load the Codec could not be determined"); - } - return cl; - } - - /** - * Compression algorithms. The ordinal of these cannot change or else you - * risk breaking all existing HFiles out there. Even the ones that are - * not compressed! (They use the NONE algorithm) - */ - public static enum Algorithm { - LZO("lzo") { - // Use base type to avoid compile-time dependencies. 
- private transient CompressionCodec lzoCodec; - - @Override - CompressionCodec getCodec(Configuration conf) { - if (lzoCodec == null) { - try { - Class externalCodec = - getClassLoaderForCodec().loadClass("com.hadoop.compression.lzo.LzoCodec"); - lzoCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, - new Configuration(conf)); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - } - return lzoCodec; - } - }, - GZ("gz") { - private transient GzipCodec codec; - - @Override - DefaultCodec getCodec(Configuration conf) { - if (codec == null) { - codec = new ReusableStreamGzipCodec(); - codec.setConf(new Configuration(conf)); - } - - return codec; - } - }, - - NONE("none") { - @Override - DefaultCodec getCodec(Configuration conf) { - return null; - } - - @Override - public synchronized InputStream createDecompressionStream( - InputStream downStream, Decompressor decompressor, - int downStreamBufferSize) throws IOException { - if (downStreamBufferSize > 0) { - return new BufferedInputStream(downStream, downStreamBufferSize); - } - // else { - // Make sure we bypass FSInputChecker buffer. - // return new BufferedInputStream(downStream, 1024); - // } - // } - return downStream; - } - - @Override - public synchronized OutputStream createCompressionStream( - OutputStream downStream, Compressor compressor, - int downStreamBufferSize) throws IOException { - if (downStreamBufferSize > 0) { - return new BufferedOutputStream(downStream, downStreamBufferSize); - } - - return downStream; - } - }, - SNAPPY("snappy") { - // Use base type to avoid compile-time dependencies. - private transient CompressionCodec snappyCodec; - - @Override - CompressionCodec getCodec(Configuration conf) { - if (snappyCodec == null) { - try { - Class externalCodec = - getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.SnappyCodec"); - snappyCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - } - return snappyCodec; - } - }, - LZ4("lz4") { - // Use base type to avoid compile-time dependencies. - private transient CompressionCodec lz4Codec; - - @Override - CompressionCodec getCodec(Configuration conf) { - if (lz4Codec == null) { - try { - Class externalCodec = - getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.Lz4Codec"); - lz4Codec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - } - return lz4Codec; - } - }; - - private final Configuration conf; - private final String compressName; - // data input buffer size to absorb small reads from application. - private static final int DATA_IBUF_SIZE = 1 * 1024; - // data output buffer size to absorb small writes from application. - private static final int DATA_OBUF_SIZE = 4 * 1024; - - Algorithm(String name) { - this.conf = new Configuration(); - this.conf.setBoolean("hadoop.native.lib", true); - this.compressName = name; - } - - abstract CompressionCodec getCodec(Configuration conf); - - public InputStream createDecompressionStream( - InputStream downStream, Decompressor decompressor, - int downStreamBufferSize) throws IOException { - CompressionCodec codec = getCodec(conf); - // Set the internal buffer size to read from down stream. 
- if (downStreamBufferSize > 0) { - ((Configurable)codec).getConf().setInt("io.file.buffer.size", - downStreamBufferSize); - } - CompressionInputStream cis = - codec.createInputStream(downStream, decompressor); - BufferedInputStream bis2 = new BufferedInputStream(cis, DATA_IBUF_SIZE); - return bis2; - - } - - public OutputStream createCompressionStream( - OutputStream downStream, Compressor compressor, int downStreamBufferSize) - throws IOException { - OutputStream bos1 = null; - if (downStreamBufferSize > 0) { - bos1 = new BufferedOutputStream(downStream, downStreamBufferSize); - } - else { - bos1 = downStream; - } - CompressionOutputStream cos = - createPlainCompressionStream(bos1, compressor); - BufferedOutputStream bos2 = - new BufferedOutputStream(new FinishOnFlushCompressionStream(cos), - DATA_OBUF_SIZE); - return bos2; - } - - /** - * Creates a compression stream without any additional wrapping into - * buffering streams. - */ - public CompressionOutputStream createPlainCompressionStream( - OutputStream downStream, Compressor compressor) throws IOException { - CompressionCodec codec = getCodec(conf); - ((Configurable)codec).getConf().setInt("io.file.buffer.size", 32 * 1024); - return codec.createOutputStream(downStream, compressor); - } - - public Compressor getCompressor() { - CompressionCodec codec = getCodec(conf); - if (codec != null) { - Compressor compressor = CodecPool.getCompressor(codec); - if (compressor != null) { - if (compressor.finished()) { - // Somebody returns the compressor to CodecPool but is still using - // it. - LOG - .warn("Compressor obtained from CodecPool is already finished()"); - // throw new AssertionError( - // "Compressor obtained from CodecPool is already finished()"); - } - compressor.reset(); - } - return compressor; - } - return null; - } - - public void returnCompressor(Compressor compressor) { - if (compressor != null) { - CodecPool.returnCompressor(compressor); - } - } - - public Decompressor getDecompressor() { - CompressionCodec codec = getCodec(conf); - if (codec != null) { - Decompressor decompressor = CodecPool.getDecompressor(codec); - if (decompressor != null) { - if (decompressor.finished()) { - // Somebody returns the decompressor to CodecPool but is still using - // it. - LOG - .warn("Deompressor obtained from CodecPool is already finished()"); - // throw new AssertionError( - // "Decompressor obtained from CodecPool is already finished()"); - } - decompressor.reset(); - } - return decompressor; - } - - return null; - } - - public void returnDecompressor(Decompressor decompressor) { - if (decompressor != null) { - CodecPool.returnDecompressor(decompressor); - } - } - - public String getName() { - return compressName; - } - } - - public static Algorithm getCompressionAlgorithmByName(String compressName) { - Algorithm[] algos = Algorithm.class.getEnumConstants(); - - for (Algorithm a : algos) { - if (a.getName().equals(compressName)) { - return a; - } - } - - throw new IllegalArgumentException( - "Unsupported compression algorithm name: " + compressName); - } - - static String[] getSupportedAlgorithms() { - Algorithm[] algos = Algorithm.class.getEnumConstants(); - - String[] ret = new String[algos.length]; - int i = 0; - for (Algorithm a : algos) { - ret[i++] = a.getName(); - } - - return ret; - } - - /** - * Decompresses data from the given stream using the configured compression - * algorithm. It will throw an exception if the dest buffer does not have - * enough space to hold the decompressed data. 
- * - * @param dest - * the output bytes buffer - * @param destOffset - * start writing position of the output buffer - * @param bufferedBoundedStream - * a stream to read compressed data from, bounded to the exact amount - * of compressed data - * @param compressedSize - * compressed data size, header not included - * @param uncompressedSize - * uncompressed data size, header not included - * @param compressAlgo - * compression algorithm used - * @throws IOException - */ - public static void decompress(byte[] dest, int destOffset, - InputStream bufferedBoundedStream, int compressedSize, - int uncompressedSize, Compression.Algorithm compressAlgo) - throws IOException { - - if (dest.length - destOffset < uncompressedSize) { - throw new IllegalArgumentException( - "Output buffer does not have enough space to hold " - + uncompressedSize + " decompressed bytes, available: " - + (dest.length - destOffset)); - } - - Decompressor decompressor = null; - try { - decompressor = compressAlgo.getDecompressor(); - InputStream is = compressAlgo.createDecompressionStream( - bufferedBoundedStream, decompressor, 0); - - IOUtils.readFully(is, dest, destOffset, uncompressedSize); - is.close(); - } finally { - if (decompressor != null) { - compressAlgo.returnDecompressor(decompressor); - } - } - } - -} diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/ReusableStreamGzipCodec.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/ReusableStreamGzipCodec.java deleted file mode 100644 index 7f372c5..0000000 --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/ReusableStreamGzipCodec.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ -package org.apache.hadoop.hbase.io.hfile; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.Arrays; -import java.util.zip.GZIPOutputStream; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.io.compress.CompressionOutputStream; -import org.apache.hadoop.io.compress.CompressorStream; -import org.apache.hadoop.io.compress.GzipCodec; -import org.apache.hadoop.io.compress.zlib.ZlibFactory; - -/** - * Fixes an inefficiency in Hadoop's Gzip codec, allowing to reuse compression - * streams. - */ -@InterfaceAudience.Private -public class ReusableStreamGzipCodec extends GzipCodec { - - private static final Log LOG = LogFactory.getLog(Compression.class); - - /** - * A bridge that wraps around a DeflaterOutputStream to make it a - * CompressionOutputStream. 
- */ - protected static class ReusableGzipOutputStream extends CompressorStream { - - private static final int GZIP_HEADER_LENGTH = 10; - - /** - * Fixed ten-byte gzip header. See {@link GZIPOutputStream}'s source for - * details. - */ - private static final byte[] GZIP_HEADER; - - static { - // Capture the fixed ten-byte header hard-coded in GZIPOutputStream. - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - byte[] header = null; - GZIPOutputStream gzipStream = null; - try { - gzipStream = new GZIPOutputStream(baos); - gzipStream.finish(); - header = Arrays.copyOfRange(baos.toByteArray(), 0, GZIP_HEADER_LENGTH); - } catch (IOException e) { - throw new RuntimeException("Could not create gzip stream", e); - } finally { - if (gzipStream != null) { - try { - gzipStream.close(); - } catch (IOException e) { - LOG.error(e); - } - } - } - GZIP_HEADER = header; - } - - private static class ResetableGZIPOutputStream extends GZIPOutputStream { - public ResetableGZIPOutputStream(OutputStream out) throws IOException { - super(out); - } - - public void resetState() throws IOException { - def.reset(); - crc.reset(); - out.write(GZIP_HEADER); - } - } - - public ReusableGzipOutputStream(OutputStream out) throws IOException { - super(new ResetableGZIPOutputStream(out)); - } - - @Override - public void close() throws IOException { - out.close(); - } - - @Override - public void flush() throws IOException { - out.flush(); - } - - @Override - public void write(int b) throws IOException { - out.write(b); - } - - @Override - public void write(byte[] data, int offset, int length) throws IOException { - out.write(data, offset, length); - } - - @Override - public void finish() throws IOException { - ((GZIPOutputStream) out).finish(); - } - - @Override - public void resetState() throws IOException { - ((ResetableGZIPOutputStream) out).resetState(); - } - } - - @Override - public CompressionOutputStream createOutputStream(OutputStream out) - throws IOException { - if (ZlibFactory.isNativeZlibLoaded(getConf())) { - return super.createOutputStream(out); - } - return new ReusableGzipOutputStream(out); - } - -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java index 9568496..466f0a9 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java @@ -30,8 +30,8 @@ import java.util.Set; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; -import org.apache.hadoop.hbase.io.hfile.Compression; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java hbase-server/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java index 935bb87..bad7ced 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java @@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.client; import 
org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.io.hfile.Compression; +import org.apache.hadoop.hbase.io.compress.Compression; /** * Immutable HColumnDescriptor @@ -87,7 +87,7 @@ public class UnmodifyableHColumnDescriptor extends HColumnDescriptor { } /** - * @see org.apache.hadoop.hbase.HColumnDescriptor#setCompressionType(org.apache.hadoop.hbase.io.hfile.Compression.Algorithm) + * @see org.apache.hadoop.hbase.HColumnDescriptor#setCompressionType(org.apache.hadoop.hbase.io.compress.Compression.Algorithm) */ @Override public HColumnDescriptor setCompressionType(Compression.Algorithm type) { diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index 68b4354..c979143 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -24,8 +24,8 @@ import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.SamePrefixComparator; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.BlockType; -import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.RawComparator; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java index e4e7594..e8a36f3 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java @@ -28,8 +28,8 @@ import java.util.Iterator; import org.apache.commons.lang.NotImplementedException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.io.hfile.Compression; -import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; +import org.apache.hadoop.hbase.io.compress.Compression; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.HFileBlock; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.compress.Compressor; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java index dee40aa..9c842e8 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java @@ -22,9 +22,9 @@ import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.io.hfile.Compression; +import org.apache.hadoop.hbase.io.compress.Compression; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.HFileBlock; -import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; /** * A default implementation of 
{@link HFileBlockDecodingContext}. It assumes the diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java index 965d5cf..ae00575 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java @@ -16,16 +16,16 @@ */ package org.apache.hadoop.hbase.io.encoding; -import static org.apache.hadoop.hbase.io.hfile.Compression.Algorithm.NONE; +import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.NONE; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.IOException; +import org.apache.hadoop.hbase.io.compress.Compression; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.BlockType; -import org.apache.hadoop.hbase.io.hfile.Compression; import org.apache.hadoop.hbase.io.hfile.HFileBlock; -import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.Compressor; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java index b871865..8b6e4dc 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java @@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.fs.HFileSystem; +import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.regionserver.metrics.SchemaConfigured; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java index 635b407..1ce4683 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java @@ -32,6 +32,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue.KeyComparator; +import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.regionserver.metrics.SchemaConfigured; import org.apache.hadoop.hbase.util.Bytes; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java index b1de554..5e98d31 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java @@ -33,6 +33,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.FSDataInputStream; +import 
org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.RawComparator; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index 598e3ba..a642f01 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KeyComparator; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.HbaseMapWritable; +import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.SchemaAware; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java index c93c8d5..d6b65a1 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java @@ -34,12 +34,13 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.fs.HFileSystem; +import org.apache.hadoop.hbase.io.compress.Compression; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext; -import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; import org.apache.hadoop.hbase.regionserver.MemStore; import org.apache.hadoop.hbase.regionserver.metrics.SchemaConfigured; import org.apache.hadoop.hbase.util.Bytes; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java index 3345220..1644928 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java @@ -20,10 +20,10 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext; -import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; import org.apache.hadoop.hbase.util.Bytes; /** diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java index 013218a..b83ef39 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java @@ -20,13 +20,13 @@ 
import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext; -import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.util.Bytes; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java index 3861c00..07a5868 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java @@ -35,8 +35,9 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KeyComparator; +import org.apache.hadoop.hbase.io.compress.Compression; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; -import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; import org.apache.hadoop.hbase.regionserver.MemStore; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java index 92c3a62..1b05138 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java @@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KeyComparator; +import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; import org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java index 97af47d..27da395 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java @@ -20,12 +20,12 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext; -import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; /** * Does 
not perform any kind of encoding/decoding.
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
index 860d909..4dc2715 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
@@ -47,8 +47,8 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index eb2c476..722f1fd 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -63,9 +63,9 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.ServerCallable;
 import org.apache.hadoop.hbase.io.HalfStoreFileReader;
 import org.apache.hadoop.hbase.io.Reference;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
index a526345..1cb8b09 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.io.hfile.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.StringUtils;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 32d83f9..88288ad 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -54,8 +54,8 @@ import org.apache.hadoop.hbase.backup.HFileArchiver;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.HFileLink;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
index af1225a..203aecf 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
@@ -50,10 +50,10 @@ import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.HalfStoreFileReader;
 import org.apache.hadoop.hbase.io.Reference;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.HFileWriterV1;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index d26c862..042fd3b 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
-import org.apache.hadoop.hbase.io.hfile.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
index badf5cd..ced3e9c 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.io.hfile.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
index a10b9c9..a756a8f 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
@@ -28,8 +28,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 
 import org.apache.hadoop.io.compress.Compressor;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index ed2a453..5f548d8 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -63,10 +63,10 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.fs.HFileSystem;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.ChecksumUtil;
-import org.apache.hadoop.hbase.io.hfile.Compression;
-import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
 import org.apache.hadoop.hbase.mapreduce.MapreduceTestingShim;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.regionserver.HRegion;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
index bc72d6d..25c297d 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
@@ -30,8 +30,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.util.Bytes;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 9c0a735..a2bd388 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -58,7 +58,7 @@ import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-import org.apache.hadoop.hbase.io.hfile.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Hash;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
index ae9187f..6c60da5 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.Compression;
-import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.junit.experimental.categories.Category;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
index c79c7d6..37fad21 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.LargeTests;
-import org.apache.hadoop.hbase.io.hfile.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFileBlock;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
index 64bdf17..9d8b9ac 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
@@ -32,8 +32,8 @@ import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
 import org.apache.hadoop.hbase.regionserver.HRegion;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
index 7fd0fa7..fea07a4 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.util.TestMiniClusterLoadSequential;
 import org.apache.hadoop.hbase.util.Threads;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index 84460e9..da8ffd7 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.fs.HFileSystem;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
index ca90b11..fa23be0 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
@@ -46,13 +46,15 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.fs.HFileSystem;
-import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.compress.Compressor;
 
-import static org.apache.hadoop.hbase.io.hfile.Compression.Algorithm.*;
+import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.*;
+
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
index 5f8214e..258ded5 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
@@ -31,8 +31,9 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
-import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
index eeec46c..1570708 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
 import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
 import org.apache.hadoop.hbase.util.Bytes;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index 71d79d8..97e84fe 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -49,18 +49,20 @@ import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.DoubleOutputStream;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
-import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.compress.Compressor;
 
-import static org.apache.hadoop.hbase.io.hfile.Compression.Algorithm.*;
+import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.*;
+
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
index 3314d35..97b1126 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
-import static org.apache.hadoop.hbase.io.hfile.Compression.Algorithm.GZ;
-import static org.apache.hadoop.hbase.io.hfile.Compression.Algorithm.NONE;
+import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.GZ;
+import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.NONE;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.fs.HFileSystem;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
index ffab311..677fcbe 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.fs.HFileSystem;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexChunk;
 import org.apache.hadoop.hbase.util.Bytes;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
index ff97fe6..6d6ba79 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.io.HeapSize;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java
index 6f6e4e6..f91c83e 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
 import org.apache.hadoop.hbase.util.Bytes;
 
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
index c1c7537..310f662 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
@@ -36,7 +36,8 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.RawComparator;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index 7ef00c1..81d6232 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -59,9 +59,9 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression;
-import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
 import org.apache.hadoop.hbase.regionserver.HStore;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index c38e37d..81dfb8a 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -33,8 +33,8 @@ import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
index bfa9f05..4987a6a 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
@@ -35,8 +35,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
index 21a92ca..9659de5 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
@@ -38,12 +38,12 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.encoding.EncodedDataBlock;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression;
-import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
index 5e2032c..ecf9a7b 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
@@ -52,10 +52,10 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
index 19c690f..5629359 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.io.hfile.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
 import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
 import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.BlockMetricType;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
index 8656dd2..d4c2646 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
@@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.ServerCallable;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
index 9490691..b81c42f 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
@@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
 import org.apache.hadoop.hbase.util.Bytes;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
index a9dd743..40f9ca3 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
@@ -40,8 +40,8 @@ import org.apache.hadoop.hbase.KeyValueTestUtil;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index fb24285..d9b8835 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -43,8 +43,8 @@ import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.After;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
index 8a8da93..3d51fd1 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
@@ -30,7 +30,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.io.hfile.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.experimental.categories.Category;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index b2d1e8f..46c4fbf 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -28,8 +28,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.PerformanceEvaluation;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 
 /**
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
index a2f3d28..8f02f0b 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
@@ -27,8 +27,8 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 
 /**
  * A command-line tool that spins up a local process-based cluster, loads
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
index 7caedc6..371c7b5 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.util;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.SmallTests;
-import org.apache.hadoop.hbase.io.hfile.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
index 53a58be..f57dcbe 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
@@ -32,8 +32,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
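
Every hunk above is the same mechanical change: the Compression class and its nested Algorithm enum move from org.apache.hadoop.hbase.io.hfile to org.apache.hadoop.hbase.io.compress, so only import statements are rewritten and no call sites change. A minimal sketch of what a caller looks like after the move (CompressionImportExample is a hypothetical class, not part of this patch; it only assumes the relocated class is on the classpath):

// Before this patch the same class was imported from the io.hfile package:
//   import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.io.compress.Compression;

public class CompressionImportExample {
  public static void main(String[] args) {
    // Only the package changed; the nested Algorithm enum resolves as before.
    Compression.Algorithm algorithm = Compression.Algorithm.valueOf("GZ");
    System.out.println(algorithm.name()); // prints GZ
  }
}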