Index: src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java (revision 1100042)
+++ src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java (working copy)
@@ -281,7 +281,7 @@
 
   private Options buildOptions() {
     Option compress =
-        OptionBuilder.withLongOpt("compress").withArgName("[none|lzo|gz]")
+        OptionBuilder.withLongOpt("compress").withArgName("[none|lzo|gz|snappy]")
             .hasArg().withDescription("compression scheme").create('c');
 
     Option fileSize =
@@ -446,7 +446,7 @@
 
   private void validateOptions() throws ParseException {
     if (!compress.equals("none") && !compress.equals("lzo")
-        && !compress.equals("gz")) {
+        && !compress.equals("gz") && !compress.equals("snappy")) {
       throw new ParseException("Unknown compression scheme: " + compress);
     }
 
Index: src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java (revision 1100042)
+++ src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java (working copy)
@@ -432,6 +432,7 @@
     switch (type) {
       case LZO: compressionType = "LZO"; break;
       case GZ: compressionType = "GZ"; break;
+      case SNAPPY: compressionType = "SNAPPY"; break;
       default: compressionType = "NONE"; break;
     }
     setValue(COMPRESSION, compressionType);
@@ -456,6 +457,7 @@
     switch (type) {
       case LZO: compressionType = "LZO"; break;
       case GZ: compressionType = "GZ"; break;
+      case SNAPPY: compressionType = "SNAPPY"; break;
       default: compressionType = "NONE"; break;
     }
     setValue(COMPRESSION_COMPACT, compressionType);
Index: src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java (revision 1100042)
+++ src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java (working copy)
@@ -129,7 +129,7 @@
    * @param fileType "HFile" or "SequenceFile"
    * @param keyLength
    * @param valueLength
-   * @param codecName "none", "lzo", "gz"
+   * @param codecName "none", "lzo", "gz", "snappy"
    * @param rows number of rows to be written.
    * @param writeMethod used for HFile only.
    * @param minBlockSize used for HFile only.
Index: src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java (revision 1100042)
+++ src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java (working copy)
@@ -94,7 +94,7 @@
 
   public static void usage() {
     System.err.println(
-      "Usage: CompressionTest none|gz|lzo\n" +
+      "Usage: CompressionTest none|gz|lzo|snappy\n" +
       "\n" +
       "For example:\n" +
       "  hbase " + CompressionTest.class + " file:///tmp/testfile gz\n");
Index: src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java (revision 1100042)
+++ src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java (working copy)
@@ -438,6 +438,9 @@
       familyToCompression.put("Family1!@#!@#&", Compression.Algorithm.LZO);
     }
     if (numCfs-- > 0) {
+      familyToCompression.put("Family2=asdads&!AASD", Compression.Algorithm.SNAPPY);
+    }
+    if (numCfs-- > 0) {
       familyToCompression.put("Family2=asdads&!AASD", Compression.Algorithm.GZ);
     }
     if (numCfs-- > 0) {
Index: src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java (revision 1100042)
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java (working copy)
@@ -95,6 +95,25 @@
         return lzoCodec;
       }
     },
+    SNAPPY("snappy") {
+      // Use base type to avoid compile-time dependencies.
+      private transient CompressionCodec snappyCodec;
+
+      @Override
+      CompressionCodec getCodec(Configuration conf) {
+        if (snappyCodec == null) {
+          try {
+            Class externalCodec =
+                ClassLoader.getSystemClassLoader().loadClass("org.apache.hadoop.io.compress.SnappyCodec");
+            snappyCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec,
+                conf);
+          } catch (ClassNotFoundException e) {
+            throw new RuntimeException(e);
+          }
+        }
+        return snappyCodec;
+      }
+    },
     GZ("gz") {
       private transient GzipCodec codec;
 
Index: src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java (revision 1100042)
+++ src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java (working copy)
@@ -54,5 +54,6 @@
     assertFalse(CompressionTest.testCompression("LZO"));
     assertTrue(CompressionTest.testCompression("NONE"));
     assertTrue(CompressionTest.testCompression("GZ"));
+    assertTrue(CompressionTest.testCompression("SNAPPY"));
   }
 }
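
For reference, a minimal usage sketch of how the new SNAPPY option could be exercised once this patch is applied. It assumes Hadoop's org.apache.hadoop.io.compress.SnappyCodec and its native library are available on the classpath, and the table and column family names below are illustrative only.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.io.hfile.Compression;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.CompressionTest;

    public class SnappyFamilyExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();

        // Fail fast if the Snappy codec cannot be loaded, mirroring the
        // check added to TestCompressionTest.
        if (!CompressionTest.testCompression("SNAPPY")) {
          throw new IllegalStateException("SNAPPY codec is not available");
        }

        // Declare a column family whose HFiles are written Snappy-compressed,
        // using the enum value introduced in Compression.Algorithm.
        HColumnDescriptor family = new HColumnDescriptor(Bytes.toBytes("cf"));
        family.setCompressionType(Compression.Algorithm.SNAPPY);

        HTableDescriptor table = new HTableDescriptor("snappy_demo");
        table.addFamily(family);

        new HBaseAdmin(conf).createTable(table);
      }
    }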