Index: hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (revision 1462800)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (working copy)
@@ -160,6 +160,52 @@
     assertEquals(expectedRows, util.countRows(table));
   }
 
+  /**
+   * Test loading into a column family that does not exist.
+   */
+  @Test
+  public void testNonexistentColumnFamilyLoad() throws Exception {
+    String testName = "testNonexistentColumnFamilyLoad";
+    byte[][][] hfileRanges = new byte[][][] {
+      new byte[][]{ Bytes.toBytes("aaa"), Bytes.toBytes("ccc") },
+      new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
+    };
+
+    Path dir = util.getDataTestDirOnTestFS(testName);
+    FileSystem fs = util.getTestFileSystem();
+    dir = dir.makeQualified(fs);
+    Path familyDir = new Path(dir, Bytes.toString(FAMILY));
+
+    int hfileIdx = 0;
+    for (byte[][] range : hfileRanges) {
+      byte[] from = range[0];
+      byte[] to = range[1];
+      createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_"
+          + hfileIdx++), FAMILY, QUALIFIER, from, to, 1000);
+    }
+
+    final byte[] TABLE = Bytes.toBytes("mytable_"+testName);
+
+    HBaseAdmin admin = new HBaseAdmin(util.getConfiguration());
+    HTableDescriptor htd = new HTableDescriptor(TABLE);
+    HColumnDescriptor familyDesc = new HColumnDescriptor(Bytes.toBytes("not_myfam"));
+    familyDesc.setBloomFilterType(BloomType.NONE);
+    htd.addFamily(familyDesc);
+    admin.createTable(htd, SPLIT_KEYS);
+
+    HTable table = new HTable(util.getConfiguration(), TABLE);
+    util.waitTableEnabled(TABLE);
+    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration(), useSecure);
+    try {
+      loader.doBulkLoad(dir, table);
+      assertTrue(false);
+    } catch (Exception e) {
+      e.printStackTrace();
+      assertTrue(e instanceof IOException);
+    }
+
+  }
+
   private void verifyAssignedSequenceNumber(String testName,
       byte[][][] hfileRanges, boolean nonZero) throws Exception {
     Path dir = util.getDataTestDir(testName);
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java (revision 1462800)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java (working copy)
@@ -3215,7 +3215,6 @@
         IOException ioe = new org.apache.hadoop.hbase.exceptions.DoNotRetryIOException(
             "No such column family " + Bytes.toStringBinary(familyName));
         ioes.add(ioe);
-        failures.add(p);
       } else {
         try {
           store.assertBulkLoadHFileOk(new Path(path));
@@ -3229,6 +3228,13 @@
       }
     }
 
+    // validation failed because of some sort of IO problem.
+    if (ioes.size() != 0) {
+      IOException e = MultipleIOException.createIOException(ioes);
+      LOG.error("There were one or more IO errors when checking if the bulk load is ok.", e);
+      throw e;
+    }
+
     // validation failed, bail out before doing anything permanent.
     if (failures.size() != 0) {
       StringBuilder list = new StringBuilder();
@@ -3242,13 +3248,6 @@
       return false;
     }
 
-    // validation failed because of some sort of IO problem.
-    if (ioes.size() != 0) {
-      IOException e = MultipleIOException.createIOException(ioes);
-      LOG.error("There were one or more IO errors when checking if the bulk load is ok.", e);
-      throw e;
-    }
-
     for (Pair<byte[], String> p : familyPaths) {
      byte[] familyName = p.getFirst();
      String path = p.getSecond();
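
For context, a minimal client-side sketch (not part of the patch) of the behaviour the new test asserts: once HRegion.bulkLoadHFiles throws the collected IO errors ahead of the generic failure handling, a bulk load against a table that lacks the requested column family surfaces as an IOException to the caller of LoadIncrementalHFiles.doBulkLoad instead of failing quietly. The table name, HFile directory layout, and error handling below are illustrative assumptions only.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;

public class BulkLoadMissingFamilyExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Hypothetical names: "mytable" is assumed to exist but to lack the
    // column family named by the HFile directory under /bulk/mytable.
    HTable table = new HTable(conf, "mytable");
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
    try {
      loader.doBulkLoad(new Path("/bulk/mytable"), table);
    } catch (IOException e) {
      // With the patch applied, the missing-family case is reported as an
      // IOException (the DoNotRetryIOExceptions collected in ioes are thrown
      // via MultipleIOException), so the caller sees the failure directly.
      System.err.println("Bulk load rejected: " + e.getMessage());
    } finally {
      table.close();
    }
  }
}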