diff --git hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestExamples.java hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestExamples.java new file mode 100644 index 0000000..64973c0 --- /dev/null +++ hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestExamples.java @@ -0,0 +1,177 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the License. You may + * obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.mapreduce; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.LargeTests; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.mapreduce.IndexBuilder.Map; +import org.apache.hadoop.hbase.mapreduce.SampleUploader.Uploader; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.LauncherSecurityManager; +import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.Mapper; +import org.apache.hadoop.mapreduce.Mapper.Context; +import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +@Category(LargeTests.class) +public class TestExamples { + private static HBaseTestingUtility util = new HBaseTestingUtility(); + + /** + * Test SampleUploader from examples + */ + + @SuppressWarnings("unchecked") + @Test + public void testSampleUploader() throws Exception { + + Configuration configuration = new Configuration(); + Uploader uploader = new Uploader(); + Mapper.Context ctx = mock(Context.class); + doAnswer(new Answer() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0]; + Put put = (Put) invocation.getArguments()[1]; + assertEquals("row", Bytes.toString(writer.get())); + assertEquals("row", Bytes.toString(put.getRow())); + return null; + } + }).when(ctx).write(any(ImmutableBytesWritable.class), any(Put.class)); + + uploader.map(null, new Text("row,family,qualifier,value"), ctx); + + Path dir = util.getDataTestDirOnTestFS("testSampleUploader"); + + String[] args = { dir.toString(), "simpleTable" }; + Job job = SampleUploader.configureJob(configuration, args); + assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass()); + + } + + /** + * Test main 
method of SampleUploader. + */ + @Test + public void testMainSampleUploader() throws Exception { + PrintStream oldPrintStream = System.err; + SecurityManager SECURITY_MANAGER = System.getSecurityManager(); + new LauncherSecurityManager(); + ByteArrayOutputStream data = new ByteArrayOutputStream(); + String[] args = {}; + System.setErr(new PrintStream(data)); + try { + System.setErr(new PrintStream(data)); + + try { + SampleUploader.main(args); + fail("should be SecurityException"); + } catch (SecurityException e) { + assertTrue(data.toString().contains("Wrong number of arguments:")); + assertTrue(data.toString().contains("Usage: SampleUploader ")); + } + + } finally { + System.setErr(oldPrintStream); + System.setSecurityManager(SECURITY_MANAGER); + } + + } + + /** + * Test IndexBuilder from examples + */ + @SuppressWarnings("unchecked") + @Test + public void testIndexBuilder() throws Exception { + Configuration configuration = new Configuration(); + String[] args = { "tableName", "columnFamily", "column1", "column2" }; + IndexBuilder.configureJob(configuration, args); + assertEquals("tableName", configuration.get("index.tablename")); + assertEquals("attributes", configuration.get("index.familyname")); + assertEquals("tableName", configuration.get(TableInputFormat.INPUT_TABLE)); + assertEquals("column1,column2", configuration.get("index.fields")); + + Map map = new Map(); + ImmutableBytesWritable rowKey = new ImmutableBytesWritable(Bytes.toBytes("test")); + Mapper.Context ctx = + mock(Context.class); + when(ctx.getConfiguration()).thenReturn(configuration); + doAnswer(new Answer() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0]; + Put put = (Put) invocation.getArguments()[1]; + assertEquals("tableName-column1", Bytes.toString(writer.get())); + assertEquals("test", Bytes.toString(put.getRow())); + return null; + } + }).when(ctx).write(any(ImmutableBytesWritable.class), any(Put.class)); + Result result = mock(Result.class); + when(result.getValue(Bytes.toBytes("attributes"), Bytes.toBytes("column1"))).thenReturn( + Bytes.toBytes("test")); + map.setup(ctx); + map.map(rowKey, result, ctx); + } + + /** + * Test main method of IndexBuilder + */ + @Test + public void testMainIndexBuilder() throws Exception { + PrintStream oldPrintStream = System.err; + SecurityManager SECURITY_MANAGER = System.getSecurityManager(); + new LauncherSecurityManager(); + ByteArrayOutputStream data = new ByteArrayOutputStream(); + String[] args = {}; + System.setErr(new PrintStream(data)); + try { + System.setErr(new PrintStream(data)); + try { + IndexBuilder.main(args); + fail("should be SecurityException"); + } catch (SecurityException e) { + assertTrue(data.toString().contains("arguments supplied, required: 3")); + assertTrue(data.toString().contains( + "Usage: IndexBuilder [ ...]")); + } + + } finally { + System.setErr(oldPrintStream); + System.setSecurityManager(SECURITY_MANAGER); + } + + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java new file mode 100644 index 0000000..009e3b7 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java @@ -0,0 +1,157 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.mapreduce; + +import org.apache.commons.io.IOUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.fs.LocalFileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.LargeTests; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.LauncherSecurityManager; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.util.GenericOptionsParser; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import java.io.*; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +@Category(LargeTests.class) +public class TestCellCounter { + private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); + private static final byte[] ROW1 = Bytes.toBytes("row1"); + private static final byte[] ROW2 = Bytes.toBytes("row2"); + private static final String FAMILY_A_STRING = "a"; + private static final String FAMILY_B_STRING = "b"; + private static final byte[] FAMILY_A = Bytes.toBytes(FAMILY_A_STRING); + private static final byte[] FAMILY_B = Bytes.toBytes(FAMILY_B_STRING); + private static final byte[] QUALIFIER = Bytes.toBytes("q"); + + private static Path FQ_OUTPUT_DIR; + private static final String OUTPUT_DIR = "target" + File.separator + "test-data" + File.separator + + "output"; + private static long now = System.currentTimeMillis(); + + @BeforeClass + public static void beforeClass() throws Exception { + UTIL.startMiniCluster(); + UTIL.startMiniMapReduceCluster(); + FQ_OUTPUT_DIR = new Path(OUTPUT_DIR).makeQualified(new LocalFileSystem()); + FileUtil.fullyDelete(new File(OUTPUT_DIR)); + } + + @AfterClass + public static void afterClass() throws Exception { + UTIL.shutdownMiniMapReduceCluster(); + UTIL.shutdownMiniCluster(); + } + + /** + * Test CellCounter all data should print to output + * + */ + @Test + public void testCellCounter() throws Exception { + String sourceTable = "sourceTable"; + + byte[][] families = { FAMILY_A, FAMILY_B }; + HTable t = UTIL.createTable(Bytes.toBytes(sourceTable), families); + Put p = new Put(ROW1); + p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11")); + p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12")); + p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13")); + t.put(p); + p = new Put(ROW2); + p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21")); + p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22")); + p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23")); + t.put(p); + System.out.println("file out:" + 
FQ_OUTPUT_DIR.toString()); + String[] args = { sourceTable, FQ_OUTPUT_DIR.toString(), ";", "^row1" }; + runCount(args); + FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator + + "part-r-00000"); + String data = IOUtils.toString(inputStream); + inputStream.close(); + assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2")); + assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2")); + assertTrue(data.contains("Total ROWS" + "\t" + "1")); + assertTrue(data.contains("b;q" + "\t" + "1")); + assertTrue(data.contains("a;q" + "\t" + "1")); + assertTrue(data.contains("row1;a;q_Versions" + "\t" + "2")); + + } + + private boolean runCount(String[] args) throws IOException, InterruptedException, + ClassNotFoundException { + // need to make a copy of the configuration because to make sure + // different temp dirs are used. + GenericOptionsParser opts = new GenericOptionsParser( + new Configuration(UTIL.getConfiguration()), args); + Configuration configuration = opts.getConfiguration(); + args = opts.getRemainingArgs(); + Job job = CellCounter.createSubmittableJob(configuration, args); + job.waitForCompletion(false); + return job.isSuccessful(); + } + + /** + * Test main method of CellCounter + */ + @Test + public void testMain() throws Exception { + + PrintStream oldPrintStream = System.err; + SecurityManager SECURITY_MANAGER = System.getSecurityManager(); + new LauncherSecurityManager(); + ByteArrayOutputStream data = new ByteArrayOutputStream(); + String[] args = {}; + System.setErr(new PrintStream(data)); + try { + System.setErr(new PrintStream(data)); + + try { + CellCounter.main(args); + fail("should be SecurityException"); + } catch (SecurityException e) { + assertTrue(data.toString().contains("ERROR: Wrong number of parameters:")); + assertTrue(data + .toString() + .contains( + "Usage: CellCounter [^[regex pattern]" + + " or [Prefix] for row filter]]")); + assertTrue(data.toString().contains("-D hbase.mapreduce.scan.column.family=")); + } + + } finally { + System.setErr(oldPrintStream); + System.setSecurityManager(SECURITY_MANAGER); + } + + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopy.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopy.java new file mode 100644 index 0000000..fb4486b --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopy.java @@ -0,0 +1,151 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the License. You may + * obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.mapreduce; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.LargeTests; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.util.GenericOptionsParser; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.PrintStream; + +import static org.junit.Assert.*; + +@Category(LargeTests.class) +public class TestCopy { + private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); + private static final byte[] ROW1 = Bytes.toBytes("row1"); + private static final byte[] ROW2 = Bytes.toBytes("row2"); + private static final String FAMILY_A_STRING = "a"; + private static final String FAMILY_B_STRING = "b"; + private static final byte[] FAMILY_A = Bytes.toBytes(FAMILY_A_STRING); + private static final byte[] FAMILY_B = Bytes.toBytes(FAMILY_B_STRING); + private static final byte[] QUALIFIER = Bytes.toBytes("q"); + + private static long now = System.currentTimeMillis(); + + @BeforeClass + public static void beforeClass() throws Exception { + UTIL.startMiniCluster(); + UTIL.startMiniMapReduceCluster(); + } + + @AfterClass + public static void afterClass() throws Exception { + UTIL.shutdownMiniMapReduceCluster(); + UTIL.shutdownMiniCluster(); + } + + /** + * Test copy of table from sourceTable to targetTable all rows from family a + */ + @Test + public void testCopyTable() throws Exception { + String sourceTable = "sourceTable"; + String targetTable = "targetTable"; + + byte[][] families = { FAMILY_A, FAMILY_B }; + + HTable t = UTIL.createTable(Bytes.toBytes(sourceTable), families); + HTable t2 = UTIL.createTable(Bytes.toBytes(targetTable), families); + Put p = new Put(ROW1); + p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11")); + p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12")); + p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13")); + t.put(p); + p = new Put(ROW2); + p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21")); + p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22")); + p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23")); + t.put(p); + + long currentTime = System.currentTimeMillis(); + String[] args = new String[] { "--new.name=" + targetTable, "--families=a:b", "--all.cells", + "--starttime=" + (currentTime - 100000), "--endtime=" + (currentTime + 100000), + "--versions=1", sourceTable }; + assertNull(t2.get(new Get(ROW1)).getRow()); + assertTrue(runCopy(args)); + + assertNotNull(t2.get(new Get(ROW1)).getRow()); + Result res = t2.get(new Get(ROW1)); + byte[] b1 = res.getValue(FAMILY_B, QUALIFIER); + assertEquals("Data13", new String(b1)); + assertNotNull(t2.get(new Get(ROW2)).getRow()); + res = t2.get(new Get(ROW2)); + b1 = res.getValue(FAMILY_A, QUALIFIER); + // Data from the family of B is not copied + assertNull(b1); + + } + + /** + * Test main method of CopyTable. 
+ */ + @Test + public void testMainMethod() throws Exception { + String[] emptyArgs = { "-h" }; + PrintStream oldWriter = System.err; + ByteArrayOutputStream data = new ByteArrayOutputStream(); + PrintStream writer = new PrintStream(data); + System.setErr(writer); + try { + CopyTable.main(emptyArgs); + } finally { + System.setErr(oldWriter); + } + assertTrue(data.toString().contains("rs.class")); + assertTrue(data + .toString() + .contains( + "Usage: CopyTable [general options] [--starttime=X] [--endtime=Y] [--new.name=NEW]" + + " [--peer.adr=ADR] ")); + assertTrue(data.toString().contains( + "rs.impl hbase.regionserver.impl of the peer cluster")); + assertTrue(data.toString().contains( + "starttime beginning of the time range (unixtime in millis)")); + assertTrue(data.toString().contains( + "endtime end of the time range. Ignored if no starttime specified.")); + assertTrue(data.toString().contains("versions number of cell versions to copy")); + assertTrue(data.toString().contains("new.name new table's name")); + assertTrue(data.toString().contains( + "peer.adr Address of the peer cluster given in the format")); + assertTrue(data.toString().contains( + "all.cells also copy delete markers and deleted cells")); + assertTrue(data.toString().contains("tablename Name of the table to copy")); + } + + private boolean runCopy(String[] args) throws IOException, InterruptedException, + ClassNotFoundException { + GenericOptionsParser opts = new GenericOptionsParser( + new Configuration(UTIL.getConfiguration()), args); + Configuration configuration = opts.getConfiguration(); + args = opts.getRemainingArgs(); + Job job = CopyTable.createSubmittableJob(configuration, args); + job.waitForCompletion(false); + return job.isSuccessful(); + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestDriver.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestDriver.java new file mode 100644 index 0000000..bce2a90 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestDriver.java @@ -0,0 +1,68 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the License. You may + * obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.mapreduce; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.lang.reflect.InvocationTargetException; + +import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.util.LauncherSecurityManager; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(SmallTests.class) +public class TestDriver { + /** + * Test main method of Driver class + */ + @Test + public void testDriver() throws Throwable { + + PrintStream oldPrintStream = System.out; + SecurityManager SECURITY_MANAGER = System.getSecurityManager(); + new LauncherSecurityManager(); + ByteArrayOutputStream data = new ByteArrayOutputStream(); + String[] args = {}; + System.setOut(new PrintStream(data)); + try { + System.setOut(new PrintStream(data)); + + try { + Driver.main(args); + fail("should be SecurityException"); + } catch (InvocationTargetException e) { + assertTrue(data.toString().contains( + "An example program must be given as the first argument.")); + assertTrue(data.toString().contains("CellCounter: Count cells in HBase table")); + assertTrue(data.toString().contains("completebulkload: Complete a bulk data load.")); + assertTrue(data.toString().contains( + "copytable: Export a table from local cluster to peer cluster")); + assertTrue(data.toString().contains("export: Write table data to HDFS.")); + assertTrue(data.toString().contains("import: Import data written by Export.")); + assertTrue(data.toString().contains("importtsv: Import data in TSV format.")); + assertTrue(data.toString().contains("rowcounter: Count rows in HBase table")); + } + } finally { + System.setOut(oldPrintStream); + System.setSecurityManager(SECURITY_MANAGER); + } + + } +} \ No newline at end of file diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java new file mode 100644 index 0000000..4b11c90 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java @@ -0,0 +1,66 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the License. You may + * obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.mapreduce; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.mapreduce.Mapper; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import static org.mockito.Mockito.*; + +@Category(SmallTests.class) +public class TestGroupingTableMapper { + + /** + * Test GroupingTableMapper class + */ + @Test + public void testGroupingTableMapper() throws Exception { + + GroupingTableMapper mapper = new GroupingTableMapper(); + Configuration configuration = new Configuration(); + configuration.set(GroupingTableMapper.GROUP_COLUMNS, "family1:clm family2:clm"); + mapper.setConf(configuration); + + Result result = mock(Result.class); + @SuppressWarnings("unchecked") + Mapper.Context context = + mock(Mapper.Context.class); + context.write(any(ImmutableBytesWritable.class), any(Result.class)); + List keyValue = new ArrayList(); + byte[] row = {}; + keyValue.add(new KeyValue(row, Bytes.toBytes("family2"), Bytes.toBytes("clm"), Bytes + .toBytes("value1"))); + keyValue.add(new KeyValue(row, Bytes.toBytes("family1"), Bytes.toBytes("clm"), Bytes + .toBytes("value2"))); + when(result.list()).thenReturn(keyValue); + mapper.map(null, result, context); + // template data + byte[][] data = { Bytes.toBytes("value1"), Bytes.toBytes("value2") }; + ImmutableBytesWritable ibw = mapper.createGroupKey(data); + verify(context).write(ibw, result); + } + +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java new file mode 100644 index 0000000..41608db --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java @@ -0,0 +1,66 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the License. You may + * obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.mapreduce; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.LargeTests; +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import static org.junit.Assert.assertEquals; + +@Category(LargeTests.class) +public class TestHRegionPartitioner { + private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); + + @BeforeClass + public static void beforeClass() throws Exception { + UTIL.startMiniCluster(); + UTIL.startMiniMapReduceCluster(); + } + + @AfterClass + public static void afterClass() throws Exception { + UTIL.shutdownMiniMapReduceCluster(); + UTIL.shutdownMiniCluster(); + } + + /** + * Test HRegionPartitioner + */ + @Test + public void testHRegionPartitioner() throws Exception { + + byte[][] families = { Bytes.toBytes("familyA"), Bytes.toBytes("familyB") }; + + UTIL.createTable(Bytes.toBytes("out_table"), families, 1, Bytes.toBytes("aa"), + Bytes.toBytes("cc"), 3); + + HRegionPartitioner partitioner = new HRegionPartitioner(); + Configuration configuration = UTIL.getConfiguration(); + configuration.set(TableOutputFormat.OUTPUT_TABLE, "out_table"); + partitioner.setConf(configuration); + ImmutableBytesWritable writable = new ImmutableBytesWritable(Bytes.toBytes("bb")); + + assertEquals(1, partitioner.getPartition(writable, 10L, 3)); + assertEquals(0, partitioner.getPartition(writable, 10L, 1)); + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index 23c14c8..18f58a2 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -20,8 +20,13 @@ package org.apache.hadoop.hbase.mapreduce; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.PrintStream; +import java.util.ArrayList; +import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -39,9 +44,14 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterBase; import org.apache.hadoop.hbase.filter.PrefixFilter; +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.mapreduce.Import.KeyValueImporter; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.LauncherSecurityManager; import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.Mapper.Context; import org.apache.hadoop.util.GenericOptionsParser; import org.junit.After; import org.junit.AfterClass; @@ -50,6 +60,10 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import static org.mockito.Mockito.*; /** * Tests the table import and table 
export MR job functionality @@ -355,4 +369,118 @@ public class TestImportExport { results.close(); return count; } + + /** + * Test main method. Import should print help and call System.exit + */ + @Test + public void testImportMain() throws Exception { + PrintStream oldPrintStream = System.err; + SecurityManager SECURITY_MANAGER = System.getSecurityManager(); + new LauncherSecurityManager(); + ByteArrayOutputStream data = new ByteArrayOutputStream(); + String[] args = {}; + System.setErr(new PrintStream(data)); + try { + System.setErr(new PrintStream(data)); + Import.main(args); + fail("should be SecurityException"); + } catch (SecurityException e) { + assertTrue(data.toString().contains("Wrong number of arguments:")); + assertTrue(data.toString().contains("-Dimport.bulk.output=/path/for/output")); + assertTrue(data.toString().contains("-Dimport.filter.class=")); + assertTrue(data.toString().contains("-Dimport.bulk.output=/path/for/output")); + assertTrue(data.toString().contains("-Dmapred.reduce.tasks.speculative.execution=false")); + } finally { + System.setErr(oldPrintStream); + System.setSecurityManager(SECURITY_MANAGER); + } + } + + /** + * Test main method. Export should print help and call System.exit + */ + @Test + public void testExportMain() throws Exception { + PrintStream oldPrintStream = System.err; + SecurityManager SECURITY_MANAGER = System.getSecurityManager(); + new LauncherSecurityManager(); + ByteArrayOutputStream data = new ByteArrayOutputStream(); + String[] args = {}; + System.setErr(new PrintStream(data)); + try { + System.setErr(new PrintStream(data)); + Export.main(args); + fail("should be SecurityException"); + } catch (SecurityException e) { + assertTrue(data.toString().contains("Wrong number of arguments:")); + assertTrue(data + .toString() + .contains( + "Usage: Export [-D ]* [ " + + "[ []] [^[regex pattern] or [Prefix] to filter]]")); + assertTrue(data.toString().contains("-D hbase.mapreduce.scan.column.family=")); + assertTrue(data.toString().contains("-D hbase.mapreduce.include.deleted.rows=true")); + assertTrue(data.toString().contains("-Dhbase.client.scanner.caching=100")); + assertTrue(data.toString().contains("-Dmapred.map.tasks.speculative.execution=false")); + assertTrue(data.toString().contains("-Dmapred.reduce.tasks.speculative.execution=false")); + assertTrue(data.toString().contains("-Dhbase.export.scanner.batch=10")); + } finally { + System.setErr(oldPrintStream); + System.setSecurityManager(SECURITY_MANAGER); + } + } + + /** + * Test map method of Importer + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testKeyValueImporter() throws Exception { + KeyValueImporter importer = new KeyValueImporter(); + Configuration configuration = new Configuration(); + Context ctx = mock(Context.class); + when(ctx.getConfiguration()).thenReturn(configuration); + + doAnswer(new Answer() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0]; + KeyValue key = (KeyValue) invocation.getArguments()[1]; + assertEquals("Key", Bytes.toString(writer.get())); + assertEquals("row", Bytes.toString(key.getRow())); + return null; + } + }).when(ctx).write(any(ImmutableBytesWritable.class), any(KeyValue.class)); + + importer.setup(ctx); + Result value = mock(Result.class); + KeyValue[] keys = { + new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"), + Bytes.toBytes("value")), + new
KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"), + Bytes.toBytes("value1")) }; + when(value.raw()).thenReturn(keys); + importer.map(new ImmutableBytesWritable(Bytes.toBytes("Key")), value, ctx); + + } + + /** + * Test addFilterAndArguments method of Import This method set couple + * parameters into Configuration + */ + @Test + public void testAddFilterAndArguments() { + Configuration configuration = new Configuration(); + + List args = new ArrayList(); + args.add("param1"); + args.add("param2"); + + Import.addFilterAndArguments(configuration, FilterBase.class, args); + assertEquals("org.apache.hadoop.hbase.filter.FilterBase", + configuration.get(Import.FILTER_CLASS_CONF_KEY)); + assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY)); + } } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java index c7e12e5..211dad9 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java @@ -18,22 +18,12 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.util.Collection; -import java.util.TreeMap; -import java.util.List; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; -import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFile; @@ -41,9 +31,16 @@ import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.*; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; import org.junit.experimental.categories.Category; +import java.io.IOException; +import java.util.TreeMap; + +import static org.junit.Assert.*; + /** * Test cases for the "load" half of the HFileOutputFormat bulk load * functionality. 
These tests run faster than the full MR cluster @@ -146,18 +143,16 @@ public class TestLoadIncrementalHFiles { final byte[] TABLE = Bytes.toBytes("mytable_"+testName); - HBaseAdmin admin = new HBaseAdmin(util.getConfiguration()); HTableDescriptor htd = new HTableDescriptor(TABLE); HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY); familyDesc.setBloomFilterType(bloomType); htd.addFamily(familyDesc); - admin.createTable(htd, SPLIT_KEYS); - HTable table = new HTable(util.getConfiguration(), TABLE); - util.waitTableEnabled(TABLE); LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration(), useSecure); - loader.doBulkLoad(dir, table); - + String [] args= {dir.toString(),"mytable_"+testName}; + loader.run(args); + HTable table = new HTable(util.getConfiguration(), TABLE); + assertEquals(expectedRows, util.countRows(table)); } @@ -167,7 +162,7 @@ public class TestLoadIncrementalHFiles { @Test public void testNonexistentColumnFamilyLoad() throws Exception { String testName = "testNonexistentColumnFamilyLoad"; - byte[][][] hfileRanges = new byte[][][] { + byte[][][] hFileRanges = new byte[][][] { new byte[][]{ Bytes.toBytes("aaa"), Bytes.toBytes("ccc") }, new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }; @@ -177,12 +172,12 @@ public class TestLoadIncrementalHFiles { dir = dir.makeQualified(fs); Path familyDir = new Path(dir, Bytes.toString(FAMILY)); - int hfileIdx = 0; - for (byte[][] range : hfileRanges) { + int hFileIdx = 0; + for (byte[][] range : hFileRanges) { byte[] from = range[0]; byte[] to = range[1]; createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_" - + hfileIdx++), FAMILY, QUALIFIER, from, to, 1000); + + hFileIdx++), FAMILY, QUALIFIER, from, to, 1000); } final byte[] TABLE = Bytes.toBytes("mytable_"+testName); @@ -214,55 +209,6 @@ public class TestLoadIncrementalHFiles { admin.close(); } - private void verifyAssignedSequenceNumber(String testName, - byte[][][] hfileRanges, boolean nonZero) throws Exception { - Path dir = util.getDataTestDir(testName); - FileSystem fs = util.getTestFileSystem(); - dir = dir.makeQualified(fs); - Path familyDir = new Path(dir, Bytes.toString(FAMILY)); - - int hfileIdx = 0; - for (byte[][] range : hfileRanges) { - byte[] from = range[0]; - byte[] to = range[1]; - createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_" - + hfileIdx++), FAMILY, QUALIFIER, from, to, 1000); - } - - final byte[] TABLE = Bytes.toBytes("mytable_"+testName); - - HBaseAdmin admin = new HBaseAdmin(util.getConfiguration()); - HTableDescriptor htd = new HTableDescriptor(TABLE); - HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY); - htd.addFamily(familyDesc); - admin.createTable(htd, SPLIT_KEYS); - - HTable table = new HTable(util.getConfiguration(), TABLE); - util.waitTableEnabled(TABLE); - LoadIncrementalHFiles loader = new LoadIncrementalHFiles( - util.getConfiguration()); - - // Do a dummy put to increase the hlog sequence number - Put put = new Put(Bytes.toBytes("row")); - put.add(FAMILY, QUALIFIER, Bytes.toBytes("value")); - table.put(put); - - loader.doBulkLoad(dir, table); - - // Get the store files - Collection files = util.getHBaseCluster(). 
- getRegions(TABLE).get(0).getStore(FAMILY).getStorefiles(); - for (StoreFile file: files) { - // the sequenceId gets initialized during createReader - file.createReader(); - - if (nonZero) - assertTrue(file.getMaxSequenceId() > 0); - else - assertTrue(file.getMaxSequenceId() == -1); - } - } - @Test public void testSplitStoreFile() throws IOException { Path dir = util.getDataTestDirOnTestFS("testSplitHFile"); @@ -287,9 +233,9 @@ public class TestLoadIncrementalHFiles { } private int verifyHFile(Path p) throws IOException { - Configuration conf = util.getConfiguration(); + Configuration configuration = util.getConfiguration(); HFile.Reader reader = HFile.createReader( - p.getFileSystem(conf), p, new CacheConfig(conf)); + p.getFileSystem(configuration), p, new CacheConfig(configuration)); reader.loadFileInfo(); HFileScanner scanner = reader.getScanner(false, false); scanner.seekTo(); @@ -309,12 +255,12 @@ * TODO put me in an HFileTestUtil or something? */ static void createHFile( - Configuration conf, + Configuration configuration, FileSystem fs, Path path, byte[] family, byte[] qualifier, byte[] startKey, byte[] endKey, int numRows) throws IOException { - HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf)) + HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration)) .withPath(fs, path) .withBlockSize(BLOCKSIZE) .withCompression(COMPRESSION) @@ -335,10 +281,10 @@ } private void addStartEndKeysForTest(TreeMap map, byte[] first, byte[] last) { - Integer value = map.containsKey(first)?(Integer)map.get(first):0; + Integer value = map.containsKey(first)?map.get(first):0; map.put(first, value+1); - value = map.containsKey(last)?(Integer)map.get(last):0; + value = map.containsKey(last)?map.get(last):0; map.put(last, value-1); } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java index bd68052..e600f7d 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java @@ -20,8 +20,11 @@ package org.apache.hadoop.hbase.mapreduce; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.PrintStream; import java.util.ArrayList; import org.apache.commons.logging.Log; @@ -33,6 +36,7 @@ import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.mapreduce.RowCounter.RowCounterMapper; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.LauncherSecurityManager; import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.util.GenericOptionsParser; @@ -170,4 +174,53 @@ public class TestRowCounter { } table.put(rowsUpdate); } + + /** + * Test main method.
RowCounter should print help and call System.exit + */ + @Test + public void testImportMain() throws Exception { + PrintStream oldPrintStream = System.err; + SecurityManager SECURITY_MANAGER = System.getSecurityManager(); + new LauncherSecurityManager(); + ByteArrayOutputStream data = new ByteArrayOutputStream(); + String[] args = {}; + System.setErr(new PrintStream(data)); + try { + System.setErr(new PrintStream(data)); + + try { + RowCounter.main(args); + fail("should be SecurityException"); + } catch (SecurityException e) { + assertTrue(data.toString().contains("Wrong number of parameters:")); + assertTrue(data.toString().contains( + "Usage: RowCounter [options] [--range=[startKey],[endKey]] " + + "[ ...]")); + assertTrue(data.toString().contains("-Dhbase.client.scanner.caching=100")); + assertTrue(data.toString().contains("-Dmapred.map.tasks.speculative.execution=false")); + } + data.reset(); + try { + args = new String[2]; + args[0] = "table"; + args[1] = "--range=1"; + RowCounter.main(args); + fail("should be SecurityException"); + } catch (SecurityException e) { + assertTrue(data.toString().contains( + "Please specify range in such format as \"--range=a,b\" or, with only one boundary," + + " \"--range=,b\" or \"--range=a,\"")); + assertTrue(data.toString().contains( + "Usage: RowCounter [options] [--range=[startKey],[endKey]] " + + "[ ...]")); + } + + } finally { + System.setErr(oldPrintStream); + System.setSecurityManager(SECURITY_MANAGER); + } + + } + } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java new file mode 100644 index 0000000..adeccdf --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java @@ -0,0 +1,109 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the License. You may + * obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hbase.mapreduce; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapreduce.Job; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import static org.junit.Assert.*; + +/** + * Test TableMapReduceUtil + */ + +@Category(SmallTests.class) +public class TestTableMapReduceUtil { + + /** + * Test initTableMapperJob method + */ + @Test + public void testInitTableMapperJob() throws Exception { + Configuration configuration = new Configuration(); + Job job = new Job(configuration, "tableName"); + TableMapReduceUtil.initTableMapperJob("Table", new Scan(), Import.Importer.class, Text.class, + Text.class, job, false, HLogInputFormat.class); + assertEquals(HLogInputFormat.class, job.getInputFormatClass()); + assertEquals(Import.Importer.class, job.getMapperClass()); + assertEquals(LongWritable.class, job.getOutputKeyClass()); + assertEquals(Text.class, job.getOutputValueClass()); + assertNull(job.getCombinerClass()); + assertEquals("Table", job.getConfiguration().get(TableInputFormat.INPUT_TABLE)); + assertEquals( + "org.apache.hadoop.io.serializer.WritableSerialization," + + "org.apache.hadoop.hbase.mapreduce.MutationSerialization," + + "org.apache.hadoop.hbase.mapreduce.ResultSerialization," + + "org.apache.hadoop.hbase.mapreduce.KeyValueSerialization", + job.getConfiguration().get("io.serializations")); + + configuration = new Configuration(); + job = new Job(configuration, "tableName"); + TableMapReduceUtil.initTableMapperJob(Bytes.toBytes("Table"), new Scan(), + Import.Importer.class, Text.class, Text.class, job, false, HLogInputFormat.class); + assertEquals(HLogInputFormat.class, job.getInputFormatClass()); + assertEquals(Import.Importer.class, job.getMapperClass()); + assertEquals(LongWritable.class, job.getOutputKeyClass()); + assertEquals(Text.class, job.getOutputValueClass()); + assertNull(job.getCombinerClass()); + assertEquals("Table", job.getConfiguration().get(TableInputFormat.INPUT_TABLE)); + assertEquals( + "org.apache.hadoop.io.serializer.WritableSerialization," + + "org.apache.hadoop.hbase.mapreduce.MutationSerialization," + + "org.apache.hadoop.hbase.mapreduce.ResultSerialization," + + "org.apache.hadoop.hbase.mapreduce.KeyValueSerialization", + job.getConfiguration().get("io.serializations")); + + configuration = new Configuration(); + job = new Job(configuration, "tableName"); + TableMapReduceUtil.initTableMapperJob(Bytes.toBytes("Table"), new Scan(), + Import.Importer.class, Text.class, Text.class, job); + assertEquals(TableInputFormat.class, job.getInputFormatClass()); + assertEquals(Import.Importer.class, job.getMapperClass()); + assertEquals(LongWritable.class, job.getOutputKeyClass()); + assertEquals(Text.class, job.getOutputValueClass()); + assertNull(job.getCombinerClass()); + assertEquals("Table", job.getConfiguration().get(TableInputFormat.INPUT_TABLE)); + assertEquals( + "org.apache.hadoop.io.serializer.WritableSerialization," + + "org.apache.hadoop.hbase.mapreduce.MutationSerialization," + + "org.apache.hadoop.hbase.mapreduce.ResultSerialization," + + "org.apache.hadoop.hbase.mapreduce.KeyValueSerialization", + job.getConfiguration().get("io.serializations")); + + configuration = new Configuration(); + job = new Job(configuration, "tableName"); +
TableMapReduceUtil.initTableMapperJob(Bytes.toBytes("Table"), new Scan(), + Import.Importer.class, Text.class, Text.class, job, false); + assertEquals(TableInputFormat.class, job.getInputFormatClass()); + assertEquals(Import.Importer.class, job.getMapperClass()); + assertEquals(LongWritable.class, job.getOutputKeyClass()); + assertEquals(Text.class, job.getOutputValueClass()); + assertNull(job.getCombinerClass()); + assertEquals("Table", job.getConfiguration().get(TableInputFormat.INPUT_TABLE)); + assertEquals( + "org.apache.hadoop.io.serializer.WritableSerialization," + + "org.apache.hadoop.hbase.mapreduce.MutationSerialization," + + "org.apache.hadoop.hbase.mapreduce.ResultSerialization," + + "org.apache.hadoop.hbase.mapreduce.KeyValueSerialization", + job.getConfiguration().get("io.serializations")); + } +} \ No newline at end of file diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java index 93653af..e9b4015 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java @@ -19,12 +19,18 @@ package org.apache.hadoop.hbase.mapreduce; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.ArrayList; +import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.client.Delete; @@ -32,12 +38,24 @@ import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.mapreduce.WALPlayer.HLogKeyValueMapper; import org.apache.hadoop.hbase.regionserver.wal.HLog; +import org.apache.hadoop.hbase.regionserver.wal.HLogKey; +import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.LauncherSecurityManager; +import org.apache.hadoop.mapreduce.Mapper; +import org.apache.hadoop.mapreduce.Mapper.Context; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.*; /** * Basic test for the WALPlayer M/R tool @@ -90,14 +108,92 @@ public class TestWALPlayer { String walInputDir = new Path(cluster.getMaster().getMasterFileSystem() .getRootDir(), HConstants.HREGION_LOGDIR_NAME).toString(); - WALPlayer player = new WALPlayer(TEST_UTIL.getConfiguration()); + Configuration configuration= TEST_UTIL.getConfiguration(); + WALPlayer player = new WALPlayer(configuration); + String optionName="_test_.name"; + configuration.set(optionName, "1000"); + player.setupTime(configuration, optionName); + assertEquals(1000,configuration.getLong(optionName,0)); assertEquals(0, player.run(new String[] { 
walInputDir, Bytes.toString(TABLENAME1), Bytes.toString(TABLENAME2) })); + + // verify the WAL was replayed into table 2 Get g = new Get(ROW); Result r = t2.get(g); assertEquals(1, r.size()); assertTrue(Bytes.equals(COLUMN2, r.raw()[0].getQualifier())); } + + /** + * Test HLogKeyValueMapper setup and map + */ + @Test + public void testHLogKeyValueMapper() throws Exception { + Configuration configuration = new Configuration(); + configuration.set(WALPlayer.TABLES_KEY, "table"); + HLogKeyValueMapper mapper = new HLogKeyValueMapper(); + HLogKey key = mock(HLogKey.class); + when(key.getTablename()).thenReturn(Bytes.toBytes("table")); + @SuppressWarnings("unchecked") + Mapper.Context context = + mock(Context.class); + when(context.getConfiguration()).thenReturn(configuration); + + WALEdit value = mock(WALEdit.class); + List values = new ArrayList(); + KeyValue kv1 = mock(KeyValue.class); + when(kv1.getFamily()).thenReturn(Bytes.toBytes("family")); + when(kv1.getRow()).thenReturn(Bytes.toBytes("row")); + values.add(kv1); + when(value.getKeyValues()).thenReturn(values); + mapper.setup(context); + + doAnswer(new Answer() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0]; + KeyValue key = (KeyValue) invocation.getArguments()[1]; + assertEquals("row", Bytes.toString(writer.get())); + assertEquals("row", Bytes.toString(key.getRow())); + return null; + } + }).when(context).write(any(ImmutableBytesWritable.class), any(KeyValue.class)); + + mapper.map(key, value, context); + + } + + /** + * Test main method + */ + @Test + public void testMainMethod() throws Exception{ + + PrintStream oldPrintStream = System.err; + SecurityManager SECURITY_MANAGER = System.getSecurityManager(); + new LauncherSecurityManager(); + ByteArrayOutputStream data = new ByteArrayOutputStream(); + String[] args = {}; + System.setErr(new PrintStream(data)); + try { + System.setErr(new PrintStream(data)); + try { + WALPlayer.main(args); + fail("should be SecurityException"); + } catch (SecurityException e) { + assertTrue(data.toString().contains("ERROR: Wrong number of arguments:")); + assertTrue(data.toString().contains("Usage: WALPlayer [options] " + + " []")); + assertTrue(data.toString().contains("-Dhlog.bulk.output=/path/for/output")); + } + + } finally { + System.setErr(oldPrintStream); + System.setSecurityManager(SECURITY_MANAGER); + } + + } + } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/LauncherSecurityManager.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/LauncherSecurityManager.java new file mode 100644 index 0000000..d4e7f2a --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/LauncherSecurityManager.java @@ -0,0 +1,72 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.util; + +import java.security.Permission; + +public class LauncherSecurityManager extends SecurityManager { + + private static boolean exitInvoked; + private static int exitCode; + private SecurityManager securityManager; + + public LauncherSecurityManager() { + reset(); + securityManager = System.getSecurityManager(); + System.setSecurityManager(this); + } + + @Override + public void checkPermission(Permission perm, Object context) { + if (securityManager != null) { + // check everything with the original SecurityManager + securityManager.checkPermission(perm, context); + } + } + + @Override + public void checkPermission(Permission perm) { + if (securityManager != null) { + // check everything with the original SecurityManager + securityManager.checkPermission(perm); + } + } + + @Override + public void checkExit(int status) throws SecurityException { + exitInvoked = true; + exitCode = status; + throw new SecurityException("Intercepted System.exit(" + status + ")"); + } + + public static boolean getExitInvoked() { + return exitInvoked; + } + + public static int getExitCode() { + return exitCode; + } + + public static void reset() { + exitInvoked = false; + exitCode = 0; + } + +}