commit 7b9da633f8e67b3513691ecd38441dd3251df571
Author: Enis Soztutar
Date:   Sun Sep 21 20:41:27 2014 -0700

    HBASE-12046 v1

diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 0d3ead0..2f6cb33 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -610,11 +610,11 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
       // TODO: Allow maxVersion of 0 to be the way you say "Keep all versions".
       // Until there is support, consider 0 or < 0 -- a configuration error.
       throw new IllegalArgumentException("Maximum versions must be positive");
-    }
-    if (maxVersions < this.getMinVersions()) {
+    }
+    if (maxVersions < this.getMinVersions()) {
       throw new IllegalArgumentException("Set MaxVersion to " + maxVersions
           + " while minVersion is " + this.getMinVersions()
-          + ". Maximum versions must be >= minimum versions ");
+          + ". Maximum versions must be >= minimum versions ");
     }
     setValue(HConstants.VERSIONS, Integer.toString(maxVersions));
     cachedMaxVersions = maxVersions;
@@ -709,7 +709,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   /**
    * Set whether the tags should be compressed along with DataBlockEncoding. When no
    * DataBlockEncoding is been used, this is having no effect.
-   *
+   *
    * @param compressTags
    * @return this (for chained invocation)
    */
@@ -751,7 +751,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   }

   /**
-   * @return True if we are to favor keeping all values for this column family in the
+   * @return True if we are to favor keeping all values for this column family in the
    * HRegionServer cache.
    */
   public boolean isInMemory() {
@@ -1118,6 +1118,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   }

   // Comparable
+  @Override
   public int compareTo(HColumnDescriptor o) {
     int result = Bytes.compareTo(this.name, o.getName());
     if (result == 0) {
@@ -1225,12 +1226,13 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    * @param key Config key. Same as XML config key e.g. hbase.something.or.other.
    * @param value String value. If null, removes the configuration.
    */
-  public void setConfiguration(String key, String value) {
+  public HColumnDescriptor setConfiguration(String key, String value) {
     if (value == null) {
       removeConfiguration(key);
     } else {
       configuration.put(key, value);
     }
+    return this;
   }

   /**
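The HColumnDescriptor change above is the heart of the patch: setters now return the descriptor, so calls can be chained. A minimal usage sketch of the style this enables (the family name, values, and key are illustrative, not from this patch; per the builder-style test added below, the remaining setXXX methods return the descriptor the same way):

    HColumnDescriptor hcd = new HColumnDescriptor("cf")
        .setMaxVersions(3)
        .setInMemory(true)
        .setConfiguration("hbase.something.or.other", "value");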
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index 0e9e25c..cd8357f 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -254,6 +254,7 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * INTERNAL Private constructor used internally creating table descriptors for
    * catalog tables, hbase:meta and -ROOT-.
    */
+  @InterfaceAudience.Private
   protected HTableDescriptor(final TableName name, HColumnDescriptor[] families) {
     setName(name);
     for(HColumnDescriptor descriptor : families) {
@@ -475,17 +476,19 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * @param value The value.
    * @see #values
    */
-  public void setValue(byte[] key, byte[] value) {
+  public HTableDescriptor setValue(byte[] key, byte[] value) {
     setValue(new Bytes(key), new Bytes(value));
+    return this;
   }

   /*
    * @param key The key.
    * @param value The value.
    */
-  private void setValue(final Bytes key,
+  private HTableDescriptor setValue(final Bytes key,
       final String value) {
     setValue(key, new Bytes(Bytes.toBytes(value)));
+    return this;
   }

   /*
@@ -494,16 +497,17 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * @param key The key.
    * @param value The value.
    */
-  public void setValue(final Bytes key,
+  public HTableDescriptor setValue(final Bytes key,
       final Bytes value) {
     if (key.compareTo(DEFERRED_LOG_FLUSH_KEY) == 0) {
       boolean isDeferredFlush = Boolean.valueOf(Bytes.toString(value.get()));
       LOG.warn("HTableDescriptor property:" + DEFERRED_LOG_FLUSH + " is deprecated, " +
           "use " + DURABILITY + " instead");
       setDurability(isDeferredFlush ? Durability.ASYNC_WAL : DEFAULT_DURABLITY);
-      return;
+      return this;
     }
     values.put(key, value);
+    return this;
   }

   /**
@@ -513,12 +517,13 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * @param value The value.
    * @see #values
    */
-  public void setValue(String key, String value) {
+  public HTableDescriptor setValue(String key, String value) {
     if (value == null) {
       remove(key);
     } else {
       setValue(Bytes.toBytes(key), Bytes.toBytes(value));
     }
+    return this;
   }

   /**
@@ -569,8 +574,8 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * @param readOnly True if all of the columns in the table should be read
    * only.
    */
-  public void setReadOnly(final boolean readOnly) {
-    setValue(READONLY_KEY, readOnly? TRUE: FALSE);
+  public HTableDescriptor setReadOnly(final boolean readOnly) {
+    return setValue(READONLY_KEY, readOnly? TRUE: FALSE);
   }

   /**
@@ -588,17 +593,19 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    *
    * @param isEnable True if enable compaction.
    */
-  public void setCompactionEnabled(final boolean isEnable) {
+  public HTableDescriptor setCompactionEnabled(final boolean isEnable) {
     setValue(COMPACTION_ENABLED_KEY, isEnable ? TRUE : FALSE);
+    return this;
   }

   /**
    * Sets the {@link Durability} setting for the table. This defaults to Durability.USE_DEFAULT.
    * @param durability enum value
    */
-  public void setDurability(Durability durability) {
+  public HTableDescriptor setDurability(Durability durability) {
     this.durability = durability;
     setValue(DURABILITY_KEY, durability.name());
+    return this;
   }

   /**
@@ -636,7 +643,9 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * Get the name of the table as a byte array.
    *
    * @return name of table
+   * @deprecated Use {@link #getTableName()} instead
    */
+  @Deprecated
   public byte[] getName() {
     return name.getName();
   }
@@ -656,8 +665,9 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * default is defined in {@link org.apache.hadoop.hbase.regionserver.RegionSplitPolicy}
    * @param clazz the class name
    */
-  public void setRegionSplitPolicyClassName(String clazz) {
+  public HTableDescriptor setRegionSplitPolicyClassName(String clazz) {
     setValue(SPLIT_POLICY, clazz);
+    return this;
   }

   /**
@@ -678,14 +688,16 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * @param name name of table
    */
   @Deprecated
-  public void setName(byte[] name) {
+  public HTableDescriptor setName(byte[] name) {
     setName(TableName.valueOf(name));
+    return this;
   }

   @Deprecated
-  public void setName(TableName name) {
+  public HTableDescriptor setName(TableName name) {
     this.name = name;
     setMetaFlags(this.name);
+    return this;
   }

   /**
@@ -720,8 +732,9 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * @param maxFileSize The maximum file size that a store file can grow to
    * before a split is triggered.
    */
-  public void setMaxFileSize(long maxFileSize) {
+  public HTableDescriptor setMaxFileSize(long maxFileSize) {
     setValue(MAX_FILESIZE_KEY, Long.toString(maxFileSize));
+    return this;
   }

   /**
@@ -745,19 +758,21 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    *
    * @param memstoreFlushSize memory cache flush size for each hregion
    */
-  public void setMemStoreFlushSize(long memstoreFlushSize) {
+  public HTableDescriptor setMemStoreFlushSize(long memstoreFlushSize) {
     setValue(MEMSTORE_FLUSHSIZE_KEY, Long.toString(memstoreFlushSize));
+    return this;
   }

   /**
    * Adds a column family.
    * @param family HColumnDescriptor of family to add.
    */
-  public void addFamily(final HColumnDescriptor family) {
+  public HTableDescriptor addFamily(final HColumnDescriptor family) {
     if (family.getName() == null || family.getName().length <= 0) {
       throw new NullPointerException("Family name cannot be null or empty");
     }
     this.families.put(family.getName(), family);
+    return this;
   }

   /**
@@ -999,9 +1014,10 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * Sets the number of replicas per region.
    * @param regionReplication the replication factor per region
    */
-  public void setRegionReplication(int regionReplication) {
+  public HTableDescriptor setRegionReplication(int regionReplication) {
     setValue(REGION_REPLICATION_KEY,
         new Bytes(Bytes.toBytes(Integer.toString(regionReplication))));
+    return this;
   }

   /**
@@ -1066,8 +1082,9 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * @param className Full class name.
    * @throws IOException
    */
-  public void addCoprocessor(String className) throws IOException {
+  public HTableDescriptor addCoprocessor(String className) throws IOException {
     addCoprocessor(className, null, Coprocessor.PRIORITY_USER, null);
+    return this;
   }

@@ -1085,7 +1102,7 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * @param kvs Arbitrary key-value parameter pairs passed into the coprocessor.
    * @throws IOException
    */
-  public void addCoprocessor(String className, Path jarFilePath,
+  public HTableDescriptor addCoprocessor(String className, Path jarFilePath,
                              int priority, final Map<String, String> kvs)
   throws IOException {
     if (hasCoprocessor(className)) {
@@ -1132,6 +1149,7 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
         "|" + className + "|" + Integer.toString(priority) + "|" +
         kvString.toString();
     setValue(key, value);
+    return this;
   }

@@ -1291,18 +1309,19 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
   });

   @Deprecated
-  public void setOwner(User owner) {
-    setOwnerString(owner != null ? owner.getShortName() : null);
+  public HTableDescriptor setOwner(User owner) {
+    return setOwnerString(owner != null ? owner.getShortName() : null);
   }

   // used by admin.rb:alter(table_name,*args) to update owner.
   @Deprecated
-  public void setOwnerString(String ownerString) {
+  public HTableDescriptor setOwnerString(String ownerString) {
     if (ownerString != null) {
       setValue(OWNER_KEY, ownerString);
     } else {
       remove(OWNER_KEY);
     }
+    return this;
   }

   @Deprecated
@@ -1414,12 +1433,13 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * @param key Config key. Same as XML config key e.g. hbase.something.or.other.
    * @param value String value. If null, removes the setting.
    */
-  public void setConfiguration(String key, String value) {
+  public HTableDescriptor setConfiguration(String key, String value) {
     if (value == null) {
       removeConfiguration(key);
     } else {
       configuration.put(key, value);
     }
+    return this;
   }

   /**
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
index d6fcb52..95c2128 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
@@ -61,9 +61,10 @@ public class UnmodifyableHTableDescriptor extends HTableDescriptor {
   /**
    * Does NOT add a column family. This object is immutable
    * @param family HColumnDescriptor of familyto add.
+   * @return
    */
   @Override
-  public void addFamily(final HColumnDescriptor family) {
+  public HTableDescriptor addFamily(final HColumnDescriptor family) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }

@@ -78,42 +79,47 @@ public class UnmodifyableHTableDescriptor extends HTableDescriptor {
   }

   /**
+   * @return
    * @see org.apache.hadoop.hbase.HTableDescriptor#setReadOnly(boolean)
    */
   @Override
-  public void setReadOnly(boolean readOnly) {
+  public HTableDescriptor setReadOnly(boolean readOnly) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }

   /**
+   * @return
    * @see org.apache.hadoop.hbase.HTableDescriptor#setValue(byte[], byte[])
    */
   @Override
-  public void setValue(byte[] key, byte[] value) {
+  public HTableDescriptor setValue(byte[] key, byte[] value) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }

   /**
+   * @return
    * @see org.apache.hadoop.hbase.HTableDescriptor#setValue(java.lang.String, java.lang.String)
    */
   @Override
-  public void setValue(String key, String value) {
+  public HTableDescriptor setValue(String key, String value) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }

   /**
+   * @return
    * @see org.apache.hadoop.hbase.HTableDescriptor#setMaxFileSize(long)
    */
   @Override
-  public void setMaxFileSize(long maxFileSize) {
+  public HTableDescriptor setMaxFileSize(long maxFileSize) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }

   /**
+   * @return
    * @see org.apache.hadoop.hbase.HTableDescriptor#setMemStoreFlushSize(long)
    */
   @Override
-  public void setMemStoreFlushSize(long memstoreFlushSize) {
+  public HTableDescriptor setMemStoreFlushSize(long memstoreFlushSize) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
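The overrides above still compile because Java allows covariant return types: an override may keep the inherited return type or narrow it to a subtype. UnmodifyableHTableDescriptor keeps the broad HTableDescriptor return since it only throws; a subclass that wanted chaining in its own type would narrow instead. A standalone sketch of that pattern (Foo and Baz are hypothetical names, not HBase classes):

    class Foo {
      Foo setThing(int v) { /* remember v */ return this; }
    }

    class Baz extends Foo {
      @Override
      Baz setThing(int v) { // covariant return: a narrower type is still a legal override
        super.setThing(v);
        return this;
      }
    }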
diff --git hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
new file mode 100644
index 0000000..98a68d6
--- /dev/null
+++ hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.BuilderStyleTest;
+import org.junit.experimental.categories.Category;
+import org.junit.Test;
+
+/** Tests the HColumnDescriptor with appropriate arguments */
+@Category({MiscTests.class, SmallTests.class})
+public class TestHColumnDescriptor {
+  @Test
+  public void testPb() throws DeserializationException {
+    HColumnDescriptor hcd = new HColumnDescriptor(
+        HTableDescriptor.META_TABLEDESC.getColumnFamilies()[0]);
+    final int v = 123;
+    hcd.setBlocksize(v);
+    hcd.setTimeToLive(v);
+    hcd.setBlockCacheEnabled(!HColumnDescriptor.DEFAULT_BLOCKCACHE);
+    hcd.setValue("a", "b");
+    hcd.setMaxVersions(v);
+    assertEquals(v, hcd.getMaxVersions());
+    hcd.setMinVersions(v);
+    assertEquals(v, hcd.getMinVersions());
+    hcd.setKeepDeletedCells(!HColumnDescriptor.DEFAULT_KEEP_DELETED);
+    hcd.setInMemory(!HColumnDescriptor.DEFAULT_IN_MEMORY);
+    boolean inmemory = hcd.isInMemory();
+    hcd.setScope(v);
+    hcd.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
+    hcd.setBloomFilterType(BloomType.ROW);
+    hcd.setCompressionType(Algorithm.SNAPPY);
+
+
+    byte [] bytes = hcd.toByteArray();
+    HColumnDescriptor deserializedHcd = HColumnDescriptor.parseFrom(bytes);
+    assertTrue(hcd.equals(deserializedHcd));
+    assertEquals(v, hcd.getBlocksize());
+    assertEquals(v, hcd.getTimeToLive());
+    assertEquals(hcd.getValue("a"), deserializedHcd.getValue("a"));
+    assertEquals(hcd.getMaxVersions(), deserializedHcd.getMaxVersions());
+    assertEquals(hcd.getMinVersions(), deserializedHcd.getMinVersions());
+    assertEquals(hcd.getKeepDeletedCells(), deserializedHcd.getKeepDeletedCells());
+    assertEquals(inmemory, deserializedHcd.isInMemory());
+    assertEquals(hcd.getScope(), deserializedHcd.getScope());
+    assertTrue(deserializedHcd.getCompressionType().equals(Compression.Algorithm.SNAPPY));
+    assertTrue(deserializedHcd.getDataBlockEncoding().equals(DataBlockEncoding.FAST_DIFF));
+    assertTrue(deserializedHcd.getBloomFilterType().equals(BloomType.ROW));
+  }
+
+  @Test
+  /** Tests HColumnDescriptor with empty familyName*/
+  public void testHColumnDescriptorShouldThrowIAEWhenFamiliyNameEmpty()
+      throws Exception {
+    try {
+      new HColumnDescriptor("".getBytes());
+    } catch (IllegalArgumentException e) {
+      assertEquals("Family name can not be empty", e.getLocalizedMessage());
+    }
+  }
+
+  /**
+   * Test that we add and remove strings from configuration properly.
+   */
+  @Test
+  public void testAddGetRemoveConfiguration() throws Exception {
+    HColumnDescriptor desc = new HColumnDescriptor("foo");
+    String key = "Some";
+    String value = "value";
+    desc.setConfiguration(key, value);
+    assertEquals(value, desc.getConfigurationValue(key));
+    desc.removeConfiguration(key);
+    assertEquals(null, desc.getConfigurationValue(key));
+  }
+
+  @Test
+  public void testClassMethodsAreBuilderStyle() {
+    /* HColumnDescriptor should have a builder style setup where setXXX/addXXX methods
+     * can be chainable together:
+     * . For example:
+     * HColumnDescriptor hcd
+     *   = new HColumnDescriptor()
+     *     .setFoo(foo)
+     *     .setBar(bar)
+     *     .setBuz(buz)
+     *
+     * This test ensures that all methods starting with "set" returns the declaring object
+     */
+
+    BuilderStyleTest.assertClassesAreBuilderStyle(HColumnDescriptor.class);
+  }
+}
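TestHTableDescriptor below asserts the same contract for table descriptors, so a table definition can be written as one chained expression. A usage sketch under that contract (table name, family name, and key are illustrative):

    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("t1"))
        .addFamily(new HColumnDescriptor("cf"))
        .setCompactionEnabled(true)
        .setDurability(Durability.ASYNC_WAL)
        .setConfiguration("some.key", "some.value");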
diff --git hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
new file mode 100644
index 0000000..0fa24b4
--- /dev/null
+++ hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
@@ -0,0 +1,228 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.regex.Pattern;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.BuilderStyleTest;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test setting values in the descriptor
+ */
+@Category({MiscTests.class, SmallTests.class})
+public class TestHTableDescriptor {
+  final static Log LOG = LogFactory.getLog(TestHTableDescriptor.class);
+
+  @Test
+  public void testPb() throws DeserializationException, IOException {
+    HTableDescriptor htd = new HTableDescriptor(HTableDescriptor.META_TABLEDESC);
+    final int v = 123;
+    htd.setMaxFileSize(v);
+    htd.setDurability(Durability.ASYNC_WAL);
+    htd.setReadOnly(true);
+    htd.setRegionReplication(2);
+    byte [] bytes = htd.toByteArray();
+    HTableDescriptor deserializedHtd = HTableDescriptor.parseFrom(bytes);
+    assertEquals(htd, deserializedHtd);
+    assertEquals(v, deserializedHtd.getMaxFileSize());
+    assertTrue(deserializedHtd.isReadOnly());
+    assertEquals(Durability.ASYNC_WAL, deserializedHtd.getDurability());
+    assertEquals(deserializedHtd.getRegionReplication(), 2);
+  }
+
+  /**
+   * Test cps in the table description
+   * @throws Exception
+   */
+  @Test
+  public void testGetSetRemoveCP() throws Exception {
+    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
+    // simple CP
+    String className = "org.apache.hadoop.hbase.coprocessor.BaseRegionObserver";
+    // add and check that it is present
+    desc.addCoprocessor(className);
+    assertTrue(desc.hasCoprocessor(className));
+    // remove it and check that it is gone
+    desc.removeCoprocessor(className);
+    assertFalse(desc.hasCoprocessor(className));
+  }
+
+  /**
+   * Test cps in the table description
+   * @throws Exception
+   */
+  @Test
+  public void testSetListRemoveCP() throws Exception {
+    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("testGetSetRemoveCP"));
+    // simple CP
+    String className1 = "org.apache.hadoop.hbase.coprocessor.BaseRegionObserver";
+    String className2 = "org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver";
+    // Check that any coprocessor is present.
+    assertTrue(desc.getCoprocessors().size() == 0);
+
+    // Add the 1 coprocessor and check if present.
+    desc.addCoprocessor(className1);
+    assertTrue(desc.getCoprocessors().size() == 1);
+    assertTrue(desc.getCoprocessors().contains(className1));
+
+    // Add the 2nd coprocessor and check if present.
+    // remove it and check that it is gone
+    desc.addCoprocessor(className2);
+    assertTrue(desc.getCoprocessors().size() == 2);
+    assertTrue(desc.getCoprocessors().contains(className2));
+
+    // Remove one and check
+    desc.removeCoprocessor(className1);
+    assertTrue(desc.getCoprocessors().size() == 1);
+    assertFalse(desc.getCoprocessors().contains(className1));
+    assertTrue(desc.getCoprocessors().contains(className2));
+
+    // Remove the last and check
+    desc.removeCoprocessor(className2);
+    assertTrue(desc.getCoprocessors().size() == 0);
+    assertFalse(desc.getCoprocessors().contains(className1));
+    assertFalse(desc.getCoprocessors().contains(className2));
+  }
+
+  /**
+   * Test that we add and remove strings from settings properly.
+   * @throws Exception
+   */
+  @Test
+  public void testRemoveString() throws Exception {
+    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
+    String key = "Some";
+    String value = "value";
+    desc.setValue(key, value);
+    assertEquals(value, desc.getValue(key));
+    desc.remove(key);
+    assertEquals(null, desc.getValue(key));
+  }
+
+  String legalTableNames[] = { "foo", "with-dash_under.dot", "_under_start_ok",
+      "with-dash.with_underscore", "02-01-2012.my_table_01-02", "xyz._mytable_", "9_9_0.table_02"
+      , "dot1.dot2.table", "new.-mytable", "with-dash.with.dot", "legal..t2", "legal..legal.t2",
+      "trailingdots..", "trailing.dots...", "ns:mytable", "ns:_mytable_", "ns:my_table_01-02"};
+  String illegalTableNames[] = { ".dot_start_illegal", "-dash_start_illegal", "spaces not ok",
+      "-dash-.start_illegal", "new.table with space", "01 .table", "ns:-illegaldash",
+      "new:.illegaldot", "new:illegalcolon1:", "new:illegalcolon1:2"};
+
+  @Test
+  public void testLegalHTableNames() {
+    for (String tn : legalTableNames) {
+      TableName.isLegalFullyQualifiedTableName(Bytes.toBytes(tn));
+    }
+  }
+
+  @Test
+  public void testIllegalHTableNames() {
+    for (String tn : illegalTableNames) {
+      try {
+        TableName.isLegalFullyQualifiedTableName(Bytes.toBytes(tn));
+        fail("invalid tablename " + tn + " should have failed");
+      } catch (Exception e) {
+        // expected
+      }
+    }
+  }
+
+  @Test
+  public void testLegalHTableNamesRegex() {
+    for (String tn : legalTableNames) {
+      TableName tName = TableName.valueOf(tn);
+      assertTrue("Testing: '" + tn + "'", Pattern.matches(TableName.VALID_USER_TABLE_REGEX,
+          tName.getNameAsString()));
+    }
+  }
+
+  @Test
+  public void testIllegalHTableNamesRegex() {
+    for (String tn : illegalTableNames) {
+      LOG.info("Testing: '" + tn + "'");
+      assertFalse(Pattern.matches(TableName.VALID_USER_TABLE_REGEX, tn));
+    }
+  }
+
+  /**
+   * Test default value handling for maxFileSize
+   */
+  @Test
+  public void testGetMaxFileSize() {
+    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
+    assertEquals(-1, desc.getMaxFileSize());
+    desc.setMaxFileSize(1111L);
+    assertEquals(1111L, desc.getMaxFileSize());
+  }
+
+  /**
+   * Test default value handling for memStoreFlushSize
+   */
+  @Test
+  public void testGetMemStoreFlushSize() {
+    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
+    assertEquals(-1, desc.getMemStoreFlushSize());
+    desc.setMemStoreFlushSize(1111L);
+    assertEquals(1111L, desc.getMemStoreFlushSize());
+  }
+
+  /**
+   * Test that we add and remove strings from configuration properly.
+   */
+  @Test
+  public void testAddGetRemoveConfiguration() throws Exception {
+    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
+    String key = "Some";
+    String value = "value";
+    desc.setConfiguration(key, value);
+    assertEquals(value, desc.getConfigurationValue(key));
+    desc.removeConfiguration(key);
+    assertEquals(null, desc.getConfigurationValue(key));
+  }
+
+  @Test
+  public void testClassMethodsAreBuilderStyle() {
+    /* HTableDescriptor should have a builder style setup where setXXX/addXXX methods
+     * can be chainable together:
+     * . For example:
+     * HTableDescriptor htd
+     *   = new HTableDescriptor()
+     *     .setFoo(foo)
+     *     .setBar(bar)
+     *     .setBuz(buz)
+     *
+     * This test ensures that all methods starting with "set" returns the declaring object
+     */
+
+    BuilderStyleTest.assertClassesAreBuilderStyle(HTableDescriptor.class);
+  }
+}
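Both descriptor tests delegate the actual enforcement to BuilderStyleTest, added below after the TestOperation cleanup. Stripped of logging and override handling, the check reduces to roughly this sketch (simplified; the real utility additionally groups overloads by signature so that compiler-generated bridge methods do not cause false failures):

    for (Method m : HTableDescriptor.class.getDeclaredMethods()) {
      if (Modifier.isPublic(m.getModifiers())
          && (m.getName().startsWith("set") || m.getName().startsWith("add"))) {
        // every chainable mutator must hand back the declaring type (or a subtype)
        assertTrue(HTableDescriptor.class.isAssignableFrom(m.getReturnType()));
      }
    }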
diff --git hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index d85cffc..ee65ede 100644
--- hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.client;

 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -32,7 +30,6 @@ import org.junit.Assert;
 import org.junit.Test;

 import java.io.IOException;
-import java.lang.reflect.Method;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -64,8 +61,8 @@ import org.apache.hadoop.hbase.filter.SkipFilter;
 import org.apache.hadoop.hbase.filter.TimestampsFilter;
 import org.apache.hadoop.hbase.filter.ValueFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.util.BuilderStyleTest;
 import org.apache.hadoop.hbase.util.Bytes;
-
 import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.experimental.categories.Category;
@@ -433,7 +430,7 @@ public class TestOperation {
    * .setBar(bar)
    * .setBuz(buz)
    *
-   * This test ensures that all methods starting with "set" returns an Operation object
+   * This test ensures that all methods starting with "set" returns the declaring object
    */

   // TODO: We should ensure all subclasses of Operation is checked.
@@ -449,22 +446,7 @@ public class TestOperation {
         Get.class,
         Scan.class};

-    for (Class clazz : classes) {
-      System.out.println("Checking " + clazz);
-      Method[] methods = clazz.getDeclaredMethods();
-      for (Method method : methods) {
-        Class ret = method.getReturnType();
-        if (method.getName().startsWith("set") || method.getName().startsWith("add")) {
-          System.out.println("  " + clazz.getSimpleName() + "." + method.getName() + "() : "
-              + ret.getSimpleName());
-          String errorMsg = "All setXXX() methods in " + clazz.getSimpleName() + " should return a "
-              + clazz.getSimpleName() + " object in builder style. Offending method:"
-              + method.getName();
-          assertTrue(errorMsg, Operation.class.isAssignableFrom(ret)
-              || Attributes.class.isAssignableFrom(ret)); // for setAttributes()
-        }
-      }
-    }
+    BuilderStyleTest.assertClassesAreBuilderStyle(classes);
   }
 }
diff --git hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java
new file mode 100644
index 0000000..d2d0a53
--- /dev/null
+++ hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import static org.junit.Assert.assertTrue;
+
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Utility class to check whether a given class conforms to builder-style:
+ *   Foo foo =
+ *     new Foo()
+ *       .setBar(bar)
+ *       .setBaz(baz)
+ */
+public class BuilderStyleTest {
+
+  /*
+   * If a base class Foo declares a method setFoo() returning Foo, then the subclass should
+   * re-declare the methods overriding the return class with the subclass:
+   *
+   * class Foo {
+   *   Foo setFoo() {
+   *     ..
+   *     return this;
+   *   }
+   * }
+   *
+   * class Bar {
+   *   Bar setFoo() {
+   *     return (Bar) super.setFoo();
+   *   }
+   * }
+   *
+   */
+  @SuppressWarnings("rawtypes")
+  public static void assertClassesAreBuilderStyle(Class... classes) {
+    for (Class clazz : classes) {
+      System.out.println("Checking " + clazz);
+      Method[] methods = clazz.getDeclaredMethods();
+      Map<String, Set<Method>> methodsBySignature = new HashMap<>();
+      for (Method method : methods) {
+        if (!Modifier.isPublic(method.getModifiers())) {
+          continue; // only public classes
+        }
+        Class ret = method.getReturnType();
+        if (method.getName().startsWith("set") || method.getName().startsWith("add")) {
+          System.out.println("  " + clazz.getSimpleName() + "." + method.getName() + "() : "
+              + ret.getSimpleName());
+
+          // because of subclass / super class method overrides, we group the methods fitting the
+          // same signatures because we get two method definitions from java reflection:
+          //   Mutation.setDurability() : Mutation
+          //   Delete.setDurability() : Mutation
+          //   Delete.setDurability() : Delete
+          String sig = method.getName();
+          for (Class param : method.getParameterTypes()) {
+            sig += param.getName();
+          }
+          Set<Method> sigMethods = methodsBySignature.get(sig);
+          if (sigMethods == null) {
+            sigMethods = new HashSet<>();
+            methodsBySignature.put(sig, sigMethods);
+          }
+          sigMethods.add(method);
+        }
+      }
+      // now iterate over the methods by signatures
+      for (Map.Entry<String, Set<Method>> e : methodsBySignature.entrySet()) {
+        // at least one of method sigs should return the declaring class
+        boolean found = false;
+        for (Method m : e.getValue()) {
+          found = clazz.isAssignableFrom(m.getReturnType());
+          if (found) break;
+        }
+        String errorMsg = "All setXXX()|addXX() methods in " + clazz.getSimpleName()
+            + " should return a " + clazz.getSimpleName() + " object in builder style. "
+            + "Offending method:" + e.getValue().iterator().next().getName();
+        assertTrue(errorMsg, found);
+      }
+    }
+  }
+}
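The signature grouping in the utility above exists because a covariant override leaves a synthetic bridge method behind, and Class.getDeclaredMethods() reports both. A self-contained illustration (Base, Sub, and BridgeDemo are hypothetical names, not part of this patch):

    import java.lang.reflect.Method;

    class Base { Base setX() { return this; } }

    class Sub extends Base {
      @Override Sub setX() { return this; } // covariant override
    }

    public class BridgeDemo {
      public static void main(String[] args) {
        for (Method m : Sub.class.getDeclaredMethods()) {
          // prints both "setX : Sub" and the compiler-generated bridge "setX : Base"
          System.out.println(m.getName() + " : " + m.getReturnType().getSimpleName());
        }
      }
    }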
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
deleted file mode 100644
index 132004d..0000000
--- hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.io.compress.Compression;
-import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.regionserver.BloomType;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.experimental.categories.Category;
-
-import org.junit.Test;
-
-/** Tests the HColumnDescriptor with appropriate arguments */
-@Category({MiscTests.class, SmallTests.class})
-public class TestHColumnDescriptor {
-  @Test
-  public void testPb() throws DeserializationException {
-    HColumnDescriptor hcd = new HColumnDescriptor(
-        HTableDescriptor.META_TABLEDESC.getColumnFamilies()[0]);
-    final int v = 123;
-    hcd.setBlocksize(v);
-    hcd.setTimeToLive(v);
-    hcd.setBlockCacheEnabled(!HColumnDescriptor.DEFAULT_BLOCKCACHE);
-    hcd.setValue("a", "b");
-    hcd.setMaxVersions(v);
-    assertEquals(v, hcd.getMaxVersions());
-    hcd.setMinVersions(v);
-    assertEquals(v, hcd.getMinVersions());
-    hcd.setKeepDeletedCells(!HColumnDescriptor.DEFAULT_KEEP_DELETED);
-    hcd.setInMemory(!HColumnDescriptor.DEFAULT_IN_MEMORY);
-    boolean inmemory = hcd.isInMemory();
-    hcd.setScope(v);
-    hcd.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
-    hcd.setBloomFilterType(BloomType.ROW);
-    hcd.setCompressionType(Algorithm.SNAPPY);
-
-
-    byte [] bytes = hcd.toByteArray();
-    HColumnDescriptor deserializedHcd = HColumnDescriptor.parseFrom(bytes);
-    assertTrue(hcd.equals(deserializedHcd));
-    assertEquals(v, hcd.getBlocksize());
-    assertEquals(v, hcd.getTimeToLive());
-    assertEquals(hcd.getValue("a"), deserializedHcd.getValue("a"));
-    assertEquals(hcd.getMaxVersions(), deserializedHcd.getMaxVersions());
-    assertEquals(hcd.getMinVersions(), deserializedHcd.getMinVersions());
-    assertEquals(hcd.getKeepDeletedCells(), deserializedHcd.getKeepDeletedCells());
-    assertEquals(inmemory, deserializedHcd.isInMemory());
-    assertEquals(hcd.getScope(), deserializedHcd.getScope());
-    assertTrue(deserializedHcd.getCompressionType().equals(Compression.Algorithm.SNAPPY));
-    assertTrue(deserializedHcd.getDataBlockEncoding().equals(DataBlockEncoding.FAST_DIFF));
-    assertTrue(deserializedHcd.getBloomFilterType().equals(BloomType.ROW));
-  }
-
-  @Test
-  /** Tests HColumnDescriptor with empty familyName*/
-  public void testHColumnDescriptorShouldThrowIAEWhenFamiliyNameEmpty()
-      throws Exception {
-    try {
-      new HColumnDescriptor("".getBytes());
-    } catch (IllegalArgumentException e) {
-      assertEquals("Family name can not be empty", e.getLocalizedMessage());
-    }
-  }
-
-  /**
-   * Test that we add and remove strings from configuration properly.
-   */
-  @Test
-  public void testAddGetRemoveConfiguration() throws Exception {
-    HColumnDescriptor desc = new HColumnDescriptor("foo");
-    String key = "Some";
-    String value = "value";
-    desc.setConfiguration(key, value);
-    assertEquals(value, desc.getConfigurationValue(key));
-    desc.removeConfiguration(key);
-    assertEquals(null, desc.getConfigurationValue(key));
-  }
-}
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
deleted file mode 100644
index c52eecb..0000000
--- hbase-server/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
+++ /dev/null
@@ -1,212 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.io.IOException;
-import java.util.regex.Pattern;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
-import org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver;
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-/**
- * Test setting values in the descriptor
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestHTableDescriptor {
-  final static Log LOG = LogFactory.getLog(TestHTableDescriptor.class);
-
-  @Test
-  public void testPb() throws DeserializationException, IOException {
-    HTableDescriptor htd = new HTableDescriptor(HTableDescriptor.META_TABLEDESC);
-    final int v = 123;
-    htd.setMaxFileSize(v);
-    htd.setDurability(Durability.ASYNC_WAL);
-    htd.setReadOnly(true);
-    htd.setRegionReplication(2);
-    byte [] bytes = htd.toByteArray();
-    HTableDescriptor deserializedHtd = HTableDescriptor.parseFrom(bytes);
-    assertEquals(htd, deserializedHtd);
-    assertEquals(v, deserializedHtd.getMaxFileSize());
-    assertTrue(deserializedHtd.isReadOnly());
-    assertEquals(Durability.ASYNC_WAL, deserializedHtd.getDurability());
-    assertEquals(deserializedHtd.getRegionReplication(), 2);
-  }
-
-  /**
-   * Test cps in the table description
-   * @throws Exception
-   */
-  @Test
-  public void testGetSetRemoveCP() throws Exception {
-    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
-    // simple CP
-    String className = BaseRegionObserver.class.getName();
-    // add and check that it is present
-    desc.addCoprocessor(className);
-    assertTrue(desc.hasCoprocessor(className));
-    // remove it and check that it is gone
-    desc.removeCoprocessor(className);
-    assertFalse(desc.hasCoprocessor(className));
-  }
-
-  /**
-   * Test cps in the table description
-   * @throws Exception
-   */
-  @Test
-  public void testSetListRemoveCP() throws Exception {
-    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("testGetSetRemoveCP"));
-    // simple CP
-    String className1 = BaseRegionObserver.class.getName();
-    String className2 = SampleRegionWALObserver.class.getName();
-    // Check that any coprocessor is present.
-    assertTrue(desc.getCoprocessors().size() == 0);
-
-    // Add the 1 coprocessor and check if present.
-    desc.addCoprocessor(className1);
-    assertTrue(desc.getCoprocessors().size() == 1);
-    assertTrue(desc.getCoprocessors().contains(className1));
-
-    // Add the 2nd coprocessor and check if present.
-    // remove it and check that it is gone
-    desc.addCoprocessor(className2);
-    assertTrue(desc.getCoprocessors().size() == 2);
-    assertTrue(desc.getCoprocessors().contains(className2));
-
-    // Remove one and check
-    desc.removeCoprocessor(className1);
-    assertTrue(desc.getCoprocessors().size() == 1);
-    assertFalse(desc.getCoprocessors().contains(className1));
-    assertTrue(desc.getCoprocessors().contains(className2));
-
-    // Remove the last and check
-    desc.removeCoprocessor(className2);
-    assertTrue(desc.getCoprocessors().size() == 0);
-    assertFalse(desc.getCoprocessors().contains(className1));
-    assertFalse(desc.getCoprocessors().contains(className2));
-  }
-
-  /**
-   * Test that we add and remove strings from settings properly.
-   * @throws Exception
-   */
-  @Test
-  public void testRemoveString() throws Exception {
-    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
-    String key = "Some";
-    String value = "value";
-    desc.setValue(key, value);
-    assertEquals(value, desc.getValue(key));
-    desc.remove(key);
-    assertEquals(null, desc.getValue(key));
-  }
-
-  String legalTableNames[] = { "foo", "with-dash_under.dot", "_under_start_ok",
-      "with-dash.with_underscore", "02-01-2012.my_table_01-02", "xyz._mytable_", "9_9_0.table_02"
-      , "dot1.dot2.table", "new.-mytable", "with-dash.with.dot", "legal..t2", "legal..legal.t2",
-      "trailingdots..", "trailing.dots...", "ns:mytable", "ns:_mytable_", "ns:my_table_01-02"};
-  String illegalTableNames[] = { ".dot_start_illegal", "-dash_start_illegal", "spaces not ok",
-      "-dash-.start_illegal", "new.table with space", "01 .table", "ns:-illegaldash",
-      "new:.illegaldot", "new:illegalcolon1:", "new:illegalcolon1:2"};
-
-  @Test
-  public void testLegalHTableNames() {
-    for (String tn : legalTableNames) {
-      TableName.isLegalFullyQualifiedTableName(Bytes.toBytes(tn));
-    }
-  }
-
-  @Test
-  public void testIllegalHTableNames() {
-    for (String tn : illegalTableNames) {
-      try {
-        TableName.isLegalFullyQualifiedTableName(Bytes.toBytes(tn));
-        fail("invalid tablename " + tn + " should have failed");
-      } catch (Exception e) {
-        // expected
-      }
-    }
-  }
-
-  @Test
-  public void testLegalHTableNamesRegex() {
-    for (String tn : legalTableNames) {
-      TableName tName = TableName.valueOf(tn);
-      assertTrue("Testing: '" + tn + "'", Pattern.matches(TableName.VALID_USER_TABLE_REGEX,
-          tName.getNameAsString()));
-    }
-  }
-
-  @Test
-  public void testIllegalHTableNamesRegex() {
-    for (String tn : illegalTableNames) {
-      LOG.info("Testing: '" + tn + "'");
-      assertFalse(Pattern.matches(TableName.VALID_USER_TABLE_REGEX, tn));
-    }
-  }
-
-  /**
-   * Test default value handling for maxFileSize
-   */
-  @Test
-  public void testGetMaxFileSize() {
-    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
-    assertEquals(-1, desc.getMaxFileSize());
-    desc.setMaxFileSize(1111L);
-    assertEquals(1111L, desc.getMaxFileSize());
-  }
-
-  /**
-   * Test default value handling for memStoreFlushSize
-   */
-  @Test
-  public void testGetMemStoreFlushSize() {
-    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
-    assertEquals(-1, desc.getMemStoreFlushSize());
-    desc.setMemStoreFlushSize(1111L);
-    assertEquals(1111L, desc.getMemStoreFlushSize());
-  }
-
-  /**
-   * Test that we add and remove strings from configuration properly.
-   */
-  @Test
-  public void testAddGetRemoveConfiguration() throws Exception {
-    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
-    String key = "Some";
-    String value = "value";
-    desc.setConfiguration(key, value);
-    assertEquals(value, desc.getConfigurationValue(key));
-    desc.removeConfiguration(key);
-    assertEquals(null, desc.getConfigurationValue(key));
-  }
-}
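Taken together, the patch lets a table definition read as a single chained expression at the call site. A hypothetical end-to-end sketch (names and the admin handle are illustrative; HBaseAdmin.createTable accepts an HTableDescriptor):

    admin.createTable(new HTableDescriptor(TableName.valueOf("t1"))
        .addFamily(new HColumnDescriptor("cf")
            .setMaxVersions(3)
            .setInMemory(true))
        .setDurability(Durability.ASYNC_WAL));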