diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 60281a8..58c7d4c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase;
 
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -25,7 +26,6 @@ import java.util.regex.Pattern;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.util.Bytes;
 
 /**
  * HConstants holds a bunch of HBase-related constants
@@ -33,6 +33,20 @@ import org.apache.hadoop.hbase.util.Bytes;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public final class HConstants {
+  /** When we encode strings, we always specify UTF8 encoding */
+  public static final String UTF8_ENCODING = "UTF-8";
+
+  /** When we encode strings, we always specify UTF8 encoding */
+  public static final Charset UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
+
+  private static byte[] toBytes(String target) {
+    return target.getBytes(UTF8_CHARSET);
+  }
+
+  private static String toString(byte[] target) {
+    return new String(target, UTF8_CHARSET);
+  }
+
   /**
    * Status codes used for return values of bulk operations.
    */
@@ -306,10 +320,10 @@ public final class HConstants {
   // should go down.
 
   /** The root table's name.*/
-  public static final byte [] ROOT_TABLE_NAME = Bytes.toBytes("-ROOT-");
+  public static final byte [] ROOT_TABLE_NAME = toBytes("-ROOT-");
 
   /** The META table's name. */
-  public static final byte [] META_TABLE_NAME = Bytes.toBytes(".META.");
+  public static final byte [] META_TABLE_NAME = toBytes(".META.");
 
   /** delimiter used between portions of a region name */
   public static final int META_ROW_DELIMITER = ',';
@@ -318,33 +332,33 @@ public final class HConstants {
   public static final String CATALOG_FAMILY_STR = "info";
 
   /** The catalog family */
-  public static final byte [] CATALOG_FAMILY = Bytes.toBytes(CATALOG_FAMILY_STR);
+  public static final byte [] CATALOG_FAMILY = toBytes(CATALOG_FAMILY_STR);
 
   /** The RegionInfo qualifier as a string */
   public static final String REGIONINFO_QUALIFIER_STR = "regioninfo";
 
   /** The regioninfo column qualifier */
   public static final byte [] REGIONINFO_QUALIFIER =
-    Bytes.toBytes(REGIONINFO_QUALIFIER_STR);
+    toBytes(REGIONINFO_QUALIFIER_STR);
 
   /** The server column qualifier */
-  public static final byte [] SERVER_QUALIFIER = Bytes.toBytes("server");
+  public static final byte [] SERVER_QUALIFIER = toBytes("server");
 
   /** The startcode column qualifier */
-  public static final byte [] STARTCODE_QUALIFIER = Bytes.toBytes("serverstartcode");
+  public static final byte [] STARTCODE_QUALIFIER = toBytes("serverstartcode");
 
   /** The lower-half split region column qualifier */
-  public static final byte [] SPLITA_QUALIFIER = Bytes.toBytes("splitA");
+  public static final byte [] SPLITA_QUALIFIER = toBytes("splitA");
 
   /** The upper-half split region column qualifier */
-  public static final byte [] SPLITB_QUALIFIER = Bytes.toBytes("splitB");
+  public static final byte [] SPLITB_QUALIFIER = toBytes("splitB");
 
   /**
    * The meta table version column qualifier.
   * We keep current version of the meta table in this column in -ROOT-
   * table: i.e. in the 'info:v' column.
   */
-  public static final byte [] META_VERSION_QUALIFIER = Bytes.toBytes("v");
+  public static final byte [] META_VERSION_QUALIFIER = toBytes("v");
 
   /**
    * The current version of the meta table.
@@ -386,9 +400,6 @@ public final class HConstants {
    */
   public static final int MAX_ROW_LENGTH = Short.MAX_VALUE;
 
-  /** When we encode strings, we always specify UTF8 encoding */
-  public static final String UTF8_ENCODING = "UTF-8";
-
   /**
    * Timestamp to use when we want to refer to the latest cell.
    * This is the timestamp sent by clients when no timestamp is specified on
@@ -404,7 +415,17 @@ public final class HConstants {
   /**
    * LATEST_TIMESTAMP in bytes form
    */
-  public static final byte [] LATEST_TIMESTAMP_BYTES = Bytes.toBytes(LATEST_TIMESTAMP);
+  public static final byte [] LATEST_TIMESTAMP_BYTES = {
+    // big-endian
+    (byte) (LATEST_TIMESTAMP >>> 56),
+    (byte) (LATEST_TIMESTAMP >>> 48),
+    (byte) (LATEST_TIMESTAMP >>> 40),
+    (byte) (LATEST_TIMESTAMP >>> 32),
+    (byte) (LATEST_TIMESTAMP >>> 24),
+    (byte) (LATEST_TIMESTAMP >>> 16),
+    (byte) (LATEST_TIMESTAMP >>> 8),
+    (byte) LATEST_TIMESTAMP,
+  };
 
   /**
    * Define for 'return-all-versions'.
@@ -689,7 +710,7 @@ public final class HConstants {
    * The byte array represents for NO_NEXT_INDEXED_KEY;
    * The actual value is irrelevant because this is always compared by reference.
    */
-  public static final byte [] NO_NEXT_INDEXED_KEY = Bytes.toBytes("NO_NEXT_INDEXED_KEY");
+  public static final byte [] NO_NEXT_INDEXED_KEY = toBytes("NO_NEXT_INDEXED_KEY");
 
   /** delimiter used between portions of a region name */
   public static final int DELIMITER = ',';
@@ -708,7 +729,7 @@ public final class HConstants {
   public static final List<String> HBASE_NON_USER_TABLE_DIRS = new ArrayList<String>(
       Arrays.asList(new String[] { HREGION_LOGDIR_NAME, HREGION_OLDLOGDIR_NAME, CORRUPT_DIR_NAME,
-          Bytes.toString(META_TABLE_NAME), Bytes.toString(ROOT_TABLE_NAME), SPLIT_LOGDIR_NAME,
+          toString(META_TABLE_NAME), toString(ROOT_TABLE_NAME), SPLIT_LOGDIR_NAME,
           HBCK_SIDELINEDIR_NAME, HFILE_ARCHIVE_DIRECTORY }));
 
   private HConstants() {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index 20f2f97..77e5f8a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.util;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.lang.reflect.Field;
 import java.math.BigDecimal;
 import java.math.BigInteger;
@@ -279,8 +278,7 @@ public class Bytes {
 
   /**
    * This method will convert utf8 encoded bytes into a string. If
-   * an UnsupportedEncodingException occurs, this method will eat it
-   * and return null instead.
+   * the given byte array is null, this method will return null.
    *
   * @param b Presumed UTF-8 encoded byte array.
   * @param off offset into array
@@ -294,12 +292,7 @@ public class Bytes {
     if (len == 0) {
       return "";
     }
-    try {
-      return new String(b, off, len, HConstants.UTF8_ENCODING);
-    } catch (UnsupportedEncodingException e) {
-      LOG.error("UTF-8 not supported?", e);
-      return null;
-    }
+    return new String(b, off, len, HConstants.UTF8_CHARSET);
   }
 
   /**
@@ -414,12 +407,7 @@ public class Bytes {
    * @return the byte array
    */
   public static byte[] toBytes(String s) {
-    try {
-      return s.getBytes(HConstants.UTF8_ENCODING);
-    } catch (UnsupportedEncodingException e) {
-      LOG.error("UTF-8 not supported?", e);
-      return null;
-    }
+    return s.getBytes(HConstants.UTF8_CHARSET);
   }
 
   /**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
index 740423f..301303f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
@@ -70,7 +70,7 @@ public class RegexStringComparator extends ByteArrayComparable {
 
   private static final Log LOG = LogFactory.getLog(RegexStringComparator.class);
 
-  private Charset charset = Charset.forName(HConstants.UTF8_ENCODING);
+  private Charset charset = HConstants.UTF8_CHARSET;
 
   private Pattern pattern;
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
index 4738615..9380710 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
@@ -19,11 +19,9 @@
 package org.apache.hadoop.hbase.mapred;
 
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.Map;
 
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.client.Result;
@@ -150,11 +148,7 @@ implements TableMap<ImmutableBytesWritable,Result> {
       if(i > 0) {
         sb.append(" ");
       }
-      try {
-        sb.append(new String(vals[i], HConstants.UTF8_ENCODING));
-      } catch (UnsupportedEncodingException e) {
-        throw new RuntimeException(e);
-      }
+      sb.append(Bytes.toString(vals[i]));
     }
     return new ImmutableBytesWritable(Bytes.toBytes(sb.toString()));
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java
index e9ab390..a637f55 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -27,7 +26,6 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -143,11 +141,7 @@ extends TableMapper<ImmutableBytesWritable,Result> implements Configurable {
      if(i > 0) {
         sb.append(" ");
       }
-      try {
-        sb.append(new String(vals[i], HConstants.UTF8_ENCODING));
-      } catch (UnsupportedEncodingException e) {
-        throw new RuntimeException(e);
-      }
+      sb.append(Bytes.toString(vals[i]));
     }
     return new ImmutableBytesWritable(Bytes.toBytes(sb.toString()));
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
index b33bd4c..7888aba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
@@ -20,7 +20,6 @@
 package org.apache.hadoop.hbase.regionserver.wal;
 
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -47,15 +46,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 public class HLogUtil {
   static final Log LOG = LogFactory.getLog(HLogUtil.class);
 
-  static byte[] COMPLETE_CACHE_FLUSH;
-  static {
-    try {
-      COMPLETE_CACHE_FLUSH = "HBASE::CACHEFLUSH"
-          .getBytes(HConstants.UTF8_ENCODING);
-    } catch (UnsupportedEncodingException e) {
-      assert (false);
-    }
-  }
+  static final byte[] COMPLETE_CACHE_FLUSH = Bytes.toBytes("HBASE::CACHEFLUSH");
 
   /**
    * @param family
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java
index 59c9bef..ffdf6bb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.filter;
 
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Before;
@@ -30,7 +29,6 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
-import java.io.UnsupportedEncodingException;
 
 import static org.junit.Assert.*;
 
@@ -40,15 +38,7 @@ public class TestPrefixFilter {
   static final char FIRST_CHAR = 'a';
   static final char LAST_CHAR = 'e';
   static final String HOST_PREFIX = "org.apache.site-";
-  static byte [] GOOD_BYTES = null;
-
-  static {
-    try {
-      GOOD_BYTES = "abc".getBytes(HConstants.UTF8_ENCODING);
-    } catch (UnsupportedEncodingException e) {
-      fail();
-    }
-  }
+  static final byte [] GOOD_BYTES = Bytes.toBytes("abc");
 
   @Before
   public void setUp() throws Exception {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
index 316a67a..415bb54 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
@@ -115,8 +115,7 @@ public class TestTableMapReduce {
 
       // Get the original value and reverse it
-      String originalValue = new String(value.getValue(INPUT_FAMILY, null),
-        HConstants.UTF8_ENCODING);
+      String originalValue = Bytes.toString(value.getValue(INPUT_FAMILY, null));
       StringBuilder newValue = new StringBuilder(originalValue);
       newValue.reverse();
 
@@ -233,7 +232,7 @@ public class TestTableMapReduce {
         throw new NullPointerException(Bytes.toString(r.getRow()) +
           ": first value is null");
       }
-      first = new String(firstValue,
-        HConstants.UTF8_ENCODING);
+      first = Bytes.toString(firstValue);
 
       String second = "";
       if (secondValue == null) {
@@ -244,7 +243,7 @@ public class TestTableMapReduce {
       for (int i = 0, j = secondValue.length - 1; j >= 0; j--, i++) {
         secondReversed[i] = secondValue[j];
       }
-      second = new String(secondReversed, HConstants.UTF8_ENCODING);
+      second = Bytes.toString(secondReversed);
 
       if (first.compareTo(second) != 0) {
         if (LOG.isDebugEnabled()) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
index 6b44d72..76c8cdc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
@@ -103,8 +103,7 @@ public class TestMultithreadedTableMapper {
             Bytes.toString(INPUT_FAMILY) + "'.");
       }
       // Get the original value and reverse it
-      String originalValue = new String(value.getValue(INPUT_FAMILY, null),
-          HConstants.UTF8_ENCODING);
+      String originalValue = Bytes.toString(value.getValue(INPUT_FAMILY, null));
       StringBuilder newValue = new StringBuilder(originalValue);
       newValue.reverse();
       // Now set the value to be collected
@@ -229,7 +228,7 @@ public class TestMultithreadedTableMapper {
         throw new NullPointerException(Bytes.toString(r.getRow()) +
             ": first value is null");
       }
-      first = new String(firstValue, HConstants.UTF8_ENCODING);
+      first = Bytes.toString(firstValue);
       String second = "";
       if (secondValue == null) {
         throw new NullPointerException(Bytes.toString(r.getRow()) +
@@ -239,7 +238,7 @@ public class TestMultithreadedTableMapper {
       for (int i = 0, j = secondValue.length - 1; j >= 0; j--, i++) {
         secondReversed[i] = secondValue[j];
       }
-      second = new String(secondReversed, HConstants.UTF8_ENCODING);
+      second = Bytes.toString(secondReversed);
       if (first.compareTo(second) != 0) {
         if (LOG.isDebugEnabled()) {
          LOG.debug("second key is not the reverse of first. row=" +
row=" + diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java index ed87668..73b9900 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java @@ -105,8 +105,7 @@ public class TestTableMapReduce { } // Get the original value and reverse it - String originalValue = new String(value.getValue(INPUT_FAMILY, null), - HConstants.UTF8_ENCODING); + String originalValue = Bytes.toString(value.getValue(INPUT_FAMILY, null)); StringBuilder newValue = new StringBuilder(originalValue); newValue.reverse(); // Now set the value to be collected @@ -232,7 +231,7 @@ public class TestTableMapReduce { throw new NullPointerException(Bytes.toString(r.getRow()) + ": first value is null"); } - first = new String(firstValue, HConstants.UTF8_ENCODING); + first = Bytes.toString(firstValue); String second = ""; if (secondValue == null) { @@ -243,7 +242,7 @@ public class TestTableMapReduce { for (int i = 0, j = secondValue.length - 1; j >= 0; j--, i++) { secondReversed[i] = secondValue[j]; } - second = new String(secondReversed, HConstants.UTF8_ENCODING); + second = Bytes.toString(secondReversed); if (first.compareTo(second) != 0) { if (LOG.isDebugEnabled()) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java index 0fd274a..ed5f173 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java @@ -79,7 +79,7 @@ public class TestCompaction extends HBaseTestCase { private static final long MAX_FILES_TO_COMPACT = 10; /** constructor */ - public TestCompaction() throws Exception { + public TestCompaction() { super(); // Set cache flush size to 1MB @@ -87,15 +87,14 @@ public class TestCompaction extends HBaseTestCase { conf.setInt("hbase.hregion.memstore.block.multiplier", 100); compactionThreshold = conf.getInt("hbase.hstore.compactionThreshold", 3); - firstRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING); - secondRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING); + firstRowBytes = START_KEY_BYTES; + secondRowBytes = START_KEY_BYTES.clone(); // Increment the least significant character so we get to next row. secondRowBytes[START_KEY_BYTES.length - 1]++; - thirdRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING); - thirdRowBytes[START_KEY_BYTES.length - 1]++; - thirdRowBytes[START_KEY_BYTES.length - 1]++; - col1 = "column1".getBytes(HConstants.UTF8_ENCODING); - col2 = "column2".getBytes(HConstants.UTF8_ENCODING); + thirdRowBytes = START_KEY_BYTES.clone(); + thirdRowBytes[START_KEY_BYTES.length - 1] += 2; + col1 = Bytes.toBytes("column1"); + col2 = Bytes.toBytes("column2"); } @Override @@ -226,7 +225,7 @@ public class TestCompaction extends HBaseTestCase { // look at the second row // Increment the least significant character so we get to next row. - byte [] secondRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING); + byte [] secondRowBytes = START_KEY_BYTES.clone(); secondRowBytes[START_KEY_BYTES.length - 1]++; // Always 3 versions if that is what max versions is. 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
index 96f90cf..f5cf415 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
@@ -489,7 +489,7 @@ public class TestMemStore extends TestCase {
         m.kvset.size(), m.kvset.size() == 3);
   }
 
-  public void testBinary() throws IOException {
+  public void testBinary() {
     MemStore mc = new MemStore(new Configuration(), KeyValue.ROOT_COMPARATOR);
     final int start = 43;
     final int end = 46;
@@ -499,12 +499,12 @@ public class TestMemStore extends TestCase {
           Bytes.toBytes(".META.,table," + Bytes.toString(kk) + ",1," + k);
 
       KeyValue key = new KeyValue(row, CONTENTS, BASIC, System.currentTimeMillis(),
-          (CONTENTSTR + k).getBytes(HConstants.UTF8_ENCODING));
+          Bytes.toBytes(CONTENTSTR + k));
       mc.add(key);
       System.out.println(key);
 //      key = new KeyValue(row, Bytes.toBytes(ANCHORNUM + k),
 //          System.currentTimeMillis(),
-//          (ANCHORSTR + k).getBytes(HConstants.UTF8_ENCODING));
+//          Bytes.toBytes(ANCHORSTR + k));
 //      mc.add(key);
 //      System.out.println(key);
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
index 0d08655..322b747 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -86,18 +86,17 @@ public class TestScanner extends HBaseTestCase {
   private byte[] firstRowBytes, secondRowBytes, thirdRowBytes;
   final private byte[] col1, col2;
 
-  public TestScanner() throws Exception {
+  public TestScanner() {
     super();
-    firstRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING);
-    secondRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING);
+    firstRowBytes = START_KEY_BYTES;
+    secondRowBytes = START_KEY_BYTES.clone();
     // Increment the least significant character so we get to next row.
     secondRowBytes[START_KEY_BYTES.length - 1]++;
-    thirdRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING);
-    thirdRowBytes[START_KEY_BYTES.length - 1]++;
-    thirdRowBytes[START_KEY_BYTES.length - 1]++;
-    col1 = "column1".getBytes(HConstants.UTF8_ENCODING);
-    col2 = "column2".getBytes(HConstants.UTF8_ENCODING);
+    thirdRowBytes = START_KEY_BYTES.clone();
+    thirdRowBytes[START_KEY_BYTES.length - 1] += 2;
+    col1 = Bytes.toBytes("column1");
+    col2 = Bytes.toBytes("column2");
   }
 
  /**
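
[Annotation, not part of the patch: the hunks share one mechanical idea. The Charset overloads of String.getBytes(Charset) and new String(byte[], Charset) cannot throw the checked UnsupportedEncodingException that the encoding-name overloads declare, so every try/catch around a UTF-8 conversion, along with its dead null-return and assert fallback paths, can be deleted. HConstants additionally gains private toBytes/toString helpers and a hand-inlined big-endian LATEST_TIMESTAMP_BYTES, presumably so it no longer imports Bytes now that Bytes itself reads HConstants.UTF8_CHARSET. A self-contained sketch of both pieces; the class name is illustrative, it assumes LATEST_TIMESTAMP's value of Long.MAX_VALUE, and it uses Charset.forName rather than Java 7's StandardCharsets.UTF_8 because this era of HBase still targets Java 6:

import java.nio.charset.Charset;
import java.util.Arrays;

public class Utf8ConstantsSketch {
  // Mirrors HConstants.UTF8_CHARSET: resolved once, reused everywhere.
  static final Charset UTF8_CHARSET = Charset.forName("UTF-8");

  // No throws clause and no try/catch: the Charset overloads cannot fail
  // the way the String-name overloads can.
  static byte[] toBytes(String s) {
    return s.getBytes(UTF8_CHARSET);
  }

  static String toString(byte[] b) {
    return new String(b, UTF8_CHARSET);
  }

  // Big-endian long encoding, equivalent to the shift expressions the patch
  // inlines for LATEST_TIMESTAMP_BYTES (and to Bytes.toBytes(long)).
  static byte[] longToBytes(long v) {
    byte[] out = new byte[8];
    for (int i = 7; i >= 0; i--) {
      out[i] = (byte) v; // take the low-order byte
      v >>>= 8;          // then expose the next byte up
    }
    return out;
  }

  public static void main(String[] args) {
    System.out.println(toString(toBytes("-ROOT-"))); // -ROOT-
    // Assuming LATEST_TIMESTAMP = Long.MAX_VALUE (0x7FFFFFFFFFFFFFFF):
    byte[] ts = longToBytes(Long.MAX_VALUE);
    System.out.println(Arrays.toString(ts)); // [127, -1, -1, -1, -1, -1, -1, -1]
  }
}
]
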