diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java index ef251df..90f672a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java @@ -345,7 +345,7 @@ public class ServerName implements Comparable, Serializable { int prefixLen = ProtobufUtil.lengthOfPBMagic(); try { MetaRegionServer rss = - MetaRegionServer.newBuilder().mergeFrom(data, prefixLen, data.length - prefixLen).build(); + MetaRegionServer.PARSER.parseFrom(data, prefixLen, data.length - prefixLen); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName sn = rss.getServer(); return new ServerName(sn.getHostName(), sn.getPort(), sn.getStartCode()); } catch (InvalidProtocolBufferException e) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 08dfa11..a976b44 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -132,6 +132,7 @@ import com.google.common.collect.Lists; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.Message; +import com.google.protobuf.Parser; import com.google.protobuf.RpcChannel; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; @@ -2055,17 +2056,19 @@ public final class ProtobufUtil { } public static ScanMetrics toScanMetrics(final byte[] bytes) { - MapReduceProtos.ScanMetrics.Builder builder = MapReduceProtos.ScanMetrics.newBuilder(); + Parser parser = MapReduceProtos.ScanMetrics.PARSER; + MapReduceProtos.ScanMetrics pScanMetrics = null; try { - builder.mergeFrom(bytes); + pScanMetrics = parser.parseFrom(bytes); } catch 
(InvalidProtocolBufferException e) { //Ignored there are just no key values to add. } - MapReduceProtos.ScanMetrics pScanMetrics = builder.build(); ScanMetrics scanMetrics = new ScanMetrics(); - for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) { - if (pair.hasName() && pair.hasValue()) { - scanMetrics.setCounter(pair.getName(), pair.getValue()); + if (pScanMetrics != null) { + for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) { + if (pair.hasName() && pair.hasValue()) { + scanMetrics.setCounter(pair.getName(), pair.getValue()); + } } } return scanMetrics; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java index 43f1e13..b97d791 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java @@ -824,24 +824,34 @@ public final class RequestConverter { return builder.build(); } + /** + * @see {@link #buildRollWALWriterRequest()} + */ + private static RollWALWriterRequest ROLL_WAL_WRITER_REQUEST = + RollWALWriterRequest.newBuilder().build(); + /** * Create a new RollWALWriterRequest * * @return a ReplicateWALEntryRequest */ public static RollWALWriterRequest buildRollWALWriterRequest() { - RollWALWriterRequest.Builder builder = RollWALWriterRequest.newBuilder(); - return builder.build(); + return ROLL_WAL_WRITER_REQUEST; } /** + * @see {@link #buildGetServerInfoRequest()} + */ + private static GetServerInfoRequest GET_SERVER_INFO_REQUEST = + GetServerInfoRequest.newBuilder().build(); + + /** * Create a new GetServerInfoRequest * * @return a GetServerInfoRequest */ public static GetServerInfoRequest buildGetServerInfoRequest() { - GetServerInfoRequest.Builder builder = GetServerInfoRequest.newBuilder(); - return builder.build(); + return GET_SERVER_INFO_REQUEST; } /** @@ -1158,20 +1168,32
@@ public final class RequestConverter { } /** + * @see {@link #buildGetClusterStatusRequest} + */ + private static final GetClusterStatusRequest GET_CLUSTER_STATUS_REQUEST = + GetClusterStatusRequest.newBuilder().build(); + + /** * Creates a protocol buffer GetClusterStatusRequest * * @return A GetClusterStatusRequest */ public static GetClusterStatusRequest buildGetClusterStatusRequest() { - return GetClusterStatusRequest.newBuilder().build(); + return GET_CLUSTER_STATUS_REQUEST; } /** + * @see {@link #buildCatalogScanRequest} + */ + private static final CatalogScanRequest CATALOG_SCAN_REQUEST = + CatalogScanRequest.newBuilder().build(); + + /** * Creates a request for running a catalog scan * @return A {@link CatalogScanRequest} */ public static CatalogScanRequest buildCatalogScanRequest() { - return CatalogScanRequest.newBuilder().build(); + return CATALOG_SCAN_REQUEST; } /** @@ -1183,11 +1205,17 @@ public final class RequestConverter { } /** + * @see {@link #buildIsCatalogJanitorEnabledRequest()} + */ + private static final IsCatalogJanitorEnabledRequest IS_CATALOG_JANITOR_ENABLED_REQUEST = + IsCatalogJanitorEnabledRequest.newBuilder().build(); + + /** * Creates a request for querying the master whether the catalog janitor is enabled * @return A {@link IsCatalogJanitorEnabledRequest} */ public static IsCatalogJanitorEnabledRequest buildIsCatalogJanitorEnabledRequest() { - return IsCatalogJanitorEnabledRequest.newBuilder().build(); + return IS_CATALOG_JANITOR_ENABLED_REQUEST; } /** @@ -1413,4 +1441,4 @@ public final class RequestConverter { } return builder.build(); } -} +} \ No newline at end of file diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java index 5033e74..d95b2bd 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java +++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java @@ -27,7 +27,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; /** @@ -90,14 +89,25 @@ public class HBaseCommonTestingUtility { } String randomStr = UUID.randomUUID().toString(); - Path testPath= new Path(getBaseTestDir(), randomStr); + Path testPath = new Path(getBaseTestDir(), randomStr); this.dataTestDir = new File(testPath.toString()).getAbsoluteFile(); - this.dataTestDir.deleteOnExit(); + // Set this property so if mapreduce jobs run, they will use this as their home dir. + System.setProperty("test.build.dir", this.dataTestDir.toString()); + if (deleteOnExit()) this.dataTestDir.deleteOnExit(); return testPath; } /** + * @return True if we should delete testing dirs on exit. + */ + boolean deleteOnExit() { + String v = System.getProperty("hbase.testing.preserve.testdir"); + // Let default be true, to delete on exit. + return v == null? true: !Boolean.parseBoolean(v); + } + + /** * @return True if we removed the test dirs * @throws IOException */ @@ -146,7 +156,7 @@ public class HBaseCommonTestingUtility { return true; } try { - FileUtils.deleteDirectory(dir); + if (deleteOnExit()) FileUtils.deleteDirectory(dir); return true; } catch (IOException ex) { LOG.warn("Failed to delete " + dir.getAbsolutePath()); diff --git a/hbase-protocol/src/main/protobuf/MasterAdmin.proto b/hbase-protocol/src/main/protobuf/MasterAdmin.proto index f5f5a22..2976089 100644 --- a/hbase-protocol/src/main/protobuf/MasterAdmin.proto +++ b/hbase-protocol/src/main/protobuf/MasterAdmin.proto @@ -404,19 +404,17 @@ service MasterAdminService { /** * Create a snapshot for the given table. 
- * @param snapshot description of the snapshot to take */ rpc Snapshot(TakeSnapshotRequest) returns(TakeSnapshotResponse); /** * List completed snapshots. - * Returns a list of snapshot descriptors for completed snapshots + * @return a list of snapshot descriptors for completed snapshots */ rpc GetCompletedSnapshots(ListSnapshotRequest) returns(ListSnapshotResponse); /** * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot. - * @param snapshotName snapshot to delete */ rpc DeleteSnapshot(DeleteSnapshotRequest) returns(DeleteSnapshotResponse); @@ -427,7 +425,6 @@ service MasterAdminService { /** * Restore a snapshot - * @param snapshot description of the snapshot to restore */ rpc RestoreSnapshot(RestoreSnapshotRequest) returns(RestoreSnapshotResponse); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 0fb9a96..0b67a85 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -1,7 +1,4 @@ - - /** - * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -120,6 +117,8 @@ import org.apache.zookeeper.ZooKeeper.States; * Depends on log4j being on classpath and * hbase-site.xml for logging and test-run configuration. It does not set * logging levels nor make changes to configuration parameters. + *

To preserve test data directories, pass the system property "hbase.testing.preserve.testdir" + * setting it to true. */ @InterfaceAudience.Public @InterfaceStability.Evolving @@ -284,7 +283,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { private void createSubDir(String propertyName, Path parent, String subDirName){ Path newPath= new Path(parent, subDirName); File newDir = new File(newPath.toString()).getAbsoluteFile(); - newDir.deleteOnExit(); + if (deleteOnExit()) newDir.deleteOnExit(); conf.set(propertyName, newDir.getAbsolutePath()); } @@ -350,9 +349,10 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { Path testDir = getDataTestDir("dfscluster_" + UUID.randomUUID().toString()); clusterTestDir = new File(testDir.toString()).getAbsoluteFile(); // Have it cleaned up on exit - clusterTestDir.deleteOnExit(); + boolean b = deleteOnExit(); + if (b) clusterTestDir.deleteOnExit(); conf.set(TEST_DIRECTORY_KEY, clusterTestDir.getPath()); - LOG.info("Created new mini-cluster data directory: " + clusterTestDir); + LOG.info("Created new mini-cluster data directory: " + clusterTestDir + ", deleteOnExit=" + b); } /** @@ -397,13 +397,13 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { FileSystem fs = getTestFileSystem(); if (fs.getUri().getScheme().equals(FileSystem.getLocal(conf).getUri().getScheme())) { File dataTestDir = new File(getDataTestDir().toString()); - dataTestDir.deleteOnExit(); + if (deleteOnExit()) dataTestDir.deleteOnExit(); dataTestDirOnTestFS = new Path(dataTestDir.getAbsolutePath()); } else { Path base = getBaseTestDirOnTestFS(); String randomStr = UUID.randomUUID().toString(); dataTestDirOnTestFS = new Path(base, randomStr); - fs.deleteOnExit(dataTestDirOnTestFS); + if (deleteOnExit()) fs.deleteOnExit(dataTestDirOnTestFS); } } diff --git a/pom.xml b/pom.xml index d64be23..78ae543 100644 --- a/pom.xml +++ b/pom.xml @@ -366,6 +366,10 @@ + Arun Staging 2.1.0-beta RCs + 
https://repository.apache.org/content/repositories/orgapachehadoop-099/ + + cloudbees netty http://repository-netty.forge.cloudbees.com/snapshot/ @@ -881,7 +885,7 @@ ${maven.build.timestamp} 1.6 - 2.0.5-alpha + 2.1.0-beta 1.2.1 1.2 1.7 @@ -905,7 +909,7 @@ 2.00 1.2.17 1.9.0 - 2.4.1 + 2.5.0 1.0.1 0.9.0 3.4.5