diff --git a/pom.xml b/pom.xml
index af285c0..c83d3f9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1988,7 +1988,7 @@
- 0.23.3
+ 0.23.7-SNAPSHOT
1.6.1
diff --git a/src/main/java/org/apache/hadoop/hbase/HConstants.java b/src/main/java/org/apache/hadoop/hbase/HConstants.java
index e70bd3b..6180386 100644
--- a/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -27,6 +27,7 @@ import java.util.regex.Pattern;
import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.hbase.ipc.HRegionInterface;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -298,10 +299,16 @@ public final class HConstants {
// should go down.
/** The root table's name.*/
- public static final byte [] ROOT_TABLE_NAME = Bytes.toBytes("-ROOT-");
+ public static final byte [] ROOT_TABLE_QUALIFIER = Bytes.toBytes("-ROOT-");
+ public static final byte [] ROOT_TABLE_NAME =
+ TableName.valueOf(Bytes.toBytes(NamespaceDescriptor.SYSTEM_NAMESPACE.getName()),
+ ROOT_TABLE_QUALIFIER).toBytes();
/** The META table's name. */
- public static final byte [] META_TABLE_NAME = Bytes.toBytes(".META.");
+ public static final byte [] META_TABLE_QUALIFIER = Bytes.toBytes(".META.");
+ public static final byte [] META_TABLE_NAME =
+ TableName.valueOf(Bytes.toBytes(NamespaceDescriptor.SYSTEM_NAMESPACE.getName()),
+ META_TABLE_QUALIFIER).toBytes();
/** delimiter used between portions of a region name */
public static final int META_ROW_DELIMITER = ',';
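With this hunk, ROOT_TABLE_NAME and META_TABLE_NAME become fully qualified with the
system namespace rather than the bare "-ROOT-"/".META." qualifiers. A minimal sketch of
the values the new constants resolve to, assuming the "-hbase-" system namespace and "."
delimiter introduced in NamespaceDescriptor below:

    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CatalogNames {
      public static void main(String[] args) {
        // The catalog table names now carry the system namespace prefix.
        System.out.println(Bytes.toString(HConstants.ROOT_TABLE_NAME)); // -hbase-.-ROOT-
        System.out.println(Bytes.toString(HConstants.META_TABLE_NAME)); // -hbase-..META.
      }
    }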
diff --git a/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
index 0ee74e7..274ba36 100644
--- a/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
+++ b/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
@@ -139,11 +139,11 @@ implements WritableComparable<HRegionInfo> {
/** HRegionInfo for root region */
public static final HRegionInfo ROOT_REGIONINFO =
- new HRegionInfo(0L, Bytes.toBytes("-ROOT-"));
+ new HRegionInfo(0L, HConstants.ROOT_TABLE_NAME);
/** HRegionInfo for first meta region */
public static final HRegionInfo FIRST_META_REGIONINFO =
- new HRegionInfo(1L, Bytes.toBytes(".META."));
+ new HRegionInfo(1L, HConstants.META_TABLE_NAME);
private byte [] endKey = HConstants.EMPTY_BYTE_ARRAY;
// This flag is in the parent of a split while the parent is still referenced
diff --git a/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index 934587d..9f48953 100644
--- a/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -36,8 +36,10 @@ import java.util.regex.Matcher;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.WritableComparable;
/**
@@ -56,9 +58,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
*/
private static final byte TABLE_DESCRIPTOR_VERSION = 5;
- private byte [] name = HConstants.EMPTY_BYTE_ARRAY;
-
- private String nameAsString = "";
+ private TableName name = TableName.valueOf("","");
/**
* A map which holds the metadata information of the table. This metadata
@@ -175,10 +175,13 @@ public class HTableDescriptor implements WritableComparable {
* INTERNAL Private constructor used internally creating table descriptors for
* catalog tables, .META. and -ROOT-.
*/
- protected HTableDescriptor(final byte [] name, HColumnDescriptor[] families) {
- this.name = name.clone();
- this.nameAsString = Bytes.toString(this.name);
- setMetaFlags(name);
+ protected HTableDescriptor(final byte[] name, HColumnDescriptor[] families) {
+ this(TableName.valueOf(name),
+ families);
+ }
+
+ protected HTableDescriptor(final TableName name, HColumnDescriptor[] families) {
+ setName(name);
for(HColumnDescriptor descriptor : families) {
this.families.put(descriptor.getName(), descriptor);
}
@@ -188,11 +191,16 @@ public class HTableDescriptor implements WritableComparable {
* INTERNAL Private constructor used internally creating table descriptors for
* catalog tables, .META. and -ROOT-.
*/
- protected HTableDescriptor(final byte [] name, HColumnDescriptor[] families,
+ protected HTableDescriptor(final byte[] name, HColumnDescriptor[] families,
Map<ImmutableBytesWritable, ImmutableBytesWritable> values) {
- this.name = name.clone();
- this.nameAsString = Bytes.toString(this.name);
- setMetaFlags(name);
+ this(TableName.valueOf(name),
+ families,
+ values);
+ }
+
+ protected HTableDescriptor(final TableName name, HColumnDescriptor[] families,
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> values) {
+ setName(name);
for(HColumnDescriptor descriptor : families) {
this.families.put(descriptor.getName(), descriptor);
}
@@ -234,8 +242,7 @@ public class HTableDescriptor implements WritableComparable {
public HTableDescriptor(final byte [] name) {
super();
setMetaFlags(this.name);
- this.name = this.isMetaRegion()? name: isLegalTableName(name);
- this.nameAsString = Bytes.toString(this.name);
+ setName(this.isMetaRegion()? name: isLegalTableName(name));
}
/**
@@ -247,8 +254,7 @@ public class HTableDescriptor implements WritableComparable {
*/
public HTableDescriptor(final HTableDescriptor desc) {
super();
- this.name = desc.name.clone();
- this.nameAsString = Bytes.toString(this.name);
+ setName(desc.name);
setMetaFlags(this.name);
for (HColumnDescriptor c: desc.families.values()) {
this.families.put(c.getName(), new HColumnDescriptor(c));
@@ -266,10 +272,10 @@ public class HTableDescriptor implements WritableComparable {
* Called by constructors.
* @param name
*/
- private void setMetaFlags(final byte [] name) {
- setRootRegion(Bytes.equals(name, HConstants.ROOT_TABLE_NAME));
+ private void setMetaFlags(final TableName name) {
+ setRootRegion(Bytes.equals(name.getName(), HConstants.ROOT_TABLE_NAME));
setMetaRegion(isRootRegion() ||
- Bytes.equals(name, HConstants.META_TABLE_NAME));
+ Bytes.equals(name.getName(), HConstants.META_TABLE_NAME));
}
/**
@@ -551,7 +557,7 @@ public class HTableDescriptor implements WritableComparable {
* @return name of table
*/
public byte [] getName() {
- return name;
+ return name.getName();
}
/**
@@ -560,7 +566,11 @@ public class HTableDescriptor implements WritableComparable {
* @return name of table as a String
*/
public String getNameAsString() {
- return this.nameAsString;
+ return name.getNameAsString();
+ }
+
+ public TableName getTableName() {
+ return name;
}
/**
@@ -583,8 +593,11 @@ public class HTableDescriptor implements WritableComparable {
* @param name name of table
*/
public void setName(byte[] name) {
+ setName(TableName.valueOf(name));
+ }
+
+ public void setName(TableName name) {
this.name = name;
- this.nameAsString = Bytes.toString(this.name);
setMetaFlags(this.name);
}
@@ -681,7 +694,7 @@ public class HTableDescriptor implements WritableComparable {
s.append('{');
s.append(HConstants.NAME);
s.append(" => '");
- s.append(Bytes.toString(name));
+ s.append(name);
s.append("'");
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
values.entrySet()) {
@@ -720,7 +733,7 @@ public class HTableDescriptor implements WritableComparable {
s.append('{');
s.append(HConstants.NAME);
s.append(" => '");
- s.append(Bytes.toString(name));
+ s.append(name);
s.append("'");
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
values.entrySet()) {
@@ -785,7 +798,7 @@ public class HTableDescriptor implements WritableComparable {
*/
@Override
public int hashCode() {
- int result = Bytes.hashCode(this.name);
+ int result = this.name.hashCode();
result ^= Byte.valueOf(TABLE_DESCRIPTOR_VERSION).hashCode();
if (this.families != null && this.families.size() > 0) {
for (HColumnDescriptor e: this.families.values()) {
@@ -807,8 +820,7 @@ public class HTableDescriptor implements WritableComparable {
if (version < 3)
throw new IOException("versions < 3 are not supported (and never existed!?)");
// version 3+
- name = Bytes.readByteArray(in);
- nameAsString = Bytes.toString(this.name);
+ name = TableName.valueOf(Bytes.readByteArray(in));
setRootRegion(in.readBoolean());
setMetaRegion(in.readBoolean());
values.clear();
@@ -838,8 +850,8 @@ public class HTableDescriptor implements WritableComparable {
*/
@Override
public void write(DataOutput out) throws IOException {
- out.writeInt(TABLE_DESCRIPTOR_VERSION);
- Bytes.writeByteArray(out, name);
+ out.writeInt(TABLE_DESCRIPTOR_VERSION);
+ Bytes.writeByteArray(out, name.toBytes());
out.writeBoolean(isRootRegion());
out.writeBoolean(isMetaRegion());
out.writeInt(values.size());
@@ -867,7 +879,7 @@ public class HTableDescriptor implements WritableComparable {
*/
@Override
public int compareTo(final HTableDescriptor other) {
- int result = Bytes.compareTo(this.name, other.name);
+ int result = this.name.compareTo(other.name);
if (result == 0) {
result = families.size() - other.families.size();
}
@@ -1134,12 +1146,16 @@ public class HTableDescriptor implements WritableComparable {
* @return {@link Path} for table
*/
public static Path getTableDir(Path rootdir, final byte [] tableName) {
- return new Path(rootdir, Bytes.toString(tableName));
+ TableName name = TableName.valueOf(tableName);
+ return new Path(rootdir,
+ new Path(name.getNamespaceAsString().length() < 1 ? "default" : name.getNamespaceAsString(),
+ name.getQualifierAsString()));
}
/** Table descriptor for -ROOT- catalog table */
public static final HTableDescriptor ROOT_TABLEDESC = new HTableDescriptor(
- HConstants.ROOT_TABLE_NAME,
+ TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE.getName(),
+ Bytes.toString(HConstants.ROOT_TABLE_QUALIFIER)),
new HColumnDescriptor[] {
new HColumnDescriptor(HConstants.CATALOG_FAMILY)
// Ten is arbitrary number. Keep versions to help debugging.
@@ -1152,7 +1168,9 @@ public class HTableDescriptor implements WritableComparable {
/** Table descriptor for .META. catalog table */
public static final HTableDescriptor META_TABLEDESC = new HTableDescriptor(
- HConstants.META_TABLE_NAME, new HColumnDescriptor[] {
+ TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE.getName(),
+ Bytes.toString(HConstants.META_TABLE_QUALIFIER)),
+ new HColumnDescriptor[] {
new HColumnDescriptor(HConstants.CATALOG_FAMILY)
// Ten is arbitrary number. Keep versions to help debugging.
.setMaxVersions(10)
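getTableDir now maps a table to <rootdir>/<namespace>/<qualifier>, with an empty
namespace falling back to a "default" directory. A short sketch of the resulting
layout, using hypothetical table names:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TableDirLayout {
      public static void main(String[] args) {
        Path root = new Path("/hbase");
        // Unqualified name: the empty default namespace maps to "default".
        System.out.println(HTableDescriptor.getTableDir(root, Bytes.toBytes("t1")));
        // prints /hbase/default/t1
        // Dotted name: the namespace portion becomes the parent directory.
        System.out.println(HTableDescriptor.getTableDir(root, Bytes.toBytes("ns1.t1")));
        // prints /hbase/ns1/t1
      }
    }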
diff --git a/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 39d1f09..fbfdfbb 100644
--- a/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -1941,7 +1941,7 @@ public class KeyValue implements Writable, HeapSize {
// Rows look like this: .META.,ROW_FROM_META,RID
// LOG.info("ROOT " + Bytes.toString(left, loffset, llength) +
// "---" + Bytes.toString(right, roffset, rlength));
- final int metalength = 7; // '.META.' length
+ final int metalength = HConstants.META_TABLE_NAME.length + 1; // META table name length plus ',' delimiter
int lmetaOffsetPlusDelimiter = loffset + metalength;
int leftFarDelimiter = getDelimiterInReverse(left,
lmetaOffsetPlusDelimiter,
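The hard-coded 7 here was ".META.".length() plus one for the ',' row delimiter; the
derived form keeps the same shape but follows the namespaced name. A sketch of the
arithmetic, assuming the HConstants values from the hunk above:

    import org.apache.hadoop.hbase.HConstants;

    public class MetaLength {
      public static void main(String[] args) {
        int before = ".META.".length() + 1;                // 7, the old constant
        int after = HConstants.META_TABLE_NAME.length + 1; // 15 for "-hbase-..META." under this patch
        System.out.println(before + " -> " + after);
      }
    }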
diff --git a/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java b/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
new file mode 100644
index 0000000..68d8dd6
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
@@ -0,0 +1,159 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase;
+
+import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.Comparator;
+import java.util.NavigableSet;
+import java.util.TreeSet;
+
+public class NamespaceDescriptor implements Serializable {
+
+ public static NamespaceDescriptor DEFAULT_NAMESPACE;
+ public static NamespaceDescriptor SYSTEM_NAMESPACE;
+ public static String NAMESPACE_DELIM = ".";
+
+ static {
+ DEFAULT_NAMESPACE = NamespaceDescriptor.create("").build();
+ SYSTEM_NAMESPACE = NamespaceDescriptor.create("-hbase-").build();
+ }
+
+ private String name;
+ private String groupName;
+ private long maxTables;
+ private long maxRegions;
+ private NavigableSet<String> tables;
+
+ public static final Comparator<NamespaceDescriptor>
+ NAMESPACE_DESCRIPTOR_COMPARATOR = new Comparator<NamespaceDescriptor>() {
+ @Override
+ public int compare(NamespaceDescriptor namespaceDescriptor,
+ NamespaceDescriptor namespaceDescriptor2) {
+ return namespaceDescriptor.getName().compareTo(namespaceDescriptor2.getName());
+ }
+ };
+
+ private NamespaceDescriptor(String name) {
+ this.name = name;
+ }
+
+ public String getGroupName() {
+ return groupName;
+ }
+
+ public long getMaxTables() {
+ return maxTables;
+ }
+
+ public long getMaxRegions() {
+ return maxRegions;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public NavigableSet<String> getTables() {
+ return tables;
+ }
+
+ public static Builder create(String name) {
+ return new Builder(name);
+ }
+
+ public static byte[] toBytes(NamespaceDescriptor ns) {
+ NamespaceProtos.NamespaceDescriptor.Builder b =
+ NamespaceProtos.NamespaceDescriptor.newBuilder()
+ .setName(ns.getName())
+ .setMaxRegions(ns.getMaxRegions())
+ .setMaxTables(ns.getMaxTables())
+ .setTables(NamespaceProtos.TableList.newBuilder().addAllTable(ns.getTables()));
+ if(ns.getGroupName() == null) {
+ b.clearGroupName();
+ } else {
+ b.setGroupName(ns.getGroupName());
+ }
+ return b.build().toByteArray();
+ }
+
+ public static NamespaceDescriptor fromBytes(byte[] data) throws IOException {
+ NamespaceProtos.NamespaceDescriptor desc =
+ NamespaceProtos.NamespaceDescriptor.parseFrom(data);
+ NamespaceDescriptor ns =
+ NamespaceDescriptor.create(desc.getName())
+ .setGroupName(desc.hasGroupName() ? desc.getGroupName() : null)
+ .setMaxRegions(desc.getMaxRegions())
+ .setMaxTables(desc.getMaxTables())
+ .setTables(Sets.newTreeSet(desc.getTables().getTableList()))
+ .build();
+ return ns;
+ }
+
+ public static class Builder {
+ private String bName;
+ private String bGroupName;
+ private long bMaxTables;
+ private long bMaxRegions;
+ private NavigableSet<String> tables;
+
+ private Builder(String name) {
+ this.bName = name;
+ }
+
+ public Builder setMaxTables(long numTables){
+ this.bMaxTables = numTables;
+ return this;
+ }
+
+ public Builder setMaxRegions(long maxRegions){
+ this.bMaxRegions = maxRegions;
+ return this;
+ }
+
+ public Builder setGroupName(String groupName){
+ this.bGroupName = groupName;
+ return this;
+ }
+
+ public Builder setTables(NavigableSet<String> tables) {
+ this.tables = tables;
+ return this;
+ }
+
+ public NamespaceDescriptor build() {
+ if(this.bName == null){
+ throw new IllegalArgumentException("A name has to be specified in a namespace.");
+ }
+
+ NamespaceDescriptor desc = new NamespaceDescriptor(this.bName);
+ desc.groupName = this.bGroupName;
+ desc.maxRegions = this.bMaxRegions;
+ desc.maxTables = this.bMaxTables;
+ desc.tables = this.tables == null ? new TreeSet<String>() : this.tables;
+ return desc;
+ }
+ }
+}
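NamespaceDescriptor instances are built through the Builder and round-trip through
their protobuf form via toBytes/fromBytes. A minimal sketch, with a hypothetical
namespace name and quotas:

    import org.apache.hadoop.hbase.NamespaceDescriptor;

    public class NamespaceRoundTrip {
      public static void main(String[] args) throws Exception {
        NamespaceDescriptor ns = NamespaceDescriptor.create("ns1") // hypothetical name
            .setGroupName("g1")
            .setMaxTables(100)
            .setMaxRegions(1000)
            .build(); // build() substitutes an empty table set when none is supplied
        byte[] data = NamespaceDescriptor.toBytes(ns);
        NamespaceDescriptor copy = NamespaceDescriptor.fromBytes(data);
        System.out.println(ns.getName().equals(copy.getName())); // true
      }
    }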
diff --git a/src/main/java/org/apache/hadoop/hbase/TableName.java b/src/main/java/org/apache/hadoop/hbase/TableName.java
new file mode 100644
index 0000000..d51f8c1
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/hbase/TableName.java
@@ -0,0 +1,130 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase;
+
+import com.google.common.base.Splitter;
+import com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import java.util.List;
+
+public class TableName implements Comparable<TableName> {
+
+ private byte[] name;
+ private String nameAsString;
+ private byte[] namespace;
+ private String namespaceAsString;
+ private byte[] qualifier;
+ private String qualifierAsString;
+
+ private TableName() {}
+
+ public byte[] getName() {
+ return name;
+ }
+
+ public String getNameAsString() {
+ return nameAsString;
+ }
+
+ public byte[] getNamespace() {
+ return namespace;
+ }
+
+ public String getNamespaceAsString() {
+ return namespaceAsString;
+ }
+
+ public byte[] getQualifier() {
+ return qualifier;
+ }
+
+ public String getQualifierAsString() {
+ return qualifierAsString;
+ }
+
+ public byte[] toBytes() {
+ return name;
+ }
+
+ @Override
+ public String toString() {
+ return nameAsString;
+ }
+
+ public static TableName valueOf(byte[] namespace, byte[] qualifier) {
+ TableName ret = new TableName();
+ ret.namespace = namespace;
+ ret.namespaceAsString = Bytes.toString(namespace);
+ ret.qualifier = qualifier;
+ ret.qualifierAsString = Bytes.toString(qualifier);
+ ret.nameAsString = createFullyQualified(ret.namespaceAsString, ret.qualifierAsString);
+ ret.name = Bytes.toBytes(ret.nameAsString);
+ return ret;
+ }
+
+ public static TableName valueOf(String namespaceAsString, String qualifierAsString) {
+ TableName ret = new TableName();
+ ret.namespace = Bytes.toBytes(namespaceAsString);
+ ret.namespaceAsString = namespaceAsString;
+ ret.qualifier = Bytes.toBytes(qualifierAsString);
+ ret.qualifierAsString = qualifierAsString;
+ ret.nameAsString = createFullyQualified(ret.namespaceAsString, ret.qualifierAsString);
+ ret.name = Bytes.toBytes(ret.nameAsString);
+ return ret;
+ }
+
+ public static TableName valueOf(byte[] name) {
+ return valueOf(Bytes.toString(name));
+ }
+
+ public static TableName valueOf(String name) {
+ List<String> list = Lists.newArrayList(Splitter.on('.').limit(2).split(name));
+ if (list.size() == 2) {
+ return TableName.valueOf(list.get(0), list.get(1));
+ } else {
+ return TableName.valueOf(NamespaceDescriptor.DEFAULT_NAMESPACE.getName(), list.get(0));
+ }
+ }
+
+ private static String createFullyQualified(String namespace, String tableQualifier) {
+ if (namespace.length() < 1) {
+ return tableQualifier;
+ }
+ return namespace + NamespaceDescriptor.NAMESPACE_DELIM + tableQualifier;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ TableName tableName = (TableName) o;
+
+ if (!nameAsString.equals(tableName.nameAsString)) return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return nameAsString.hashCode();
+ }
+
+ @Override
+ public int compareTo(TableName tableName) {
+ return this.nameAsString.compareTo(tableName.getNameAsString());
+ }
+}
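TableName.valueOf(String) splits on the first '.' only: a dotted name yields a
namespace plus qualifier, while an undotted name lands in the default (empty)
namespace. A small sketch with hypothetical names:

    import org.apache.hadoop.hbase.TableName;

    public class TableNameParsing {
      public static void main(String[] args) {
        TableName t1 = TableName.valueOf("ns1.t1");
        System.out.println(t1.getNamespaceAsString()); // ns1
        System.out.println(t1.getQualifierAsString()); // t1
        // No dot: default namespace, so the qualified form is just the qualifier.
        TableName t2 = TableName.valueOf("t2");
        System.out.println(t2.getNameAsString());      // t2
      }
    }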
diff --git a/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java b/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
index 94ff5bc..c62963a 100644
--- a/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
+++ b/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.backup.HFileArchiver;
import org.apache.hadoop.hbase.catalog.MetaEditor;
import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.util.Bytes;
@@ -310,8 +311,8 @@ class CatalogJanitor extends Chore {
}
FileSystem fs = this.services.getMasterFileSystem().getFileSystem();
Path rootdir = this.services.getMasterFileSystem().getRootDir();
- Path tabledir = new Path(rootdir, split.getTableNameAsString());
- Path regiondir = new Path(tabledir, split.getEncodedName());
+ Path tabledir = HTableDescriptor.getTableDir(rootdir, split.getTableName());
+ Path regiondir = HRegion.getRegionDir(rootdir, split);
exists = fs.exists(regiondir);
if (!exists) {
LOG.warn("Daughter regiondir does not exist: " + regiondir.toString());
diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/NamespaceProtos.java b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/NamespaceProtos.java
new file mode 100644
index 0000000..0046c7f
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/NamespaceProtos.java
@@ -0,0 +1,1923 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: src/main/protobuf/Namespace.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class NamespaceProtos {
+ private NamespaceProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface NamespaceDescriptorListOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .NamespaceDescriptor namespaceDescriptor = 1;
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor>
+ getNamespaceDescriptorList();
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor getNamespaceDescriptor(int index);
+ int getNamespaceDescriptorCount();
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorOrBuilder>
+ getNamespaceDescriptorOrBuilderList();
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(
+ int index);
+ }
+ public static final class NamespaceDescriptorList extends
+ com.google.protobuf.GeneratedMessage
+ implements NamespaceDescriptorListOrBuilder {
+ // Use NamespaceDescriptorList.newBuilder() to construct.
+ private NamespaceDescriptorList(Builder builder) {
+ super(builder);
+ }
+ private NamespaceDescriptorList(boolean noInit) {}
+
+ private static final NamespaceDescriptorList defaultInstance;
+ public static NamespaceDescriptorList getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public NamespaceDescriptorList getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_NamespaceDescriptorList_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_NamespaceDescriptorList_fieldAccessorTable;
+ }
+
+ // repeated .NamespaceDescriptor namespaceDescriptor = 1;
+ public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor> namespaceDescriptor_;
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor> getNamespaceDescriptorList() {
+ return namespaceDescriptor_;
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorOrBuilder>
+ getNamespaceDescriptorOrBuilderList() {
+ return namespaceDescriptor_;
+ }
+ public int getNamespaceDescriptorCount() {
+ return namespaceDescriptor_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor getNamespaceDescriptor(int index) {
+ return namespaceDescriptor_.get(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(
+ int index) {
+ return namespaceDescriptor_.get(index);
+ }
+
+ private void initFields() {
+ namespaceDescriptor_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getNamespaceDescriptorCount(); i++) {
+ if (!getNamespaceDescriptor(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < namespaceDescriptor_.size(); i++) {
+ output.writeMessage(1, namespaceDescriptor_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < namespaceDescriptor_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, namespaceDescriptor_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList other = (org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList) obj;
+
+ boolean result = true;
+ result = result && getNamespaceDescriptorList()
+ .equals(other.getNamespaceDescriptorList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getNamespaceDescriptorCount() > 0) {
+ hash = (37 * hash) + NAMESPACEDESCRIPTOR_FIELD_NUMBER;
+ hash = (53 * hash) + getNamespaceDescriptorList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorListOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_NamespaceDescriptorList_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_NamespaceDescriptorList_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getNamespaceDescriptorFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (namespaceDescriptorBuilder_ == null) {
+ namespaceDescriptor_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ namespaceDescriptorBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList build() {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList result = new org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList(this);
+ int from_bitField0_ = bitField0_;
+ if (namespaceDescriptorBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ namespaceDescriptor_ = java.util.Collections.unmodifiableList(namespaceDescriptor_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.namespaceDescriptor_ = namespaceDescriptor_;
+ } else {
+ result.namespaceDescriptor_ = namespaceDescriptorBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList.getDefaultInstance()) return this;
+ if (namespaceDescriptorBuilder_ == null) {
+ if (!other.namespaceDescriptor_.isEmpty()) {
+ if (namespaceDescriptor_.isEmpty()) {
+ namespaceDescriptor_ = other.namespaceDescriptor_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureNamespaceDescriptorIsMutable();
+ namespaceDescriptor_.addAll(other.namespaceDescriptor_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.namespaceDescriptor_.isEmpty()) {
+ if (namespaceDescriptorBuilder_.isEmpty()) {
+ namespaceDescriptorBuilder_.dispose();
+ namespaceDescriptorBuilder_ = null;
+ namespaceDescriptor_ = other.namespaceDescriptor_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ namespaceDescriptorBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getNamespaceDescriptorFieldBuilder() : null;
+ } else {
+ namespaceDescriptorBuilder_.addAllMessages(other.namespaceDescriptor_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getNamespaceDescriptorCount(); i++) {
+ if (!getNamespaceDescriptor(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.newBuilder();
+ input.readMessage(subBuilder, extensionRegistry);
+ addNamespaceDescriptor(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated .NamespaceDescriptor namespaceDescriptor = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor> namespaceDescriptor_ =
+ java.util.Collections.emptyList();
+ private void ensureNamespaceDescriptorIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ namespaceDescriptor_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor>(namespaceDescriptor_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_;
+
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor> getNamespaceDescriptorList() {
+ if (namespaceDescriptorBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(namespaceDescriptor_);
+ } else {
+ return namespaceDescriptorBuilder_.getMessageList();
+ }
+ }
+ public int getNamespaceDescriptorCount() {
+ if (namespaceDescriptorBuilder_ == null) {
+ return namespaceDescriptor_.size();
+ } else {
+ return namespaceDescriptorBuilder_.getCount();
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor getNamespaceDescriptor(int index) {
+ if (namespaceDescriptorBuilder_ == null) {
+ return namespaceDescriptor_.get(index);
+ } else {
+ return namespaceDescriptorBuilder_.getMessage(index);
+ }
+ }
+ public Builder setNamespaceDescriptor(
+ int index, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor value) {
+ if (namespaceDescriptorBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureNamespaceDescriptorIsMutable();
+ namespaceDescriptor_.set(index, value);
+ onChanged();
+ } else {
+ namespaceDescriptorBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ public Builder setNamespaceDescriptor(
+ int index, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder builderForValue) {
+ if (namespaceDescriptorBuilder_ == null) {
+ ensureNamespaceDescriptorIsMutable();
+ namespaceDescriptor_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ namespaceDescriptorBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor value) {
+ if (namespaceDescriptorBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureNamespaceDescriptorIsMutable();
+ namespaceDescriptor_.add(value);
+ onChanged();
+ } else {
+ namespaceDescriptorBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ public Builder addNamespaceDescriptor(
+ int index, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor value) {
+ if (namespaceDescriptorBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureNamespaceDescriptorIsMutable();
+ namespaceDescriptor_.add(index, value);
+ onChanged();
+ } else {
+ namespaceDescriptorBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ public Builder addNamespaceDescriptor(
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder builderForValue) {
+ if (namespaceDescriptorBuilder_ == null) {
+ ensureNamespaceDescriptorIsMutable();
+ namespaceDescriptor_.add(builderForValue.build());
+ onChanged();
+ } else {
+ namespaceDescriptorBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addNamespaceDescriptor(
+ int index, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder builderForValue) {
+ if (namespaceDescriptorBuilder_ == null) {
+ ensureNamespaceDescriptorIsMutable();
+ namespaceDescriptor_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ namespaceDescriptorBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addAllNamespaceDescriptor(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor> values) {
+ if (namespaceDescriptorBuilder_ == null) {
+ ensureNamespaceDescriptorIsMutable();
+ super.addAll(values, namespaceDescriptor_);
+ onChanged();
+ } else {
+ namespaceDescriptorBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ public Builder clearNamespaceDescriptor() {
+ if (namespaceDescriptorBuilder_ == null) {
+ namespaceDescriptor_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ namespaceDescriptorBuilder_.clear();
+ }
+ return this;
+ }
+ public Builder removeNamespaceDescriptor(int index) {
+ if (namespaceDescriptorBuilder_ == null) {
+ ensureNamespaceDescriptorIsMutable();
+ namespaceDescriptor_.remove(index);
+ onChanged();
+ } else {
+ namespaceDescriptorBuilder_.remove(index);
+ }
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder getNamespaceDescriptorBuilder(
+ int index) {
+ return getNamespaceDescriptorFieldBuilder().getBuilder(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(
+ int index) {
+ if (namespaceDescriptorBuilder_ == null) {
+ return namespaceDescriptor_.get(index); } else {
+ return namespaceDescriptorBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorOrBuilder>
+ getNamespaceDescriptorOrBuilderList() {
+ if (namespaceDescriptorBuilder_ != null) {
+ return namespaceDescriptorBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(namespaceDescriptor_);
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder addNamespaceDescriptorBuilder() {
+ return getNamespaceDescriptorFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.getDefaultInstance());
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder addNamespaceDescriptorBuilder(
+ int index) {
+ return getNamespaceDescriptorFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.getDefaultInstance());
+ }
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder>
+ getNamespaceDescriptorBuilderList() {
+ return getNamespaceDescriptorFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorOrBuilder>
+ getNamespaceDescriptorFieldBuilder() {
+ if (namespaceDescriptorBuilder_ == null) {
+ namespaceDescriptorBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorOrBuilder>(
+ namespaceDescriptor_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ namespaceDescriptor_ = null;
+ }
+ return namespaceDescriptorBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:NamespaceDescriptorList)
+ }
+
+ static {
+ defaultInstance = new NamespaceDescriptorList(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:NamespaceDescriptorList)
+ }
+
+ public interface NamespaceDescriptorOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string name = 1;
+ boolean hasName();
+ String getName();
+
+ // optional string groupName = 2;
+ boolean hasGroupName();
+ String getGroupName();
+
+ // required .TableList tables = 3;
+ boolean hasTables();
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList getTables();
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableListOrBuilder getTablesOrBuilder();
+
+ // required int64 maxRegions = 4;
+ boolean hasMaxRegions();
+ long getMaxRegions();
+
+ // required int64 maxTables = 5;
+ boolean hasMaxTables();
+ long getMaxTables();
+ }
+ public static final class NamespaceDescriptor extends
+ com.google.protobuf.GeneratedMessage
+ implements NamespaceDescriptorOrBuilder {
+ // Use NamespaceDescriptor.newBuilder() to construct.
+ private NamespaceDescriptor(Builder builder) {
+ super(builder);
+ }
+ private NamespaceDescriptor(boolean noInit) {}
+
+ private static final NamespaceDescriptor defaultInstance;
+ public static NamespaceDescriptor getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public NamespaceDescriptor getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_NamespaceDescriptor_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_NamespaceDescriptor_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required string name = 1;
+ public static final int NAME_FIELD_NUMBER = 1;
+ private java.lang.Object name_;
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getName() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ name_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional string groupName = 2;
+ public static final int GROUPNAME_FIELD_NUMBER = 2;
+ private java.lang.Object groupName_;
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ groupName_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // required .TableList tables = 3;
+ public static final int TABLES_FIELD_NUMBER = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList tables_;
+ public boolean hasTables() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList getTables() {
+ return tables_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableListOrBuilder getTablesOrBuilder() {
+ return tables_;
+ }
+
+ // required int64 maxRegions = 4;
+ public static final int MAXREGIONS_FIELD_NUMBER = 4;
+ private long maxRegions_;
+ public boolean hasMaxRegions() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public long getMaxRegions() {
+ return maxRegions_;
+ }
+
+ // required int64 maxTables = 5;
+ public static final int MAXTABLES_FIELD_NUMBER = 5;
+ private long maxTables_;
+ public boolean hasMaxTables() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ public long getMaxTables() {
+ return maxTables_;
+ }
+
+ private void initFields() {
+ name_ = "";
+ groupName_ = "";
+ tables_ = org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.getDefaultInstance();
+ maxRegions_ = 0L;
+ maxTables_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasTables()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasMaxRegions()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasMaxTables()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getGroupNameBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeMessage(3, tables_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeInt64(4, maxRegions_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeInt64(5, maxTables_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getGroupNameBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, tables_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt64Size(4, maxRegions_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt64Size(5, maxTables_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor) obj;
+
+ boolean result = true;
+ result = result && (hasName() == other.hasName());
+ if (hasName()) {
+ result = result && getName()
+ .equals(other.getName());
+ }
+ result = result && (hasGroupName() == other.hasGroupName());
+ if (hasGroupName()) {
+ result = result && getGroupName()
+ .equals(other.getGroupName());
+ }
+ result = result && (hasTables() == other.hasTables());
+ if (hasTables()) {
+ result = result && getTables()
+ .equals(other.getTables());
+ }
+ result = result && (hasMaxRegions() == other.hasMaxRegions());
+ if (hasMaxRegions()) {
+ result = result && (getMaxRegions()
+ == other.getMaxRegions());
+ }
+ result = result && (hasMaxTables() == other.hasMaxTables());
+ if (hasMaxTables()) {
+ result = result && (getMaxTables()
+ == other.getMaxTables());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasName()) {
+ hash = (37 * hash) + NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getName().hashCode();
+ }
+ if (hasGroupName()) {
+ hash = (37 * hash) + GROUPNAME_FIELD_NUMBER;
+ hash = (53 * hash) + getGroupName().hashCode();
+ }
+ if (hasTables()) {
+ hash = (37 * hash) + TABLES_FIELD_NUMBER;
+ hash = (53 * hash) + getTables().hashCode();
+ }
+ if (hasMaxRegions()) {
+ hash = (37 * hash) + MAXREGIONS_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getMaxRegions());
+ }
+ if (hasMaxTables()) {
+ hash = (37 * hash) + MAXTABLES_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getMaxTables());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_NamespaceDescriptor_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_NamespaceDescriptor_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getTablesFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ name_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ groupName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (tablesBuilder_ == null) {
+ tables_ = org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.getDefaultInstance();
+ } else {
+ tablesBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ maxRegions_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ maxTables_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000010);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor build() {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.name_ = name_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.groupName_ = groupName_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ if (tablesBuilder_ == null) {
+ result.tables_ = tables_;
+ } else {
+ result.tables_ = tablesBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.maxRegions_ = maxRegions_;
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ to_bitField0_ |= 0x00000010;
+ }
+ result.maxTables_ = maxTables_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.getDefaultInstance()) return this;
+ if (other.hasName()) {
+ setName(other.getName());
+ }
+ if (other.hasGroupName()) {
+ setGroupName(other.getGroupName());
+ }
+ if (other.hasTables()) {
+ mergeTables(other.getTables());
+ }
+ if (other.hasMaxRegions()) {
+ setMaxRegions(other.getMaxRegions());
+ }
+ if (other.hasMaxTables()) {
+ setMaxTables(other.getMaxTables());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasName()) {
+
+ return false;
+ }
+ if (!hasTables()) {
+
+ return false;
+ }
+ if (!hasMaxRegions()) {
+
+ return false;
+ }
+ if (!hasMaxTables()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ name_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ groupName_ = input.readBytes();
+ break;
+ }
+ case 26: {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.newBuilder();
+ if (hasTables()) {
+ subBuilder.mergeFrom(getTables());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setTables(subBuilder.buildPartial());
+ break;
+ }
+ case 32: {
+ bitField0_ |= 0x00000008;
+ maxRegions_ = input.readInt64();
+ break;
+ }
+ case 40: {
+ bitField0_ |= 0x00000010;
+ maxTables_ = input.readInt64();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required string name = 1;
+ private java.lang.Object name_ = "";
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getName() {
+ java.lang.Object ref = name_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ name_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setName(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ name_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ name_ = getDefaultInstance().getName();
+ onChanged();
+ return this;
+ }
+ void setName(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ name_ = value;
+ onChanged();
+ }
+
+ // optional string groupName = 2;
+ private java.lang.Object groupName_ = "";
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ groupName_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setGroupName(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearGroupName() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ groupName_ = getDefaultInstance().getGroupName();
+ onChanged();
+ return this;
+ }
+ void setGroupName(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000002;
+ groupName_ = value;
+ onChanged();
+ }
+
+ // required .TableList tables = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList tables_ = org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.Builder, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableListOrBuilder> tablesBuilder_;
+ public boolean hasTables() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList getTables() {
+ if (tablesBuilder_ == null) {
+ return tables_;
+ } else {
+ return tablesBuilder_.getMessage();
+ }
+ }
+ public Builder setTables(org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList value) {
+ if (tablesBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ tables_ = value;
+ onChanged();
+ } else {
+ tablesBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ public Builder setTables(
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.Builder builderForValue) {
+ if (tablesBuilder_ == null) {
+ tables_ = builderForValue.build();
+ onChanged();
+ } else {
+ tablesBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ public Builder mergeTables(org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList value) {
+ if (tablesBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004) &&
+ tables_ != org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.getDefaultInstance()) {
+ tables_ =
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.newBuilder(tables_).mergeFrom(value).buildPartial();
+ } else {
+ tables_ = value;
+ }
+ onChanged();
+ } else {
+ tablesBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ public Builder clearTables() {
+ if (tablesBuilder_ == null) {
+ tables_ = org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.getDefaultInstance();
+ onChanged();
+ } else {
+ tablesBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.Builder getTablesBuilder() {
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return getTablesFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableListOrBuilder getTablesOrBuilder() {
+ if (tablesBuilder_ != null) {
+ return tablesBuilder_.getMessageOrBuilder();
+ } else {
+ return tables_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.Builder, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableListOrBuilder>
+ getTablesFieldBuilder() {
+ if (tablesBuilder_ == null) {
+ tablesBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.Builder, org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableListOrBuilder>(
+ tables_,
+ getParentForChildren(),
+ isClean());
+ tables_ = null;
+ }
+ return tablesBuilder_;
+ }
+
+ // required int64 maxRegions = 4;
+ private long maxRegions_ ;
+ public boolean hasMaxRegions() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public long getMaxRegions() {
+ return maxRegions_;
+ }
+ public Builder setMaxRegions(long value) {
+ bitField0_ |= 0x00000008;
+ maxRegions_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMaxRegions() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ maxRegions_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // required int64 maxTables = 5;
+ private long maxTables_ ;
+ public boolean hasMaxTables() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ public long getMaxTables() {
+ return maxTables_;
+ }
+ public Builder setMaxTables(long value) {
+ bitField0_ |= 0x00000010;
+ maxTables_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMaxTables() {
+ bitField0_ = (bitField0_ & ~0x00000010);
+ maxTables_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:NamespaceDescriptor)
+ }
+
+ static {
+ defaultInstance = new NamespaceDescriptor(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:NamespaceDescriptor)
+ }
+
+ public interface TableListOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated string table = 1;
+ java.util.List<String> getTableList();
+ int getTableCount();
+ String getTable(int index);
+ }
+ public static final class TableList extends
+ com.google.protobuf.GeneratedMessage
+ implements TableListOrBuilder {
+ // Use TableList.newBuilder() to construct.
+ private TableList(Builder builder) {
+ super(builder);
+ }
+ private TableList(boolean noInit) {}
+
+ private static final TableList defaultInstance;
+ public static TableList getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public TableList getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_TableList_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_TableList_fieldAccessorTable;
+ }
+
+ // repeated string table = 1;
+ public static final int TABLE_FIELD_NUMBER = 1;
+ private com.google.protobuf.LazyStringList table_;
+ public java.util.List<String>
+ getTableList() {
+ return table_;
+ }
+ public int getTableCount() {
+ return table_.size();
+ }
+ public String getTable(int index) {
+ return table_.get(index);
+ }
+
+ private void initFields() {
+ table_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < table_.size(); i++) {
+ output.writeBytes(1, table_.getByteString(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < table_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(table_.getByteString(i));
+ }
+ size += dataSize;
+ size += 1 * getTableList().size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList other = (org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList) obj;
+
+ boolean result = true;
+ result = result && getTableList()
+ .equals(other.getTableList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getTableCount() > 0) {
+ hash = (37 * hash) + TABLE_FIELD_NUMBER;
+ hash = (53 * hash) + getTableList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableListOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_TableList_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.internal_static_TableList_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ table_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList build() {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList result = new org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList(this);
+ int from_bitField0_ = bitField0_;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ table_ = new com.google.protobuf.UnmodifiableLazyStringList(
+ table_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.table_ = table_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.getDefaultInstance()) return this;
+ if (!other.table_.isEmpty()) {
+ if (table_.isEmpty()) {
+ table_ = other.table_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureTableIsMutable();
+ table_.addAll(other.table_);
+ }
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ ensureTableIsMutable();
+ table_.add(input.readBytes());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated string table = 1;
+ private com.google.protobuf.LazyStringList table_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ private void ensureTableIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ table_ = new com.google.protobuf.LazyStringArrayList(table_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ public java.util.List<String>
+ getTableList() {
+ return java.util.Collections.unmodifiableList(table_);
+ }
+ public int getTableCount() {
+ return table_.size();
+ }
+ public String getTable(int index) {
+ return table_.get(index);
+ }
+ public Builder setTable(
+ int index, String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTableIsMutable();
+ table_.set(index, value);
+ onChanged();
+ return this;
+ }
+ public Builder addTable(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTableIsMutable();
+ table_.add(value);
+ onChanged();
+ return this;
+ }
+ public Builder addAllTable(
+ java.lang.Iterable<String> values) {
+ ensureTableIsMutable();
+ super.addAll(values, table_);
+ onChanged();
+ return this;
+ }
+ public Builder clearTable() {
+ table_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+ void addTable(com.google.protobuf.ByteString value) {
+ ensureTableIsMutable();
+ table_.add(value);
+ onChanged();
+ }
+
+ // @@protoc_insertion_point(builder_scope:TableList)
+ }
+
+ static {
+ defaultInstance = new TableList(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:TableList)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_NamespaceDescriptorList_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_NamespaceDescriptorList_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_NamespaceDescriptor_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_NamespaceDescriptor_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_TableList_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_TableList_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n!src/main/protobuf/Namespace.proto\"L\n\027N" +
+ "amespaceDescriptorList\0221\n\023namespaceDescr" +
+ "iptor\030\001 \003(\0132\024.NamespaceDescriptor\"y\n\023Nam" +
+ "espaceDescriptor\022\014\n\004name\030\001 \002(\t\022\021\n\tgroupN" +
+ "ame\030\002 \001(\t\022\032\n\006tables\030\003 \002(\0132\n.TableList\022\022\n" +
+ "\nmaxRegions\030\004 \002(\003\022\021\n\tmaxTables\030\005 \002(\003\"\032\n\t" +
+ "TableList\022\r\n\005table\030\001 \003(\tBE\n*org.apache.h" +
+ "adoop.hbase.protobuf.generatedB\017Namespac" +
+ "eProtosH\001\210\001\001\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_NamespaceDescriptorList_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_NamespaceDescriptorList_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_NamespaceDescriptorList_descriptor,
+ new java.lang.String[] { "NamespaceDescriptor", },
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList.class,
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptorList.Builder.class);
+ internal_static_NamespaceDescriptor_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_NamespaceDescriptor_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_NamespaceDescriptor_descriptor,
+ new java.lang.String[] { "Name", "GroupName", "Tables", "MaxRegions", "MaxTables", },
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.class,
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.NamespaceDescriptor.Builder.class);
+ internal_static_TableList_descriptor =
+ getDescriptor().getMessageTypes().get(2);
+ internal_static_TableList_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_TableList_descriptor,
+ new java.lang.String[] { "Table", },
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.class,
+ org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos.TableList.Builder.class);
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
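
A minimal usage sketch of the generated API above (illustrative only, not part of the patch; the namespace and table names are invented):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos;

public class NamespaceProtoExample {
  public static void main(String[] args) throws Exception {
    // All required fields (name, tables, maxRegions, maxTables) must be set,
    // otherwise build() throws an UninitializedMessageException.
    NamespaceProtos.NamespaceDescriptor ns =
        NamespaceProtos.NamespaceDescriptor.newBuilder()
            .setName("ns1")
            .setGroupName("group1")                    // optional field
            .setTables(NamespaceProtos.TableList.newBuilder()
                .addTable("table1")
                .build())
            .setMaxRegions(100L)
            .setMaxTables(10L)
            .build();

    // Round-trip through the wire format.
    ByteString data = ns.toByteString();
    NamespaceProtos.NamespaceDescriptor copy =
        NamespaceProtos.NamespaceDescriptor.parseFrom(data);
    System.out.println(copy.getName());                // prints "ns1"
  }
}
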
diff --git a/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index 5141121..ab3811a 100644
--- a/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ b/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -135,10 +135,12 @@ public class FSTableDescriptors implements TableDescriptors {
invocations++;
if (HTableDescriptor.ROOT_TABLEDESC.getNameAsString().equals(tablename)) {
cachehits++;
+ LOG.info("-->hitRoot");
return HTableDescriptor.ROOT_TABLEDESC;
}
if (HTableDescriptor.META_TABLEDESC.getNameAsString().equals(tablename)) {
cachehits++;
+ LOG.info("-->metaRoot");
return HTableDescriptor.META_TABLEDESC;
}
// .META. and -ROOT- is already handled. If some one tries to get the descriptor for
diff --git a/src/main/protobuf/Namespace.proto b/src/main/protobuf/Namespace.proto
new file mode 100644
index 0000000..c07d0ee
--- /dev/null
+++ b/src/main/protobuf/Namespace.proto
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are used for namespace administration.
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "NamespaceProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+message NamespaceDescriptorList {
+ repeated NamespaceDescriptor namespaceDescriptor = 1;
+}
+
+message NamespaceDescriptor {
+ required string name = 1;
+ optional string groupName = 2;
+ required TableList tables = 3;
+ required int64 maxRegions = 4;
+ required int64 maxTables = 5;
+}
+
+message TableList {
+ repeated string table = 1;
+}
+
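
Because tables, maxRegions, and maxTables are declared required, every generated parseFrom() overload rejects a serialized descriptor that omits them: parsing funnels through buildParsed(), which throws when isInitialized() fails. A minimal sketch of that behavior (illustrative only, not part of the patch):

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.NamespaceProtos;

public class RequiredFieldCheck {
  public static void main(String[] args) {
    // buildPartial() skips the required-field check, so an incomplete
    // message (name only) can be produced and serialized on purpose.
    byte[] incomplete = NamespaceProtos.NamespaceDescriptor.newBuilder()
        .setName("ns1")                     // hypothetical namespace name
        .buildPartial()
        .toByteArray();
    try {
      NamespaceProtos.NamespaceDescriptor.parseFrom(incomplete);
    } catch (InvalidProtocolBufferException e) {
      // parseFrom() -> buildParsed() -> isInitialized() fails here because
      // tables, maxRegions, and maxTables were never set.
      System.out.println("rejected: " + e.getMessage());
    }
  }
}
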
diff --git a/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index fe3dfeb..0c03711 100644
--- a/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -134,12 +134,12 @@ public class TestKeyValue extends TestCase {
public void testMoreComparisons() throws Exception {
// Root compares
long now = System.currentTimeMillis();
- KeyValue a = new KeyValue(Bytes.toBytes(".META.,,99999999999999"), now);
- KeyValue b = new KeyValue(Bytes.toBytes(".META.,,1"), now);
+ KeyValue a = new KeyValue(Bytes.toBytes("-hbase-..META.,,99999999999999"), now);
+ KeyValue b = new KeyValue(Bytes.toBytes("-hbase-..META.,,1"), now);
KVComparator c = new KeyValue.RootComparator();
assertTrue(c.compare(b, a) < 0);
- KeyValue aa = new KeyValue(Bytes.toBytes(".META.,,1"), now);
- KeyValue bb = new KeyValue(Bytes.toBytes(".META.,,1"),
+ KeyValue aa = new KeyValue(Bytes.toBytes("-hbase-..META.,,1"), now);
+ KeyValue bb = new KeyValue(Bytes.toBytes("-hbase-..META.,,1"),
Bytes.toBytes("info"), Bytes.toBytes("regioninfo"), 1235943454602L,
(byte[])null);
assertTrue(c.compare(aa, bb) < 0);
@@ -226,13 +226,13 @@ public class TestKeyValue extends TestCase {
private void metacomparisons(final KeyValue.MetaComparator c) {
long now = System.currentTimeMillis();
- assertTrue(c.compare(new KeyValue(Bytes.toBytes(".META.,a,,0,1"), now),
- new KeyValue(Bytes.toBytes(".META.,a,,0,1"), now)) == 0);
- KeyValue a = new KeyValue(Bytes.toBytes(".META.,a,,0,1"), now);
- KeyValue b = new KeyValue(Bytes.toBytes(".META.,a,,0,2"), now);
+ assertTrue(c.compare(new KeyValue(Bytes.toBytes("-hbase-..META.,a,,0,1"), now),
+ new KeyValue(Bytes.toBytes("-hbase-..META.,a,,0,1"), now)) == 0);
+ KeyValue a = new KeyValue(Bytes.toBytes("-hbase-..META.,a,,0,1"), now);
+ KeyValue b = new KeyValue(Bytes.toBytes("-hbase-..META.,a,,0,2"), now);
assertTrue(c.compare(a, b) < 0);
- assertTrue(c.compare(new KeyValue(Bytes.toBytes(".META.,a,,0,2"), now),
- new KeyValue(Bytes.toBytes(".META.,a,,0,1"), now)) > 0);
+ assertTrue(c.compare(new KeyValue(Bytes.toBytes("-hbase-..META.,a,,0,2"), now),
+ new KeyValue(Bytes.toBytes("-hbase-..META.,a,,0,1"), now)) > 0);
}
private void comparisons(final KeyValue.KVComparator c) {
@@ -284,12 +284,12 @@ public class TestKeyValue extends TestCase {
}
// Make up -ROOT- table keys.
KeyValue [] rootKeys = {
- new KeyValue(Bytes.toBytes(".META.,aaaaa,\u0000\u0000,0,2"), fam, qf, 2, nb),
- new KeyValue(Bytes.toBytes(".META.,aaaaa,\u0001,0,3"), fam, qf, 3, nb),
- new KeyValue(Bytes.toBytes(".META.,aaaaa,,0,1"), fam, qf, 1, nb),
- new KeyValue(Bytes.toBytes(".META.,aaaaa,\u1000,0,5"), fam, qf, 5, nb),
- new KeyValue(Bytes.toBytes(".META.,aaaaa,a,0,4"), fam, qf, 4, nb),
- new KeyValue(Bytes.toBytes(".META.,,0"), fam, qf, 0, nb),
+ new KeyValue(Bytes.toBytes("-hbase-..META.,aaaaa,\u0000\u0000,0,2"), fam, qf, 2, nb),
+ new KeyValue(Bytes.toBytes("-hbase-..META.,aaaaa,\u0001,0,3"), fam, qf, 3, nb),
+ new KeyValue(Bytes.toBytes("-hbase-..META.,aaaaa,,0,1"), fam, qf, 1, nb),
+ new KeyValue(Bytes.toBytes("-hbase-..META.,aaaaa,\u1000,0,5"), fam, qf, 5, nb),
+ new KeyValue(Bytes.toBytes("-hbase-..META.,aaaaa,a,0,4"), fam, qf, 4, nb),
+ new KeyValue(Bytes.toBytes("-hbase-..META.,,0"), fam, qf, 0, nb),
};
// This will output the keys incorrectly.
set = new TreeSet<KeyValue>(new KeyValue.MetaComparator());
diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java b/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
index 083824f..a34122e 100644
--- a/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
+++ b/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
@@ -499,7 +499,7 @@ public class TestMemStore extends TestCase {
for (int k = start; k <= end; k++) {
byte [] kk = Bytes.toBytes(k);
byte [] row =
- Bytes.toBytes(".META.,table," + Bytes.toString(kk) + ",1," + k);
+ Bytes.toBytes("-hbase-..META.,table," + Bytes.toString(kk) + ",1," + k);
KeyValue key = new KeyValue(row, CONTENTS, BASIC,
System.currentTimeMillis(),
(CONTENTSTR + k).getBytes(HConstants.UTF8_ENCODING));
@@ -516,7 +516,7 @@ public class TestMemStore extends TestCase {
System.out.println(kv);
byte [] b = kv.getRow();
// Hardcoded offsets into String
- String str = Bytes.toString(b, 13, 4);
+ String str = Bytes.toString(b, 21, 4);
byte [] bb = Bytes.toBytes(index);
String bbStr = Bytes.toString(bb);
assertEquals(str, bbStr);
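
(The new offset follows from the longer row prefix: the old rows start with ".META.,table," — 6 + 1 + 5 + 1 = 13 bytes — while the namespaced rows prepend the 8-byte "-hbase-." namespace prefix, so the substring holding the encoded key now begins at 13 + 8 = 21.)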