diff --git a/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index 5052878..4c31a1e 100644
--- a/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ b/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.DeserializationException;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -344,9 +345,14 @@ public class MasterFileSystem {
       }
       // as above
       FSUtils.checkVersion(fs, rd, true, c.getInt(HConstants.THREAD_WAKE_FREQUENCY,
-        10 * 1000), c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS,
-        HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
+          10 * 1000), c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS,
+          HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
       }
+    } catch (DeserializationException de) {
+      LOG.fatal("Please fix invalid configuration for " + HConstants.HBASE_DIR, de);
+      IOException ioe = new IOException();
+      ioe.initCause(de);
+      throw ioe;
     } catch (IllegalArgumentException iae) {
       LOG.fatal("Please fix invalid configuration for " + HConstants.HBASE_DIR + " "
         + rd.toString(), iae);
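
Note on the hunk above: checkVersion() can now also throw the checked DeserializationException, but this code path must still surface an IOException, so the cause is attached via initCause(). A minimal, self-contained sketch of that wrapping idiom follows; the class and exception below are stand-ins for illustration, not part of the patch:

import java.io.IOException;

public class WrapCauseSketch {
  // Stand-in for org.apache.hadoop.hbase.DeserializationException.
  static class DeserializationException extends Exception {
    DeserializationException(String msg) { super(msg); }
  }

  // An API that may only throw IOException can still preserve the root cause,
  // which is what the new catch block in MasterFileSystem does.
  static void rethrowAsIOE(DeserializationException de) throws IOException {
    IOException ioe = new IOException();
    ioe.initCause(de);  // keeps the original stack trace visible in logs
    throw ioe;
  }

  public static void main(String[] args) {
    try {
      rethrowAsIOE(new DeserializationException("bad pb magic"));
    } catch (IOException e) {
      // The original exception remains reachable through getCause().
      System.out.println("cause = " + e.getCause());
    }
  }
}
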
diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java
new file mode 100644
index 0000000..9d75e75
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java
@@ -0,0 +1,468 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: FS.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class FSProtos {
+  private FSProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface HBaseVersionFileContentOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string version = 1;
+    boolean hasVersion();
+    String getVersion();
+  }
+  public static final class HBaseVersionFileContent extends
+      com.google.protobuf.GeneratedMessage
+      implements HBaseVersionFileContentOrBuilder {
+    // Use HBaseVersionFileContent.newBuilder() to construct.
+    private HBaseVersionFileContent(Builder builder) {
+      super(builder);
+    }
+    private HBaseVersionFileContent(boolean noInit) {}
+
+    private static final HBaseVersionFileContent defaultInstance;
+    public static HBaseVersionFileContent getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public HBaseVersionFileContent getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable;
+    }
+
+    private int bitField0_;
+    // required string version = 1;
+    public static final int VERSION_FIELD_NUMBER = 1;
+    private java.lang.Object version_;
+    public boolean hasVersion() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public String getVersion() {
+      java.lang.Object ref = version_;
+      if (ref instanceof String) {
+        return (String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        String s = bs.toStringUtf8();
+        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+          version_ = s;
+        }
+        return s;
+      }
+    }
+    private com.google.protobuf.ByteString getVersionBytes() {
+      java.lang.Object ref = version_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+        version_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      version_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasVersion()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getVersionBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getVersionBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent other = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) obj;
+
+      boolean result = true;
+      result = result && (hasVersion() == other.hasVersion());
+      if (hasVersion()) {
+        result = result && getVersion()
+            .equals(other.getVersion());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasVersion()) {
+        hash = (37 * hash) + VERSION_FIELD_NUMBER;
+        hash = (53 * hash) + getVersion().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable;
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        version_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDescriptor();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent build() {
+        org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.version_ = version_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance()) return this;
+        if (other.hasVersion()) {
+          setVersion(other.getVersion());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasVersion()) {
+
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              version_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      }
+
+      private int bitField0_;
+
+      // required string version = 1;
+      private java.lang.Object version_ = "";
+      public boolean hasVersion() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public String getVersion() {
+        java.lang.Object ref = version_;
+        if (!(ref instanceof String)) {
+          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+          version_ = s;
+          return s;
+        } else {
+          return (String) ref;
+        }
+      }
+      public Builder setVersion(String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        version_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearVersion() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        version_ = getDefaultInstance().getVersion();
+        onChanged();
+        return this;
+      }
+      void setVersion(com.google.protobuf.ByteString value) {
+        bitField0_ |= 0x00000001;
+        version_ = value;
+        onChanged();
+      }
+
+      // @@protoc_insertion_point(builder_scope:HBaseVersionFileContent)
+    }
+
+    static {
+      defaultInstance = new HBaseVersionFileContent(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:HBaseVersionFileContent)
+  }
+
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_HBaseVersionFileContent_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_HBaseVersionFileContent_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\010FS.proto\"*\n\027HBaseVersionFileContent\022\017\n" +
+      "\007version\030\001 \002(\tB;\n*org.apache.hadoop.hbas" +
+      "e.protobuf.generatedB\010FSProtosH\001\240\001\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_HBaseVersionFileContent_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_HBaseVersionFileContent_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_HBaseVersionFileContent_descriptor,
+              new java.lang.String[] { "Version", },
+              org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class,
+              org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
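
The generated class above exposes the usual protobuf 2.4-style message API. A small sketch of how the message round-trips through bytes, assuming only this generated class and the protobuf runtime on the classpath (the literal version string is illustrative):

import org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent;

public class VersionMessageRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build a message; "7" is an illustrative version string, not the real constant.
    HBaseVersionFileContent built =
      HBaseVersionFileContent.newBuilder().setVersion("7").build();
    // Serialize to the raw protobuf wire format (no pb-magic prefix at this layer).
    byte[] wire = built.toByteArray();
    // Parse it back; throws InvalidProtocolBufferException on malformed input.
    HBaseVersionFileContent parsed = HBaseVersionFileContent.parseFrom(wire);
    if (!parsed.getVersion().equals(built.getVersion())) {
      throw new AssertionError("round trip changed the version");
    }
  }
}

Because `version` is a required field, calling build() on a builder that never set it throws UninitializedMessageException, so an empty or truncated payload cannot silently yield a versionless message.
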
diff --git a/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 3d35d3e..1c66cf5 100644
--- a/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -19,10 +19,12 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.EOFException;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
@@ -43,17 +45,23 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hbase.DeserializationException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.FSProtos;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.protobuf.InvalidProtocolBufferException;
+
 /**
  * Utility methods for interacting with the underlying file system.
  */
@@ -252,26 +260,75 @@ public abstract class FSUtils {
   * @param rootdir root hbase directory
   * @return null if no version file exists, version string otherwise.
   * @throws IOException e
+  * @throws DeserializationException
   */
  public static String getVersion(FileSystem fs, Path rootdir)
- throws IOException {
+ throws IOException, DeserializationException {
    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);
+   FileStatus [] status = fs.listStatus(versionFile);
+   if (status == null || status.length == 0) return null;
    String version = null;
-   if (fs.exists(versionFile)) {
-     FSDataInputStream s =
-       fs.open(versionFile);
-     try {
-       version = DataInputStream.readUTF(s);
-     } catch (EOFException eof) {
-       LOG.warn("Version file was empty, odd, will try to set it.");
-     } finally {
-       s.close();
+   byte [] content = new byte [(int)status[0].getLen()];
+   FSDataInputStream s = fs.open(versionFile);
+   try {
+     IOUtils.readFully(s, content, 0, content.length);
+     if (ProtobufUtil.isPBMagicPrefix(content)) {
+       version = parseVersionFrom(content);
+     } else {
+       // Presume it is pre-pb format.
+       InputStream is = new ByteArrayInputStream(content);
+       DataInputStream dis = new DataInputStream(is);
+       try {
+         version = dis.readUTF();
+       } finally {
+         dis.close();
+       }
+       // Update the format
+       LOG.info("Updating the hbase.version file format with version=" + version);
+       setVersion(fs, rootdir, version, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);
      }
+   } catch (EOFException eof) {
+     LOG.warn("Version file was empty, odd, will try to set it.");
+   } finally {
+     s.close();
    }
    return version;
  }
 
  /**
+  * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.
+  * @param bytes The byte content of the hbase.version file.
+  * @return The version found in the file as a String.
+  * @throws DeserializationException
+  */
+ static String parseVersionFrom(final byte [] bytes)
+ throws DeserializationException {
+   ProtobufUtil.expectPBMagicPrefix(bytes);
+   int pblen = ProtobufUtil.lengthOfPBMagic();
+   FSProtos.HBaseVersionFileContent.Builder builder =
+     FSProtos.HBaseVersionFileContent.newBuilder();
+   FSProtos.HBaseVersionFileContent fileContent;
+   try {
+     fileContent = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
+     return fileContent.getVersion();
+   } catch (InvalidProtocolBufferException e) {
+     // Convert
+     throw new DeserializationException(e);
+   }
+ }
+
+ /**
+  * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.
+  * @param version Version to persist
+  * @return Serialized protobuf with version content and a bit of pb magic for a prefix.
+  */
+ static byte [] toVersionByteArray(final String version) {
+   FSProtos.HBaseVersionFileContent.Builder builder =
+     FSProtos.HBaseVersionFileContent.newBuilder();
+   return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());
+ }
+
+ /**
   * Verifies current version of file system
   *
   * @param fs file system
@@ -279,11 +336,11 @@ public abstract class FSUtils {
   * @param message if true, issues a message on System.out
   *
   * @throws IOException e
+  * @throws DeserializationException
   */
- public static void checkVersion(FileSystem fs, Path rootdir,
-   boolean message) throws IOException {
-   checkVersion(fs, rootdir, message, 0,
-     HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);
+ public static void checkVersion(FileSystem fs, Path rootdir, boolean message)
+ throws IOException, DeserializationException {
+   checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);
  }
 
 /**
@@ -296,20 +353,20 @@ public abstract class FSUtils {
   * @param retries number of times to retry
   *
   * @throws IOException e
+  * @throws DeserializationException
   */
  public static void checkVersion(FileSystem fs, Path rootdir,
-   boolean message, int wait, int retries) throws IOException {
+   boolean message, int wait, int retries)
+ throws IOException, DeserializationException {
    String version = getVersion(fs, rootdir);
-
    if (version == null) {
      if (!rootRegionExists(fs, rootdir)) {
        // rootDir is empty (no version file and no root region)
        // just create new version file (HBASE-1195)
-       FSUtils.setVersion(fs, rootdir, wait, retries);
+       setVersion(fs, rootdir, wait, retries);
        return;
      }
-   } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0)
-     return;
+   } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;
 
    // version is deprecated require migration
    // Output on stdout so user sees it in terminal.
@@ -332,8 +389,8 @@ public abstract class FSUtils {
   */
  public static void setVersion(FileSystem fs, Path rootdir)
  throws IOException {
-   setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,
-     HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);
+   setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,
+       HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);
  }
 
 /**
@@ -367,19 +424,17 @@ public abstract class FSUtils {
    while (true) {
      try {
        FSDataOutputStream s = fs.create(versionFile);
-       s.writeUTF(version);
-       LOG.debug("Created version file at " + rootdir.toString() +
-         " set its version at:" + version);
+       s.write(toVersionByteArray(version));
        s.close();
+       LOG.debug("Created version file at " + rootdir.toString() + " with version=" + version);
        return;
      } catch (IOException e) {
        if (retries > 0) {
-         LOG.warn("Unable to create version file at " + rootdir.toString() +
-           ", retrying: " + e.getMessage());
+         LOG.warn("Unable to create version file at " + rootdir.toString() + ", retrying", e);
          fs.delete(versionFile, false);
          try {
            if (wait > 0) {
-             Thread.sleep(wait);
+              Thread.sleep(wait);
            }
          } catch (InterruptedException ex) {
            // ignore
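
The FSUtils changes above make the version file self-describing: new files are the pb-magic prefix followed by a serialized HBaseVersionFileContent, while legacy files are whatever DataOutputStream.writeUTF() produced. The sketch below imitates the detection getVersion() now performs, using a stand-in four-byte magic; the real prefix is whatever ProtobufUtil.prependPBMagic() writes, so treat MAGIC here as an assumption:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.util.Arrays;

public class VersionFileFormatSketch {
  // Assumption for illustration only; the actual bytes come from ProtobufUtil.
  private static final byte[] MAGIC = {'P', 'B', 'U', 'F'};

  static boolean hasMagicPrefix(byte[] content) {
    return content.length >= MAGIC.length
        && Arrays.equals(Arrays.copyOf(content, MAGIC.length), MAGIC);
  }

  public static void main(String[] args) throws Exception {
    // Legacy format: writeUTF emits a 2-byte big-endian length, then modified UTF-8.
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    new DataOutputStream(bos).writeUTF("7");
    byte[] legacy = bos.toByteArray();

    // No magic prefix, so getVersion() would fall back to readUTF() and then
    // rewrite the file in the new format.
    System.out.println("pb format? " + hasMagicPrefix(legacy));  // false
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(legacy));
    System.out.println("legacy version = " + dis.readUTF());     // "7"
  }
}

A nice property of this scheme: a legacy writeUTF file begins with a two-byte length, so for it to collide with a printable four-byte magic the version string would have to be tens of thousands of characters long, which never happens in practice.
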
diff --git a/src/main/protobuf/FS.proto b/src/main/protobuf/FS.proto
new file mode 100644
index 0000000..eac2fa3
--- /dev/null
+++ b/src/main/protobuf/FS.proto
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are written into the filesystem
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "FSProtos";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+/**
+ * The ${HBASE_ROOTDIR}/hbase.version file content
+ */
+message HBaseVersionFileContent {
+  required string version = 1;
+}
diff --git a/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java b/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
index 339a120..b732f5a 100644
--- a/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
+++ b/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
@@ -21,9 +21,12 @@ package org.apache.hadoop.hbase.util;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
 import java.io.File;
+import java.io.IOException;
 import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
@@ -32,6 +35,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hbase.DeserializationException;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
@@ -46,6 +50,30 @@ import org.junit.experimental.categories.Category;
  */
 @Category(MediumTests.class)
 public class TestFSUtils {
+  @Test
+  public void testVersion() throws DeserializationException, IOException {
+    HBaseTestingUtility htu = new HBaseTestingUtility();
+    final FileSystem fs = htu.getTestFileSystem();
+    final Path rootdir = htu.getDataTestDir();
+    assertNull(FSUtils.getVersion(fs, rootdir));
+    // Write out old format version file.  See if we can read it in and convert.
+    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);
+    FSDataOutputStream s = fs.create(versionFile);
+    final String version = HConstants.FILE_SYSTEM_VERSION;
+    s.writeUTF(version);
+    s.close();
+    assertTrue(fs.exists(versionFile));
+    FileStatus [] status = fs.listStatus(versionFile);
+    assertNotNull(status);
+    assertTrue(status.length > 0);
+    String newVersion = FSUtils.getVersion(fs, rootdir);
+    assertEquals(version.length(), newVersion.length());
+    assertEquals(version, newVersion);
+    // File will have been converted.  Exercise the pb format
+    assertEquals(version, FSUtils.getVersion(fs, rootdir));
+    FSUtils.checkVersion(fs, rootdir, true);
+  }
+
   @Test
   public void testIsHDFS() throws Exception {
     HBaseTestingUtility htu = new HBaseTestingUtility();
     htu.getConfiguration().setBoolean("dfs.support.append", false);
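
As a complement to the unit test, a minimal usage sketch of the changed FSUtils entry points. It assumes a local FileSystem and the Hadoop behavior this patch codes against (listStatus returning null for a missing path rather than throwing), and the scratch path is made up:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.DeserializationException;
import org.apache.hadoop.hbase.util.FSUtils;

public class VersionFileUsageSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path rootdir = new Path("/tmp/hbase-version-sketch");  // hypothetical scratch dir
    fs.mkdirs(rootdir);
    try {
      // No version file and no root region yet, so this writes a fresh pb-format file.
      FSUtils.checkVersion(fs, rootdir, true);
      // Reads the file back; on a legacy writeUTF file this would also rewrite it as pb.
      System.out.println("hbase.version = " + FSUtils.getVersion(fs, rootdir));
    } catch (DeserializationException e) {
      // Thrown when the file carries the pb magic but the payload will not parse.
      System.err.println("corrupt hbase.version: " + e);
    }
  }
}
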