diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index 285f69b..6deab6a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -91,6 +91,9 @@ public class ProtobufLogReader extends ReaderBase {
   static {
     writerClsNames.add(ProtobufLogWriter.class.getSimpleName());
   }
+
+  // cell codec classname
+  private String codecClsName = null;
 
   enum WALHdrResult {
     EOF,                   // stream is at EOF when method starts
@@ -153,9 +156,16 @@ public class ProtobufLogReader extends ReaderBase {
   /*
    * Returns names of the accepted writer classes
    */
-  protected List<String> getWriterClsNames() {
+  public List<String> getWriterClsNames() {
     return writerClsNames;
   }
+
+  /*
+   * Returns the cell codec classname
+   */
+  public String getCodecClsName() {
+    return codecClsName;
+  }
 
   protected WALHdrContext readHeader(Builder builder, FSDataInputStream stream)
       throws IOException {
@@ -207,6 +217,9 @@ public class ProtobufLogReader extends ReaderBase {
       LOG.trace("After reading the trailer: walEditsStopOffset: " + this.walEditsStopOffset
           + ", fileLength: " + this.fileLength + ", " + "trailerPresent: " + trailerPresent);
     }
+
+    codecClsName = hdrCtxt.getCellCodecClsName();
+
     return hdrCtxt.getCellCodecClsName();
   }
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java
index d175741..0d052d4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java
@@ -51,7 +51,7 @@ public class SecureProtobufLogReader extends ProtobufLogReader {
   }
 
   @Override
-  protected List<String> getWriterClsNames() {
+  public List<String> getWriterClsNames() {
     return writerClsNames;
   }
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
index f0d1e67..1f3f43b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
@@ -47,6 +47,7 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
 
 // imports for things that haven't moved yet.
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
@@ -242,11 +243,33 @@ public class WALPrettyPrinter {
     if (!fs.isFile(p)) {
       throw new IOException(p + " is not a file");
     }
 
-    if (outputJSON && !persistentOutput) {
-      out.print("[");
-      firstTxn = true;
-    }
+    WAL.Reader log = WALFactory.createReader(fs, p, conf);
+
+    if (log instanceof ProtobufLogReader) {
+      List<String> writerClsNames = ((ProtobufLogReader) log).getWriterClsNames();
+      if (writerClsNames != null && writerClsNames.size() > 0) {
+        out.print("Writer Classes: ");
+        for (int i = 0; i < writerClsNames.size(); i++) {
+          out.print(writerClsNames.get(i));
+          if (i != writerClsNames.size() - 1) {
+            out.print(" ");
+          }
+        }
+        out.println();
+      }
+
+      String cellCodecClsName = ((ProtobufLogReader) log).getCodecClsName();
+      if (cellCodecClsName != null) {
+        out.println("Cell Codec Class: " + cellCodecClsName);
+      }
+    }
+
+    if (outputJSON && !persistentOutput) {
+      out.print("[");
+      firstTxn = true;
+    }
+
     try {
       WAL.Entry entry;
       while ((entry = log.next()) != null) {