diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
index e50a3e8..5b6b5c5 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
@@ -673,12 +673,12 @@ class HBaseContext(@transient val sc: SparkContext,
       //Here is where we finally iterate through the data in this partition of the
       //RDD that has been sorted and partitioned
       it.foreach{ case (keyFamilyQualifier, cellValue:Array[Byte]) =>
-
+        val timestamp = if(keyFamilyQualifier.timestamp > 0) keyFamilyQualifier.timestamp else nowTimeStamp
         val wl = writeValueToHFile(keyFamilyQualifier.rowKey,
           keyFamilyQualifier.family,
           keyFamilyQualifier.qualifier,
           cellValue,
-          nowTimeStamp,
+          timestamp,
           fs,
           conn,
           localTableName,