Index: lucene/CHANGES.txt
===================================================================
--- lucene/CHANGES.txt	(revision 1552546)
+++ lucene/CHANGES.txt	(working copy)
@@ -104,6 +104,9 @@
 * LUCENE-5285: Improved highlighting of multi-valued fields with
   FastVectorHighlighter. (Nik Everett via Adrien Grand)
 
+* LUCENE-5373: Memory usage of [Lucene42/Memory/Direct]DocValuesFormat was
+  over-estimated. (Shay Banon, Adrien Grand)
+
 Changes in Runtime Behavior
 
 * LUCENE-5362: IndexReader and SegmentCoreReaders now throw 
Index: lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectDocValuesProducer.java
===================================================================
--- lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectDocValuesProducer.java	(revision 1552546)
+++ lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectDocValuesProducer.java	(working copy)
@@ -62,6 +62,9 @@
   private final Map<Integer,Bits> docsWithFieldInstances = new HashMap<Integer,Bits>();
   
   private final int maxDoc;
+  // volatile to make sure ramBytesUsed() always sees the most recent value,
+  // but increments must always be performed under a lock
+  private volatile long ramBytesUsed;
   
   static final byte NUMBER = 0;
   static final byte BYTES = 1;
@@ -76,6 +77,7 @@
     String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
     // read in the entries from the metadata file.
     IndexInput in = state.directory.openInput(metaName, state.context);
+    ramBytesUsed = RamUsageEstimator.shallowSizeOfInstance(getClass());
     boolean success = false;
     final int version;
     try {
@@ -178,8 +180,7 @@
 
   @Override
   public long ramBytesUsed() {
-    // TODO: optimize me
-    return RamUsageEstimator.sizeOf(this);
+    return ramBytesUsed;
   }
   
   @Override
@@ -199,9 +200,8 @@
     case 1:
       {
         final byte[] values = new byte[entry.count];
-        for(int i=0;i<entry.count;i++) {
-          values[i] = data.readByte();
-        }
+        data.readBytes(values, 0, entry.count);
+        ramBytesUsed += RamUsageEstimator.sizeOf(values);
         return new NumericDocValues() {
           @Override
           public long get(int idx) {
@@ -216,6 +216,7 @@
         for(int i=0;i<entry.count;i++) {
           values[i] = data.readShort();
         }
+        ramBytesUsed += RamUsageEstimator.sizeOf(values);
         return new NumericDocValues() {
           @Override
           public long get(int idx) {
@@ -230,6 +231,7 @@
         for(int i=0;i<entry.count;i++) {
           values[i] = data.readInt();
         }
+        ramBytesUsed += RamUsageEstimator.sizeOf(values);
         return new NumericDocValues() {
           @Override
           public long get(int idx) {
@@ -244,6 +246,7 @@
         for(int i=0;i<entry.count;i++) {
           values[i] = data.readLong();
         }
+        ramBytesUsed += RamUsageEstimator.sizeOf(values);
         return new NumericDocValues() {
           @Override
           public long get(int idx) {
@@ -280,6 +283,8 @@
     }
     address[entry.count] = data.readInt();
 
+    ramBytesUsed += RamUsageEstimator.sizeOf(bytes) + RamUsageEstimator.sizeOf(address);
+
     return new BinaryDocValues() {
       @Override
       public void get(int docID, BytesRef result) {
Index: lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java
===================================================================
--- lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java	(revision 1552546)
+++ lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java	(working copy)
@@ -75,8 +75,10 @@
   private final Map<Integer,Bits> docsWithFieldInstances = new HashMap<Integer,Bits>();
   
   private final int maxDoc;
+  // volatile to make sure ramBytesUsed() always sees the most recent value,
+  // but increments must always be performed under a lock
+  private volatile long ramBytesUsed;
   
-  
   static final byte NUMBER = 0;
   static final byte BYTES = 1;
   static final byte FST = 2;
@@ -107,7 +107,7 @@
       binaries = new HashMap<Integer,BinaryEntry>();
       fsts = new HashMap<Integer,FSTEntry>();
       readFields(in, state.fieldInfos);
-
+      ramBytesUsed = RamUsageEstimator.shallowSizeOfInstance(getClass());
       success = true;
     } finally {
       if (success) {
@@ -204,8 +204,7 @@
   
   @Override
   public long ramBytesUsed() {
-    // TODO: optimize me
-    return RamUsageEstimator.sizeOf(this);
+    return ramBytesUsed;
   }
   
   private NumericDocValues loadNumeric(FieldInfo field) throws IOException {
@@ -224,6 +223,7 @@
         final int formatID = data.readVInt();
         final int bitsPerValue = data.readVInt();
         final PackedInts.Reader ordsReader = PackedInts.getReaderNoHeader(data, PackedInts.Format.byId(formatID), entry.packedIntsVersion, maxDoc, bitsPerValue);
+        ramBytesUsed += RamUsageEstimator.sizeOf(decode) + ordsReader.ramBytesUsed();
         return new NumericDocValues() {
           @Override
           public long get(int docID) {
@@ -233,10 +233,12 @@
       case DELTA_COMPRESSED:
         final int blockSize = data.readVInt();
         final BlockPackedReader reader = new BlockPackedReader(data, entry.packedIntsVersion, blockSize, maxDoc, false);
+        ramBytesUsed += reader.ramBytesUsed();
         return reader;
       case UNCOMPRESSED:
         final byte bytes[] = new byte[maxDoc];
         data.readBytes(bytes, 0, bytes.length);
+        ramBytesUsed += RamUsageEstimator.sizeOf(bytes);
         return new NumericDocValues() {
           @Override
           public long get(int docID) {
@@ -248,6 +250,7 @@
         final long mult = data.readLong();
         final int quotientBlockSize = data.readVInt();
         final BlockPackedReader quotientReader = new BlockPackedReader(data, entry.packedIntsVersion, quotientBlockSize, maxDoc, false);
+        ramBytesUsed += quotientReader.ramBytesUsed();
         return new NumericDocValues() {
           @Override
           public long get(int docID) {
@@ -277,6 +280,7 @@
     final PagedBytes.Reader bytesReader = bytes.freeze(true);
     if (entry.minLength == entry.maxLength) {
       final int fixedLength = entry.minLength;
+      ramBytesUsed += bytes.ramBytesUsed();
       return new BinaryDocValues() {
         @Override
         public void get(int docID, BytesRef result) {
@@ -286,6 +290,7 @@
     } else {
       data.seek(data.getFilePointer() + entry.missingBytes);
       final MonotonicBlockPackedReader addresses = new MonotonicBlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, maxDoc, false);
+      ramBytesUsed += bytes.ramBytesUsed() + addresses.ramBytesUsed();
       return new BinaryDocValues() {
         @Override
         public void get(int docID, BytesRef result) {
@@ -309,6 +314,7 @@
       if (instance == null) {
         data.seek(entry.offset);
         instance = new FST<Long>(data, PositiveIntOutputs.getSingleton());
+        ramBytesUsed += instance.sizeInBytes();
         fstInstances.put(field.number, instance);
       }
     }
@@ -383,6 +389,7 @@
       if (instance == null) {
         data.seek(entry.offset);
         instance = new FST<Long>(data, PositiveIntOutputs.getSingleton());
+        ramBytesUsed += instance.sizeInBytes();
         fstInstances.put(field.number, instance);
       }
     }
Index: lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java	(revision 1552546)
+++ lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java	(working copy)
@@ -73,8 +73,10 @@
       new HashMap<Integer,FST<Long>>();
   
   private final int maxDoc;
+  // volatile to make sure ramBytesUsed() always sees the most recent value,
+  // but increments must always be performed under a lock
+  private volatile long ramBytesUsed;
   
-  
   static final byte NUMBER = 0;
   static final byte BYTES = 1;
   static final byte FST = 2;
@@ -96,6 +98,7 @@
     // read in the entries from the metadata file.
     IndexInput in = state.directory.openInput(metaName, state.context);
     boolean success = false;
+    ramBytesUsed = RamUsageEstimator.shallowSizeOfInstance(getClass());
     final int version;
     try {
       version = CodecUtil.checkHeader(in, metaCodec, 
@@ -190,7 +193,7 @@
   
   @Override
   public long ramBytesUsed() {
-    return RamUsageEstimator.sizeOf(this);
+    return ramBytesUsed;
   }
   
   private NumericDocValues loadNumeric(FieldInfo field) throws IOException {
@@ -209,6 +212,7 @@
         final int formatID = data.readVInt();
         final int bitsPerValue = data.readVInt();
         final PackedInts.Reader ordsReader = PackedInts.getReaderNoHeader(data, PackedInts.Format.byId(formatID), entry.packedIntsVersion, maxDoc, bitsPerValue);
+        ramBytesUsed += RamUsageEstimator.sizeOf(decode) + ordsReader.ramBytesUsed();
         return new NumericDocValues() {
           @Override
           public long get(int docID) {
@@ -218,15 +222,12 @@
       case DELTA_COMPRESSED:
         final int blockSize = data.readVInt();
         final BlockPackedReader reader = new BlockPackedReader(data, entry.packedIntsVersion, blockSize, maxDoc, false);
-        return new NumericDocValues() {
-          @Override
-          public long get(int docID) {
-            return reader.get(docID);
-          }
-        };
+        ramBytesUsed += reader.ramBytesUsed();
+        return reader;
       case UNCOMPRESSED:
         final byte bytes[] = new byte[maxDoc];
         data.readBytes(bytes, 0, bytes.length);
+        ramBytesUsed += RamUsageEstimator.sizeOf(bytes);
         return new NumericDocValues() {
           @Override
           public long get(int docID) {
@@ -238,6 +239,7 @@
         final long mult = data.readLong();
         final int quotientBlockSize = data.readVInt();
         final BlockPackedReader quotientReader = new BlockPackedReader(data, entry.packedIntsVersion, quotientBlockSize, maxDoc, false);
+        ramBytesUsed += quotientReader.ramBytesUsed();
         return new NumericDocValues() {
           @Override
           public long get(int docID) {
@@ -267,6 +269,7 @@
     final PagedBytes.Reader bytesReader = bytes.freeze(true);
     if (entry.minLength == entry.maxLength) {
       final int fixedLength = entry.minLength;
+      ramBytesUsed += bytes.ramBytesUsed();
       return new BinaryDocValues() {
         @Override
         public void get(int docID, BytesRef result) {
@@ -275,6 +278,7 @@
       };
     } else {
       final MonotonicBlockPackedReader addresses = new MonotonicBlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, maxDoc, false);
+      ramBytesUsed += bytes.ramBytesUsed() + addresses.ramBytesUsed();
       return new BinaryDocValues() {
         @Override
         public void get(int docID, BytesRef result) {
@@ -295,6 +299,7 @@
       if (instance == null) {
         data.seek(entry.offset);
         instance = new FST<Long>(data, PositiveIntOutputs.getSingleton());
+        ramBytesUsed += instance.sizeInBytes();
         fstInstances.put(field.number, instance);
       }
     }
@@ -369,6 +374,7 @@
       if (instance == null) {
         data.seek(entry.offset);
         instance = new FST<Long>(data, PositiveIntOutputs.getSingleton());
+        ramBytesUsed += instance.sizeInBytes();
         fstInstances.put(field.number, instance);
       }
     }
