Index: core/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
===================================================================
--- core/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java	(revision 941527)
+++ core/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java	(working copy)
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -195,6 +196,34 @@
     }
   }
 
+  /**
+   * Test that closing a scanner while a client is using it doesn't throw
+   * NPEs but instead an UnknownScannerException. HBASE-2503
+   * @throws Exception
+   */
+  public void testRaceBetweenClientAndTimeout() throws Exception {
+    try {
+      this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
+      addContent(this.r, HConstants.CATALOG_FAMILY);
+      Scan scan = new Scan();
+      InternalScanner s = r.getScanner(scan);
+      List<KeyValue> results = new ArrayList<KeyValue>();
+      try {
+        s.next(results);
+        s.close();
+        s.next(results);
+        fail("We don't want anything more, we should be failing");
+      } catch (UnknownScannerException ex) {
+        // ok!
+        return;
+      }
+    } finally {
+      this.r.close();
+      this.r.getLog().closeAndDelete();
+      shutdownDfs(this.cluster);
+    }
+  }
+
   /** The test!
    * @throws IOException
    */
Index: core/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
===================================================================
--- core/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java	(revision 941527)
+++ core/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java	(working copy)
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -1824,6 +1825,8 @@
     private Filter filter;
     private List<KeyValue> results = new ArrayList<KeyValue>();
     private int batch;
+    // Doesn't need to be volatile, always accessed under a sync'ed method
+    private boolean filterClosed = false;
 
     RegionScanner(Scan scan, List<KeyValueScanner> additionalScanners) {
       this.filter = scan.getFilter();
@@ -1858,7 +1861,13 @@
       }
     }
 
-    public boolean next(List<KeyValue> outResults, int limit) throws IOException {
+    public synchronized boolean next(List<KeyValue> outResults, int limit)
+        throws IOException {
+      if (this.filterClosed) {
+        throw new UnknownScannerException("Scanner was closed (timed out?) " +
+            "after we renewed it. Could be caused by a very slow scanner " +
+            "or a lengthy garbage collection");
+      }
       if (closing.get() || closed.get()) {
         close();
         throw new NotServingRegionException(regionInfo.getRegionNameAsString() +
@@ -1877,7 +1886,8 @@
       return returnResult;
     }
 
-    public boolean next(List<KeyValue> outResults) throws IOException {
+    public synchronized boolean next(List<KeyValue> outResults)
+        throws IOException {
       // apply the batching limit by default
       return next(outResults, batch);
     }
@@ -1885,7 +1895,7 @@
     /*
      * @return True if a filter rules the scanner is over, done.
      */
-    boolean isFilterDone() {
+    synchronized boolean isFilterDone() {
       return this.filter != null && this.filter.filterAllRemaining();
     }
 
@@ -1955,24 +1965,15 @@
       return true;
     }
 
-    public void close() {
+    public synchronized void close() {
       storeHeap.close();
+      this.filterClosed = true;
     }
-
-    /**
-     *
-     * @param scanner to be closed
-     */
-    public void close(KeyValueScanner scanner) {
-      try {
-        scanner.close();
-      } catch(NullPointerException npe) {}
-    }
 
     /**
      * @return the current storeHeap
      */
-    public KeyValueHeap getStoreHeap() {
+    public synchronized KeyValueHeap getStoreHeap() {
       return this.storeHeap;
     }
   }