Index: src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
===================================================================
--- src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java	(revision 943752)
+++ src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java	(working copy)
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -212,6 +213,34 @@
     }
   }
 
+  /**
+   * Test that closing a scanner while a client is using it doesn't throw
+   * NPEs but instead an UnknownScannerException. HBASE-2503
+   * @throws Exception
+   */
+  public void testRaceBetweenClientAndTimeout() throws Exception {
+    try {
+      this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
+      addContent(this.r, HConstants.CATALOG_FAMILY);
+      Scan scan = new Scan();
+      InternalScanner s = r.getScanner(scan);
+      List<KeyValue> results = new ArrayList<KeyValue>();
+      try {
+        s.next(results);
+        s.close();
+        s.next(results);
+        fail("We don't want anything more, we should be failing");
+      } catch (UnknownScannerException ex) {
+        // ok!
+        return;
+      }
+    } finally {
+      this.r.close();
+      this.r.getLog().closeAndDelete();
+      shutdownDfs(this.cluster);
+    }
+  }
+
   /** The test!
    * @throws IOException
    */
Index: src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
===================================================================
--- src/java/org/apache/hadoop/hbase/regionserver/HRegion.java	(revision 943752)
+++ src/java/org/apache/hadoop/hbase/regionserver/HRegion.java	(working copy)
@@ -33,6 +33,7 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -1818,11 +1819,13 @@
     private Scan theScan = null;
     private int isScan;
     private List<KeyValueScanner> extraScanners = null;
+    private boolean filterClosed = false;
 
     RegionScanner(Scan scan, List<KeyValueScanner> additionalScanners) {
       //DebugPrint.println("HRegionScanner.<init>");
       this.filter = scan.getFilter();
+      // Doesn't need to be volatile, always accessed under a sync'ed method
       this.oldFilter = scan.getOldFilter();
       if (Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW)) {
         this.stopRow = null;
@@ -1869,7 +1872,13 @@
       ReadWriteConsistencyControl.resetThreadReadPoint(rwcc);
     }
 
-    public boolean next(List<KeyValue> outResults) throws IOException {
+    public synchronized boolean next(List<KeyValue> outResults)
+        throws IOException {
+      if (this.filterClosed) {
+        throw new UnknownScannerException("Scanner was closed (timed out?) " +
+            "after we renewed it. Could be caused by a very slow scanner " +
+            "or a lengthy garbage collection");
+      }
       if (closing.get() || closed.get()) {
         close();
         throw new NotServingRegionException(regionInfo.getRegionNameAsString() +
@@ -1899,7 +1908,7 @@
     /*
      * @return True if a filter rules the scanner is over, done.
      */
-    boolean isFilterDone() {
+    synchronized boolean isFilterDone() {
       return (this.filter != null && this.filter.filterAllRemaining()) ||
         (this.oldFilter != null && oldFilter.filterAllRemaining());
@@ -1974,10 +1983,11 @@
           (oldFilter != null && this.oldFilter.filterRowKey(row, 0, row.length));
     }
 
-    public void close() {
+    public synchronized void close() {
       if (storeHeap != null) {
         storeHeap.close();
         storeHeap = null;
+        this.filterClosed = true;
       }
     }
@@ -1985,7 +1995,7 @@
      *
      * @param scanner to be closed
      */
-    public void close(KeyValueScanner scanner) {
+    public synchronized void close(KeyValueScanner scanner) {
       try {
         scanner.close();
       } catch(NullPointerException npe) {}