Index: tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/search/TestSort.java
===================================================================
--- tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/search/TestSort.java (revision 821733)
+++ tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/search/TestSort.java (working copy)
@@ -35,6 +35,7 @@
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermRef;
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.store.LockObtainFailedException;
@@ -333,54 +334,78 @@
     sort.setSort (new SortField[] { new SortField ("parser", new FieldCache.IntParser(){
-      public final int parseInt(final String val) {
-        return (val.charAt(0)-'A') * 123456;
+      public final int parseInt(final String term) {
+        // dummy
+        return 0;
       }
+      public final int parseInt(final TermRef term) {
+        return (term.bytes[term.offset]-'A') * 123456;
+      }
     }), SortField.FIELD_DOC });
     assertMatches (full, queryA, sort, "JIHGFEDCBA");
     assertSaneFieldCaches(getName() + " IntParser");
     fc.purgeAllCaches();
 
     sort.setSort (new SortField[] { new SortField ("parser", new FieldCache.FloatParser(){
-      public final float parseFloat(final String val) {
-        return (float) Math.sqrt( val.charAt(0) );
+      public final float parseFloat(final String term) {
+        // dummy
+        return 0;
       }
+      public final float parseFloat(final TermRef term) {
+        return (float) Math.sqrt( term.bytes[term.offset] );
+      }
     }), SortField.FIELD_DOC });
     assertMatches (full, queryA, sort, "JIHGFEDCBA");
     assertSaneFieldCaches(getName() + " FloatParser");
     fc.purgeAllCaches();
 
     sort.setSort (new SortField[] { new SortField ("parser", new FieldCache.LongParser(){
-      public final long parseLong(final String val) {
-        return (val.charAt(0)-'A') * 1234567890L;
+      public final long parseLong(final String term) {
+        // dummy
+        return 0;
       }
+      public final long parseLong(final TermRef term) {
+        return (term.bytes[term.offset]-'A') * 1234567890L;
+      }
     }), SortField.FIELD_DOC });
     assertMatches (full, queryA, sort, "JIHGFEDCBA");
     assertSaneFieldCaches(getName() + " LongParser");
     fc.purgeAllCaches();
 
     sort.setSort (new SortField[] { new SortField ("parser", new FieldCache.DoubleParser(){
-      public final double parseDouble(final String val) {
-        return Math.pow( val.charAt(0), (val.charAt(0)-'A') );
+      public final double parseDouble(final String term) {
+        // dummy
+        return 0;
       }
+      public final double parseDouble(final TermRef term) {
+        return Math.pow( term.bytes[term.offset], (term.bytes[term.offset]-'A') );
+      }
     }), SortField.FIELD_DOC });
     assertMatches (full, queryA, sort, "JIHGFEDCBA");
     assertSaneFieldCaches(getName() + " DoubleParser");
     fc.purgeAllCaches();
 
     sort.setSort (new SortField[] { new SortField ("parser", new FieldCache.ByteParser(){
-      public final byte parseByte(final String val) {
-        return (byte) (val.charAt(0)-'A');
+      public final byte parseByte(final String term) {
+        // dummy
+        return 0;
       }
+      public final byte parseByte(final TermRef term) {
+        return (byte) (term.bytes[term.offset]-'A');
+      }
     }), SortField.FIELD_DOC });
     assertMatches (full, queryA, sort, "JIHGFEDCBA");
     assertSaneFieldCaches(getName() + " ByteParser");
     fc.purgeAllCaches();
 
     sort.setSort (new SortField[] { new SortField ("parser", new FieldCache.ShortParser(){
-      public final short parseShort(final String val) {
-        return (short) (val.charAt(0)-'A');
+      public final short parseShort(final String term) {
+        // dummy
+        return 0;
       }
+      public final short parseShort(final TermRef term) {
+        return (short) (term.bytes[term.offset]-'A');
+      }
     }), SortField.FIELD_DOC });
     assertMatches (full, queryA, sort, "JIHGFEDCBA");
     assertSaneFieldCaches(getName() + " ShortParser");
@@ -434,9 +459,13 @@
       public void setNextReader(IndexReader reader, int docBase) throws IOException {
         docValues = FieldCache.DEFAULT.getInts(reader, "parser", new FieldCache.IntParser() {
-            public final int parseInt(final String val) {
-              return (val.charAt(0)-'A') * 123456;
+            public final int parseInt(final String term) {
+              // dummy
+              return 0;
             }
+            public final int parseInt(final TermRef term) {
+              return (term.bytes[term.offset]-'A') * 123456;
+            }
           });
       }
Index: tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/search/TestTermScorer.java
===================================================================
--- tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/search/TestTermScorer.java (revision 821733)
+++ tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/search/TestTermScorer.java (working copy)
@@ -71,9 +71,9 @@
 
     Weight weight = termQuery.weight(indexSearcher);
 
-    TermScorer ts = new TermScorer(weight,
-        indexReader.termDocs(allTerm), indexSearcher.getSimilarity(),
-        indexReader.norms(FIELD));
+    Scorer ts = weight.scorer(indexSearcher.getIndexReader(),
+        true, true);
+
     //we have 2 documents with the term all in them, one document for all the other values
     final List docs = new ArrayList();
     //must call next first
@@ -133,9 +133,9 @@
 
     Weight weight = termQuery.weight(indexSearcher);
 
-    TermScorer ts = new TermScorer(weight,
-        indexReader.termDocs(allTerm), indexSearcher.getSimilarity(),
-        indexReader.norms(FIELD));
+    Scorer ts = weight.scorer(indexSearcher.getIndexReader(),
+        true, true);
+
     assertTrue("next did not return a doc", ts.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
     assertTrue("score is not correct", ts.score() == 1.6931472f);
     assertTrue("next did not return a doc", ts.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@@ -150,9 +150,9 @@
 
     Weight weight = termQuery.weight(indexSearcher);
 
-    TermScorer ts = new TermScorer(weight,
-        indexReader.termDocs(allTerm), indexSearcher.getSimilarity(),
-        indexReader.norms(FIELD));
+    Scorer ts = weight.scorer(indexSearcher.getIndexReader(),
+        true, true);
+
     assertTrue("Didn't skip", ts.advance(3) != DocIdSetIterator.NO_MORE_DOCS);
     //The next doc should be doc 5
     assertTrue("doc should be number 5", ts.docID() == 5);
@@ -165,9 +165,9 @@
 
     Weight weight = termQuery.weight(indexSearcher);
 
-    TermScorer ts = new TermScorer(weight,
-        indexReader.termDocs(allTerm), indexSearcher.getSimilarity(),
-        indexReader.norms(FIELD));
+    Scorer ts = weight.scorer(indexSearcher.getIndexReader(),
+        true, true);
+
     Explanation explanation = ts.explain(0);
     assertTrue("explanation is null and it shouldn't be", explanation != null);
     //System.out.println("Explanation: " + explanation.toString());
@@ -183,8 +183,9 @@
     termQuery = new TermQuery(dogsTerm);
     weight = termQuery.weight(indexSearcher);
 
-    ts = new TermScorer(weight, indexReader.termDocs(dogsTerm), indexSearcher.getSimilarity(),
-        indexReader.norms(FIELD));
+    ts = weight.scorer(indexSearcher.getIndexReader(),
+        true, true);
+
     explanation = ts.explain(1);
     assertTrue("explanation is null and it shouldn't be", explanation != null);
     //System.out.println("Explanation: " + explanation.toString());
Index: tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/index/TestSegmentTermEnum.java
===================================================================
--- tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/index/TestSegmentTermEnum.java (revision 821733)
+++ tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/index/TestSegmentTermEnum.java (working copy)
@@ -61,23 +61,6 @@
     verifyDocFreq();
   }
 
-  public void testPrevTermAtEnd() throws IOException
-  {
-    Directory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-    addDoc(writer, "aaa bbb");
-    writer.close();
-    SegmentReader reader = SegmentReader.getOnlySegmentReader(dir);
-    SegmentTermEnum termEnum = (SegmentTermEnum) reader.terms();
-    assertTrue(termEnum.next());
-    assertEquals("aaa", termEnum.term().text());
-    assertTrue(termEnum.next());
-    assertEquals("aaa", termEnum.prev().text());
-    assertEquals("bbb", termEnum.term().text());
-    assertFalse(termEnum.next());
-    assertEquals("bbb", termEnum.prev().text());
-  }
-
   private void verifyDocFreq()
       throws IOException
Index: tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/index/TestIndexReader.java
===================================================================
--- tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/index/TestIndexReader.java (revision 821733)
+++ tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/index/TestIndexReader.java (working copy)
@@ -1009,30 +1009,8 @@
       // new IndexFileDeleter, have it delete
       // unreferenced files, then verify that in fact
      // no files were deleted:
-      String[] startFiles = dir.listAll();
-      SegmentInfos infos = new SegmentInfos();
-      infos.read(dir);
-      new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
-      String[] endFiles = dir.listAll();
+      TestIndexWriter.assertNoUnreferencedFiles(dir, "reader.close() failed to delete unreferenced files");
 
-      Arrays.sort(startFiles);
-      Arrays.sort(endFiles);
-
-      //for(int i=0;i= 0); }
Index: tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
===================================================================
--- tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (revision 821733)
+++ tags/lucene_2_9_back_compat_tests_20090930a/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (working copy)
@@ -47,7 +47,7 @@
   private class SeekCountingDirectory extends RAMDirectory {
     public IndexInput openInput(String name) throws IOException {
       IndexInput ii = super.openInput(name);
-      if (name.endsWith(".prx")) {
+      if (name.endsWith(".prx") || name.endsWith(".pos")) {
         // we decorate the proxStream with a wrapper class that allows to count the number of calls of seek()
         ii = new SeeksCountingStream(ii);
       }